": downtime_in_seconds,
+ }
+ }
+ }
+ """
+ date_start, date_end = get_timerange_from_date(date_str)
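+ # probe_success is a 0/1 gauge, so count_over_time gives the number of
+ # probes in the 24h window and sum_over_time the number of successful
+ # ones; their difference is the count of failed probes per instance.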
+ url = f'https://{prometheus_server}/prometheus/api/v1/query_range?query=sum(count_over_time(probe_success{{job="site", cluster="{cluster}", server!~"^t.*"}}[24h] offset -24h)) by (server, group, instance) - sum(sum_over_time(probe_success{{job="site", cluster="{cluster}", server!~"^t.*"}}[24h] offset -24h)) by (server, group, instance)&start={date_start}&end={date_end}&step=86400'
+ response = requests.get(url, auth=prometheus_auth)
+ response.raise_for_status()
+
+ data = response.json()
+ if data["status"] != "success":
+ raise Exception("Failed to fetch data from Prometheus")
+
+ data = data.get("data", {}).get("result", [])
+
+ result = {}
+ for i in data:
+ server = i.get("metric").get("server")
+ group = i.get("metric").get("group")
+ if server not in result:
+ result[server] = {"__total_downtime": 0}
+ if group not in result[server]:
+ result[server][group] = {"__total_downtime": 0}
+
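+ # values[0][-1] is the failed-probe count for the day; multiplying by
+ # the scrape interval converts it to approximate downtime in seconds.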
+ downtime = int(i.get("values")[0][-1]) * scrape_interval
+ result[server]["__total_downtime"] += downtime
+ result[server][group]["__total_downtime"] += downtime
+ result[server][group][i.get("metric").get("instance")] = downtime
+
+ result["__total_downtime"] = sum(
+ [result[server]["__total_downtime"] for server in result if server != "__total_downtime"]
+ )
+
+ return result
+
+
+def get_server_downtime_data_for_date(
+ clusters: list, date_str: str, prometheus_server: str, prometheus_auth: tuple
+) -> dict:
+ server_downtime = {}
+ for cluster in clusters:
+ done = False
+ while not done:
+ try:
+ print(f"Fetching server data for {cluster} on {date_str}...")
+ server_downtime[cluster] = get_servers_downtime_data(
+ cluster, date_str, prometheus_server, prometheus_auth
+ )
+ done = True
+ except Exception as e:
+ print(f"Error fetching server data for {cluster} on {date_str}: {e}")
+ print("Retrying...")
+ time.sleep(5)
+
+ server_downtime["__total_downtime"] = sum(
+ [server_downtime[cluster]["__total_downtime"] for cluster in clusters]
+ )
+ return server_downtime
+
+
+def get_site_downtime_data_for_date(
+ clusters: list, date_str: str, prometheus_server: str, prometheus_auth: tuple
+) -> dict:
+ site_downtime = {}
+ for cluster in clusters:
+ done = False
+ while not done:
+ try:
+ print(f"Fetching site data for {cluster} on {date_str}...")
+ site_downtime[cluster] = get_sites_downtime_data(
+ cluster, date_str, prometheus_server, prometheus_auth
+ )
+ done = True
+ except Exception as e:
+ print(f"Error fetching site data for {cluster} on {date_str}: {e}")
+ print("Retrying...")
+ time.sleep(5)
+
+ site_downtime["__total_downtime"] = sum(
+ [site_downtime[cluster]["__total_downtime"] for cluster in clusters]
+ )
+ return site_downtime
+
+
+def get_data_for_all_clusters(
+ start_date: str, end_date: str, clusters: list, prometheus_server: str, prometheus_auth: tuple
+) -> dict:
+ # Generate the list of dates between start_date and end_date (inclusive)
+ start = datetime.datetime.strptime(start_date, "%Y-%m-%d").date()
+ end = datetime.datetime.strptime(end_date, "%Y-%m-%d").date()
+ days = (end - start).days + 1
+ date_list = [(start + datetime.timedelta(days=i)).isoformat() for i in range(days)]
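+ # e.g. 2025-01-01 to 2025-01-03 yields ["2025-01-01", "2025-01-02", "2025-01-03"]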
+
+ server_downtime_file_name = get_report_path(f"server_downtime_data_{start_date}_to_{end_date}.json")
+ site_downtime_file_name = get_report_path(f"site_downtime_data_{start_date}_to_{end_date}.json")
+
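+ # Collected data is cached on disk; delete these JSON files to force a re-fetch.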
+ if not os.path.exists(server_downtime_file_name):
+ server_downtime = {}
+ for date in date_list:
+ server_downtime[date] = get_server_downtime_data_for_date(
+ clusters, date, prometheus_server, prometheus_auth
+ )
+
+ with open(server_downtime_file_name, "w") as f:
+ json.dump(server_downtime, f, indent=4)
+ print(f"Server downtime data collected and saved to {server_downtime_file_name}.")
+ else:
+ print(f"{server_downtime_file_name} already exists. Skipping server data collection.")
+
+ if not os.path.exists(site_downtime_file_name):
+ site_downtime = {}
+ for date in date_list:
+ site_downtime[date] = get_site_downtime_data_for_date(
+ clusters, date, prometheus_server, prometheus_auth
+ )
+
+ with open(site_downtime_file_name, "w") as f:
+ json.dump(site_downtime, f, indent=4)
+ print(f"Site downtime data collected and saved to {site_downtime_file_name}.")
+ else:
+ print(f"{site_downtime_file_name} already exists. Skipping site data collection.")
+
+ return {
+ "server_downtime": json.load(open(server_downtime_file_name, "r")),
+ "site_downtime": json.load(open(site_downtime_file_name, "r")),
+ }
+
+
+def generate_server_downtime_report( # noqa: C901
+ start_date: str, end_date: str, server_downtime_data: dict, clusters: list
+):
+ report_data = {} # store report data here
+
+ file_name = get_report_path(f"server_downtime_report_{start_date}_to_{end_date}.json")
+ if os.path.exists(file_name):
+ print(f"{file_name} already exists. Skipping report generation.")
+ return json.load(open(file_name, "r"))
+
+ ############# Prepare Some Common Data #############
+ # Prepare total downtime of each cluster for the whole period
+ cluster_total_downtime = {cluster: 0 for cluster in clusters}
+ for _, clusters_data in server_downtime_data.items():
+ for cluster in clusters:
+ if cluster.startswith("__"):
+ continue
+ cluster_total_downtime[cluster] += clusters_data.get(cluster, {}).get("__total_downtime", 0)
+ cluster_total_downtime["__total_downtime"] = sum(cluster_total_downtime.values())
+
+ # Prepare total downtime of each server in each cluster for the whole period
+ cluster_server_downtime = {cluster: {} for cluster in clusters}
+ for _, clusters_data in server_downtime_data.items():
+ for cluster in clusters:
+ if cluster.startswith("__"):
+ continue
+ for server, downtime in clusters_data.get(cluster, {}).items():
+ if server.startswith("__"):
+ continue
+ if server not in cluster_server_downtime[cluster]:
+ cluster_server_downtime[cluster][server] = 0
+ cluster_server_downtime[cluster][server] += downtime
+
+ clusters_sorted_by_downtime = sorted(cluster_total_downtime.items(), key=lambda x: x[1], reverse=True)
+ clusters_sorted_by_downtime = [i[0] for i in clusters_sorted_by_downtime if not i[0].startswith("__")]
+ report_data["clusters_sorted_by_downtime"] = clusters_sorted_by_downtime
+ ############# Prepare Report #############
+
+ # Servers with complete downtime (One Table with all the infos)
+ servers_with_complete_downtime = []
+ """
+ [
+ {
+ "dates": [date1, date2, ...],
+ "server": server,
+ "cluster": cluster,
+ "total_downtime": total_downtime_in_seconds,
+ "contribution_to_cluster_downtime": percentage,
+ "contribution_to_total_downtime": percentage,
+ }
+ ]
+ """
+
+ for date, clusters_data in server_downtime_data.items():
+ for cluster in clusters:
+ if cluster.startswith("__"):
+ continue
+ for server, downtime in clusters_data.get(cluster, {}).items():
+ if server.startswith("__"):
+ continue
+ if downtime >= minimum_downtime_to_consider_as_complete_down_server:
+ entry = next(
+ (
+ item
+ for item in servers_with_complete_downtime
+ if item["server"] == server and item["cluster"] == cluster
+ ),
+ None,
+ )
+ if entry:
+ entry["dates"].append(date)
+ entry["total_downtime"] += downtime
+ else:
+ servers_with_complete_downtime.append(
+ {
+ "cluster": cluster,
+ "server": server,
+ "dates": [date],
+ "total_downtime": downtime,
+ }
+ )
+
+ # Calculate contributions
+ for entry in servers_with_complete_downtime:
+ entry["contribution_to_cluster_downtime"] = round(
+ entry["total_downtime"] / cluster_total_downtime[entry["cluster"]] * 100
+ if cluster_total_downtime[entry["cluster"]] > 0
+ else 0,
+ 2,
+ )
+ entry["contribution_to_total_downtime"] = round(
+ entry["total_downtime"] / cluster_total_downtime["__total_downtime"] * 100
+ if cluster_total_downtime["__total_downtime"] > 0
+ else 0,
+ 2,
+ )
+
+ report_data["servers_with_complete_downtime"] = servers_with_complete_downtime
+
+ # Find top 20 servers of each cluster with highest downtime
+ # Use server_downtime_data to get the cluster of each server
+ top_20_servers_with_highest_downtime_per_cluster = {}
+ for cluster, servers in cluster_server_downtime.items():
+ sorted_servers = sorted(servers.items(), key=lambda x: x[1], reverse=True)[:20]
+ sorted_servers = [
+ (server, downtime)
+ for server, downtime in sorted_servers
+ if downtime >= minimum_downtime_to_consider_in_top_k_down_servers_per_cluster
+ ]
+ top_20_servers_with_highest_downtime_per_cluster[cluster] = [
+ {"server": server, "total_downtime": downtime} for server, downtime in sorted_servers
+ ]
+
+ report_data["top_20_servers_with_highest_downtime_per_cluster"] = (
+ top_20_servers_with_highest_downtime_per_cluster
+ )
+
+ with open(file_name, "w") as f:
+ json.dump(report_data, f, indent=4)
+ print(f"Server uptime report generated and saved to {file_name}.")
+
+ return report_data
+
+
+def generate_site_downtime_report(start_date: str, end_date: str, site_downtime_data: dict, clusters: list): # noqa: C901
+ report_data = {} # store report data here
+ file_name = get_report_path(f"site_downtime_report_{start_date}_to_{end_date}.json")
+ if os.path.exists(file_name):
+ print(f"{file_name} already exists. Skipping report generation.")
+ return json.load(open(file_name, "r"))
+
+ ############# Prepare Some Common Data #############
+ # Prepare total downtime of each cluster for the whole period
+ cluster_total_downtime = {cluster: 0 for cluster in clusters}
+ for _, clusters_data in site_downtime_data.items():
+ for cluster in clusters:
+ if cluster.startswith("__"):
+ continue
+ cluster_total_downtime[cluster] += clusters_data.get(cluster, {}).get("__total_downtime", 0)
+ cluster_total_downtime["__total_downtime"] = sum(cluster_total_downtime.values())
+
+ # Sites with full downtime in any of the days (One Table with all the infos)
+ sites_with_complete_downtime = []
+ for date, clusters_data in site_downtime_data.items():
+ """
+ [
+ {
+ "dates": [date1, date2, ...],
+ "site": site,
+ "server": server,
+ "group": group,
+ "cluster": cluster,
+ "total_downtime": total_downtime_in_seconds,
+ "contribution_to_server_downtime": percentage,
+ "contribution_to_cluster_downtime": percentage,
+ "contribution_to_total_downtime": percentage,
+ }
+ ]
+ """
+ for cluster in clusters:
+ if cluster.startswith("__"):
+ continue
+ for server, groups in clusters_data.get(cluster, {}).items():
+ if server.startswith("__"):
+ continue
+ for group, sites in groups.items():
+ if group.startswith("__"):
+ continue
+ for site, downtime in sites.items():
+ if site.startswith("__"):
+ continue
+ if downtime >= minimum_downtime_to_consider_as_complete_down_site:
+ entry = next(
+ (
+ item
+ for item in sites_with_complete_downtime
+ if item["site"] == site
+ and item["server"] == server
+ and item["cluster"] == cluster
+ and item["group"] == group
+ ),
+ None,
+ )
+ if entry:
+ entry["dates"].append(date)
+ entry["total_downtime"] += downtime
+ else:
+ sites_with_complete_downtime.append(
+ {
+ "cluster": cluster,
+ "server": server,
+ "group": group,
+ "site": site,
+ "dates": [date],
+ "total_downtime": downtime,
+ }
+ )
+ # Calculate contributions
+ for entry in sites_with_complete_downtime:
+ # Contribution to server downtime
+ server_downtime = 0
+ for _, clusters_data in site_downtime_data.items():
+ server_downtime += (
+ clusters_data.get(entry["cluster"], {}).get(entry["server"], {}).get("__total_downtime", 0)
+ )
+ entry["contribution_to_server_downtime"] = round(
+ entry["total_downtime"] / server_downtime * 100 if server_downtime > 0 else 0, 2
+ )
+
+ # Contribution to cluster downtime
+ entry["contribution_to_cluster_downtime"] = round(
+ entry["total_downtime"] / cluster_total_downtime[entry["cluster"]] * 100
+ if cluster_total_downtime[entry["cluster"]] > 0
+ else 0,
+ 2,
+ )
+ # Contribution to total downtime
+ entry["contribution_to_total_downtime"] = round(
+ entry["total_downtime"] / cluster_total_downtime["__total_downtime"] * 100
+ if cluster_total_downtime["__total_downtime"] > 0
+ else 0,
+ 2,
+ )
+
+ report_data["sites_with_complete_downtime"] = sites_with_complete_downtime
+
+ # Top K (top_k_sites_count) sites with highest downtime in each cluster
+ # Use site_downtime_data to get the cluster of each server
+ top_k_sites_with_highest_downtime_per_cluster = {}
+ for cluster in clusters:
+ if cluster.startswith("__"):
+ continue
+ site_info = {}
+ for _, clusters_data in site_downtime_data.items():
+ for server, groups in clusters_data.get(cluster, {}).items():
+ if server.startswith("__"):
+ continue
+ for group, sites in groups.items():
+ if group.startswith("__"):
+ continue
+ for site, downtime in sites.items():
+ if site.startswith("__"):
+ continue
+ key = (site, server, group)
+ if key not in site_info:
+ site_info[key] = 0
+ site_info[key] += downtime
+
+ # Sort by downtime and filter
+ sorted_sites = sorted(site_info.items(), key=lambda x: x[1], reverse=True)[:top_k_sites_count]
+ sorted_sites = [
+ (site_tuple, downtime)
+ for site_tuple, downtime in sorted_sites
+ if downtime >= minimum_downtime_to_consider_in_top_k_down_sites_per_cluster
+ ]
+ top_k_sites_with_highest_downtime_per_cluster[cluster] = [
+ {"site": site, "server": server, "bench": group, "total_downtime": downtime}
+ for (site, server, group), downtime in sorted_sites
+ ]
+ report_data["top_k_sites_with_highest_downtime_per_cluster"] = (
+ top_k_sites_with_highest_downtime_per_cluster
+ )
+
+ # Top 20 servers of each cluster with highest site downtime
+ top_20_servers_with_highest_site_downtime_per_cluster = {}
+ for cluster in clusters:
+ if cluster.startswith("__"):
+ continue
+ server_site_downtime = {}
+ for _, clusters_data in site_downtime_data.items():
+ for server, groups in clusters_data.get(cluster, {}).items():
+ if server.startswith("__"):
+ continue
+ if server not in server_site_downtime:
+ server_site_downtime[server] = 0
+ for group, sites in groups.items():
+ if group.startswith("__"):
+ continue
+ for site, downtime in sites.items():
+ if site.startswith("__"):
+ continue
+ server_site_downtime[server] += downtime
+
+ sorted_servers = sorted(server_site_downtime.items(), key=lambda x: x[1], reverse=True)[:20]
+ sorted_servers = [
+ (server, downtime)
+ for server, downtime in sorted_servers
+ if downtime
+ >= minimum_downtime_to_consider_in_top_k_servers_contributing_to_site_downtime_per_cluster
+ ]
+ top_20_servers_with_highest_site_downtime_per_cluster[cluster] = [
+ {"server": server, "total_downtime": downtime} for server, downtime in sorted_servers
+ ]
+
+ report_data["top_20_servers_with_highest_site_downtime_per_cluster"] = (
+ top_20_servers_with_highest_site_downtime_per_cluster
+ )
+
+ # List out the problematic sites found in reports for further investigation
+ problematic_sites = []
+ for entry in sites_with_complete_downtime:
+ problematic_sites.append(entry["site"])
+ for _, sites in top_k_sites_with_highest_downtime_per_cluster.items():
+ for site_entry in sites:
+ if site_entry["site"] not in problematic_sites:
+ problematic_sites.append(site_entry["site"])
+
+ report_data["problematic_sites"] = problematic_sites
+
+ # Analyze those sites for further issues
+ report_data["problematic_sites_analysis_report"] = analyze_sites_in_thread(problematic_sites)
+
+ with open(file_name, "w") as f:
+ json.dump(report_data, f, indent=4)
+ print(f"Site uptime report generated and saved to {file_name}.")
+
+ return report_data
+
+
+def analyze_sites_in_thread(sites: list, batch_size: int = 50) -> dict:
+ results = {}
+ threads: list[threading.Thread] = []
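+ # Each batch analyzes a distinct set of sites, so the concurrent
+ # results.update() calls from worker threads never write the same key.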
+ for i in range(0, len(sites), batch_size):
+ batch = sites[i : i + batch_size]
+ thread = threading.Thread(target=lambda b: results.update(analyze_sites(b)), args=(batch,))
+ threads.append(thread)
+ thread.start()
+
+ for thread in threads:
+ thread.join()
+
+ return results
+
+
+def analyze_sites(sites: list):
+ results = {}
+ status_code_map = {
+ 200: "reachable",
+ 401: "unauthorized",
+ 402: "site_suspended",
+ 403: "forbidden",
+ 404: "site_not_found",
+ 429: "rate_limited",
+ 500: "internal_server_error",
+ 502: "bad_gateway",
+ 503: "site_in_maintenance",
+ }
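+ # Note: a Frappe site responds with 503 while maintenance mode is on.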
+ for site in sites:
+ results[site] = {
+ "status": "reachable", # reachable, timeout, error, redirect_external
+ "status_code": 0,
+ "redirects": [],
+ "error": "",
+ "proxy_server": "", # Frappe Cloud, cloudflare
+ }
+ try:
+ # Check if sites are reachable
+ try:
+ # If there are redirects, follow it and log those
+ done = False
+ url = f"https://{site}/api/method/ping"
+ while not done:
+ response = requests.get(url, timeout=(2, 10), allow_redirects=False)
+ # Record status code
+ results[site]["status_code"] = response.status_code
+ # Check if we are getting redirected to some other server in /api/method/ping check
+ results[site]["proxy_server"] = response.headers.get(
+ "Server", ""
+ ) or response.headers.get("server", "")
+ if response.status_code in [301, 302]:
+ # Record redirects
+ url = response.headers.get("Location")
+ results[site]["redirects"].append(url)
+ elif response.status_code in status_code_map:
+ results[site]["status"] = status_code_map.get(response.status_code, "unreachable")
+ done = True
+ else:
+ done = True
+
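+ # A Server header other than "Frappe Cloud" means DNS for the domain
+ # points at some other proxy (e.g. cloudflare), so the site is served elsewhere.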
+ if results[site]["proxy_server"] and results[site]["proxy_server"].lower() != "frappe cloud":
+ results[site]["status"] = "redirect_external"
+
+ except requests.Timeout:
+ results[site]["status"] = "timeout"
+
+ # TODO: In the timeout case, check the DNS record of the custom domain
+ # and add a message regarding possible issues with the site.
+
+ except Exception as e:
+ results[site]["status"] = "error"
+ results[site]["error"] = str(e)
+ continue
+
+ return results
+
+
+def is_report_available(start_date: str, end_date: str):
+ files = [
+ get_report_path(f"server_downtime_report_{start_date}_to_{end_date}.json"),
+ get_report_path(f"site_downtime_report_{start_date}_to_{end_date}.json"),
+ ]
+ return all(os.path.exists(f) for f in files)
diff --git a/press/press/doctype/downtime_analysis/test_downtime_analysis.py b/press/press/doctype/downtime_analysis/test_downtime_analysis.py
new file mode 100644
index 00000000000..3e3b7d35b40
--- /dev/null
+++ b/press/press/doctype/downtime_analysis/test_downtime_analysis.py
@@ -0,0 +1,20 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests import IntegrationTestCase
+
+# On IntegrationTestCase, the doctype test records and all
+# link-field test record dependencies are recursively loaded
+# Use these module variables to add/remove to/from that list
+EXTRA_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
+IGNORE_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
+
+
+class IntegrationTestDowntimeAnalysis(IntegrationTestCase):
+ """
+ Integration tests for DowntimeAnalysis.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/drip_email/drip_email.js b/press/press/doctype/drip_email/drip_email.js
index cd1262242f1..a3b26506c7f 100644
--- a/press/press/doctype/drip_email/drip_email.js
+++ b/press/press/doctype/drip_email/drip_email.js
@@ -1,6 +1,47 @@
-// Copyright (c) 2016, Web Notes and contributors
+// Copyright (c) 2025, Frappe and contributors
// For license information, please see license.txt
frappe.ui.form.on('Drip Email', {
- refresh: function (frm) {},
+ refresh: function (frm) {
+ const doc = frm.doc;
+ const can_write = frappe.boot.user.can_write.includes(doc.doctype);
+ if (!frm.is_new() && !frm.is_dirty() && !doc.email_sent && can_write) {
+ frm.add_custom_button('Send a test email', () => {
+ frm.events.send_test_email(frm);
+ });
+ }
+ },
+
+ send_test_email(frm) {
+ let d = new frappe.ui.Dialog({
+ title: __('Send Test Email'),
+ fields: [
+ {
+ label: __('Site'),
+ fieldname: 'site',
+ fieldtype: 'Link',
+ filters: {
+ standby_for_product: frm.doc.product_trial,
+ },
+ options: 'Site',
+ reqd: 1,
+ },
+ {
+ label: __('Email'),
+ fieldname: 'email',
+ fieldtype: 'Data',
+ options: 'Email',
+ reqd: 1,
+ },
+ ],
+ primary_action_label: __('Send'),
+ primary_action({ site, email }) {
+ d.get_primary_btn().text(__('Sending...')).prop('disabled', true);
+ frm.call('send_test_email', { site, email }).then(() => {
+ d.get_primary_btn().text(__('Send again')).prop('disabled', false);
+ });
+ },
+ });
+ d.show();
+ },
});
diff --git a/press/press/doctype/drip_email/drip_email.json b/press/press/doctype/drip_email/drip_email.json
index cdd5216981e..e8f1100e1e5 100644
--- a/press/press/doctype/drip_email/drip_email.json
+++ b/press/press/doctype/drip_email/drip_email.json
@@ -10,7 +10,7 @@
"field_order": [
"enabled",
"email_type",
- "saas_app",
+ "product_trial",
"subject",
"column_break_7",
"send_by_consultant",
@@ -18,22 +18,20 @@
"sender",
"reply_to",
"pre_header",
+ "section_break_ehlw",
+ "condition",
+ "column_break_jext",
+ "html_pzsv",
"section_break_9",
- "message",
+ "content_type",
+ "message_html",
+ "message_markdown",
+ "message_rich_text",
"section_break_4",
+ "skip_sites_with_paid_plan",
"send_after",
"send_after_payment",
- "minimum_activation_level",
- "maximum_activation_level",
"column_break_2",
- "distribution",
- "manufacturing",
- "retail",
- "services",
- "education",
- "healthcare",
- "non_profit",
- "other",
"section_break_25",
"module_setup_guide"
],
@@ -90,14 +88,8 @@
},
{
"fieldname": "section_break_9",
- "fieldtype": "Section Break"
- },
- {
- "fieldname": "message",
- "fieldtype": "Text Editor",
- "in_list_view": 1,
- "label": "Message",
- "reqd": 1
+ "fieldtype": "Section Break",
+ "label": "Content"
},
{
"fieldname": "section_break_4",
@@ -116,96 +108,88 @@
"fieldtype": "Check",
"label": "Send After Payment"
},
- {
- "description": "1-7",
- "fieldname": "minimum_activation_level",
- "fieldtype": "Int",
- "label": "Minimum Activation Level"
- },
- {
- "description": "1-7",
- "fieldname": "maximum_activation_level",
- "fieldtype": "Int",
- "label": "Maximum Activation Level"
- },
{
"default": "(1-7)",
"fieldname": "column_break_2",
"fieldtype": "Column Break"
},
{
- "default": "0",
- "fieldname": "distribution",
- "fieldtype": "Check",
- "label": "Distribution"
+ "fieldname": "pre_header",
+ "fieldtype": "Data",
+ "label": "Pre Header"
},
{
- "default": "0",
- "fieldname": "manufacturing",
- "fieldtype": "Check",
- "label": "Manufacturing"
+ "fieldname": "section_break_25",
+ "fieldtype": "Section Break"
},
{
- "default": "0",
- "fieldname": "retail",
- "fieldtype": "Check",
- "label": "Retail"
+ "fieldname": "module_setup_guide",
+ "fieldtype": "Table",
+ "label": "Module Setup Guide",
+ "options": "Module Setup Guide"
},
{
"default": "0",
- "fieldname": "services",
+ "fieldname": "skip_sites_with_paid_plan",
"fieldtype": "Check",
- "label": "Services"
+ "label": "Skip Sites With Paid Plan"
},
{
- "default": "0",
- "fieldname": "education",
- "fieldtype": "Check",
- "label": "Education"
+ "fieldname": "condition",
+ "fieldtype": "Code",
+ "label": "Condition"
},
{
- "default": "0",
- "fieldname": "healthcare",
- "fieldtype": "Check",
- "label": "Healthcare"
+ "fieldname": "html_pzsv",
+ "fieldtype": "HTML",
+ "options": "Condition Examples:
\ndoc.status==\"Open\" account_request.country==\"Spain\" doc.total > 40000\n \n\nApp doc is available as app, Account Request as account_request and the current doc as just doc"
},
{
- "default": "0",
- "fieldname": "non_profit",
- "fieldtype": "Check",
- "label": "Non Profit"
+ "fieldname": "section_break_ehlw",
+ "fieldtype": "Section Break"
},
{
- "default": "0",
- "fieldname": "other",
- "fieldtype": "Check",
- "label": "Other"
+ "fieldname": "column_break_jext",
+ "fieldtype": "Column Break"
},
{
- "fieldname": "pre_header",
- "fieldtype": "Data",
- "label": "Pre Header"
+ "fieldname": "content_type",
+ "fieldtype": "Select",
+ "label": "Content Type",
+ "options": "Rich Text\nMarkdown\nHTML"
},
{
- "fieldname": "section_break_25",
- "fieldtype": "Section Break"
+ "depends_on": "eval: doc.content_type === 'Markdown'",
+ "fieldname": "message_markdown",
+ "fieldtype": "Markdown Editor",
+ "in_list_view": 1,
+ "label": "Message (Markdown)"
},
{
- "fieldname": "module_setup_guide",
- "fieldtype": "Table",
- "label": "Module Setup Guide",
- "options": "Module Setup Guide"
+ "depends_on": "eval: doc.content_type === 'Rich Text'",
+ "fieldname": "message_rich_text",
+ "fieldtype": "Text Editor",
+ "in_list_view": 1,
+ "label": "Message (Rich Text)"
+ },
+ {
+ "depends_on": "eval: doc.content_type === 'HTML'",
+ "fieldname": "message_html",
+ "fieldtype": "HTML Editor",
+ "in_list_view": 1,
+ "label": "Message (HTML)"
},
{
- "fieldname": "saas_app",
+ "fieldname": "product_trial",
"fieldtype": "Link",
- "label": "Saas App",
- "options": "Marketplace App"
+ "label": "Product Trial",
+ "options": "Product Trial"
}
],
+ "grid_page_length": 50,
"icon": "icon-envelope",
"links": [],
- "modified": "2022-08-24 17:58:28.497406",
+ "modified": "2025-05-22 15:14:07.377379",
"modified_by": "Administrator",
"module": "Press",
"name": "Drip Email",
@@ -237,8 +221,9 @@
"write": 1
}
],
+ "row_format": "Dynamic",
"sort_field": "modified",
"sort_order": "ASC",
"states": [],
"title_field": "subject"
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/drip_email/drip_email.py b/press/press/doctype/drip_email/drip_email.py
index 687a8b1e014..4b5258d9019 100644
--- a/press/press/doctype/drip_email/drip_email.py
+++ b/press/press/doctype/drip_email/drip_email.py
@@ -1,22 +1,60 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2015, Web Notes and contributors
# For license information, please see license.txt
+from __future__ import annotations
-from datetime import date, timedelta
-from typing import Dict, List
+from datetime import timedelta
import frappe
+import rq
+import rq.exceptions
+import rq.timeouts
+from frappe import _
from frappe.model.document import Document
+from frappe.rate_limiter import rate_limit
from frappe.utils.make_random import get_random
+from press.utils import log_error
+
class DripEmail(Document):
- def send(self, site_name=None, lead=None):
- if self.email_type in ["Drip", "Sign Up"] and site_name:
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.module_setup_guide.module_setup_guide import ModuleSetupGuide
+
+ condition: DF.Code | None
+ content_type: DF.Literal["Rich Text", "Markdown", "HTML"]
+ email_type: DF.Literal[
+ "Drip", "Sign Up", "Subscription Activation", "Whitepaper Feedback", "Onboarding"
+ ]
+ enabled: DF.Check
+ message_html: DF.HTMLEditor | None
+ message_markdown: DF.MarkdownEditor | None
+ message_rich_text: DF.TextEditor | None
+ module_setup_guide: DF.Table[ModuleSetupGuide]
+ pre_header: DF.Data | None
+ product_trial: DF.Link | None
+ reply_to: DF.Data | None
+ send_after: DF.Int
+ send_after_payment: DF.Check
+ send_by_consultant: DF.Check
+ sender: DF.Data
+ sender_name: DF.Data
+ skip_sites_with_paid_plan: DF.Check
+ subject: DF.SmallText
+ # end: auto-generated types
+
+ def send(self, site_name: str | None = None):
+ if site_name and self.email_type in ["Drip", "Sign Up"] and self.evaluate_condition(site_name):
self.send_drip_email(site_name)
- def send_drip_email(self, site_name):
+ def send_drip_email(self, site_name: str, email: str | None = None):
site = frappe.get_doc("Site", site_name)
if self.email_type == "Drip" and site.status in ["Pending", "Broken"]:
return
@@ -25,6 +63,8 @@ def send_drip_email(self, site_name):
return
account_request = frappe.get_doc("Account Request", site.account_request)
+ if not email and account_request.unsubscribed_from_drip_emails:
+ return
if self.send_by_consultant:
consultant = self.select_consultant(site)
@@ -34,19 +74,20 @@ def send_drip_email(self, site_name):
self.send_mail(
context=dict(
full_name=account_request.full_name,
- email=account_request.email,
- domain=site.name,
+ email=email or account_request.email,
+ domain=site.host_name or site.name,
consultant=consultant,
site=site,
account_request=account_request,
),
- recipient=account_request.email,
+ recipient=email or account_request.email,
)
def send_mail(self, context, recipient):
# build the message
message = frappe.render_template(self.message, context)
- title = frappe.db.get_value("Marketplace App", self.saas_app, "title")
+ account_request = context.get("account_request", "")
+ app = frappe.db.get_value("Product Trial", self.product_trial, ["title", "logo"], as_dict=True)
# add to queue
frappe.sendmail(
@@ -56,12 +97,41 @@ def send_mail(self, context, recipient):
reply_to=self.reply_to,
reference_doctype="Drip Email",
reference_name=self.name,
- unsubscribe_message="Don't send me help messages",
- attachments=self.get_setup_guides(context.get("account_request", "")),
- template="drip_email",
- args={"message": message, "title": title},
+ unsubscribe_message="Unsubscribe",
+ unsubscribe_method="api/method/press.press.doctype.drip_email.drip_email.unsubscribe",
+ unsubscribe_params={"account_request": account_request.name},
+ attachments=self.get_setup_guides(account_request),
+ template="product_trial_email",
+ args={"message": message, "title": app.title, "logo": app.logo},
+ )
+
+ @property
+ def message(self):
+ if self.content_type == "Markdown":
+ return frappe.utils.md_to_html(self.message_markdown)
+ if self.content_type == "Rich Text":
+ return self.message_rich_text
+ return self.message_html
+
+ def evaluate_condition(self, site_name: str) -> bool:
+ """
+ Evaluate the condition to check if the email should be sent.
+ """
+ if not self.condition:
+ return True
+
+ product_trial = frappe.get_doc("Product Trial", self.product_trial)
+ site_account_request = frappe.db.get_value("Site", site_name, "account_request")
+ account_request = frappe.get_doc("Account Request", site_account_request)
+
+ eval_locals = dict(
+ app=product_trial,
+ doc=self,
+ account_request=account_request,
)
+ return frappe.safe_eval(self.condition, None, eval_locals)
+
def select_consultant(self, site) -> str:
"""
Select random ERPNext Consultant to send email.
@@ -78,7 +148,7 @@ def select_consultant(self, site) -> str:
self.sender_name = consultant.full_name
return consultant
- def get_setup_guides(self, account_request) -> List[Dict[str, str]]:
+ def get_setup_guides(self, account_request) -> list[dict[str, str]]:
if not account_request:
return []
@@ -86,21 +156,28 @@ def get_setup_guides(self, account_request) -> List[Dict[str, str]]:
for guide in self.module_setup_guide:
if account_request.industry == guide.industry:
attachments.append(
- frappe.db.get_value(
- "File", {"file_url": guide.setup_guide}, ["name as fid"], as_dict=1
- )
+ frappe.db.get_value("File", {"file_url": guide.setup_guide}, ["name as fid"], as_dict=1)
)
return attachments
@property
def sites_to_send_drip(self):
- signup_date = date.today() - timedelta(days=self.send_after)
+ signup_date = frappe.utils.getdate() - timedelta(days=self.send_after)
conditions = ""
- if self.saas_app:
- conditions += f'AND site.standby_for = "{self.saas_app}"'
+ if self.product_trial:
+ conditions += f'AND site.standby_for_product = "{self.product_trial}"'
+
+ if self.skip_sites_with_paid_plan:
+ paid_site_plans = frappe.get_all(
+ "Site Plan", {"enabled": True, "is_trial_plan": False, "document_type": "Site"}, pluck="name"
+ )
+
+ if paid_site_plans:
+ paid_site_plans_str = ", ".join(f"'{plan}'" for plan in paid_site_plans)
+ conditions += f" AND site.plan NOT IN ({paid_site_plans_str})"
sites = frappe.db.sql(
f"""
@@ -114,17 +191,37 @@ def sites_to_send_drip(self):
site.account_request = account_request.name
WHERE
site.status = "Active" AND
+ account_request.unsubscribed_from_drip_emails = 0 AND
DATE(account_request.creation) = "{signup_date}"
{conditions}
"""
)
- sites = [t[0] for t in sites]
- return sites
+ return [t[0] for t in sites] # site names
def send_to_sites(self):
- for site in self.sites_to_send_drip:
- self.send(site)
- # TODO: only send `Onboarding` mails to partners <19-04-21, Balamurali M> #
+ sites = self.sites_to_send_drip
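+ # Commit after every successful send so a failure on one site only
+ # rolls back that site's work, not the emails already queued.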
+ for site in sites:
+ try:
+ # TODO: only send `Onboarding` mails to partners <19-04-21, Balamurali M> #
+ self.send(site)
+ frappe.db.commit()
+ except rq.timeouts.JobTimeoutException:
+ log_error(
+ "Drip Email Timeout",
+ drip_email=self.name,
+ site=site,
+ total_sites=len(sites),
+ )
+ frappe.db.rollback()
+ return
+ except Exception:
+ frappe.db.rollback()
+ log_error("Drip Email Error", drip_email=self.name, site=site)
+
+ @frappe.whitelist()
+ def send_test_email(self, site: str, email: str):
+ """Send test email to the given email address."""
+ self.send_drip_email(site, email)
def send_drip_emails():
@@ -133,18 +230,22 @@ def send_drip_emails():
"Drip Email", {"enabled": 1, "email_type": ("in", ("Drip", "Onboarding"))}
)
for drip_email_name in drip_emails:
- drip_email = frappe.get_doc("Drip Email", drip_email_name)
- drip_email.send_to_sites()
+ frappe.enqueue_doc(
+ "Drip Email",
+ drip_email_name,
+ "send_to_sites",
+ queue="long",
+ job_id=f"drip_email_send_to_sites:{drip_email_name}",
+ deduplicate=True,
+ )
def send_welcome_email():
"""Send welcome email to sites created in last 15 minutes."""
- welcome_drips = frappe.db.get_all(
- "Drip Email", {"email_type": "Sign Up", "enabled": 1}, pluck="name"
- )
+ welcome_drips = frappe.db.get_all("Drip Email", {"email_type": "Sign Up", "enabled": 1}, pluck="name")
for drip in welcome_drips:
welcome_email = frappe.get_doc("Drip Email", drip)
- _15_mins_ago = frappe.utils.add_to_date(None, minutes=-15)
+ _5_mins_ago = frappe.utils.add_to_date(None, minutes=-5)
tuples = frappe.db.sql(
f"""
SELECT
@@ -157,10 +258,45 @@ def send_welcome_email():
site.account_request = account_request.name
WHERE
site.status = "Active" and
- site.standby_for = "{welcome_email.saas_app}" and
- account_request.creation > "{_15_mins_ago}"
+ site.standby_for_product = "{welcome_email.product_trial}" and
+ account_request.creation > "{_5_mins_ago}"
"""
)
sites_in_last_15_mins = [t[0] for t in tuples]
for site in sites_in_last_15_mins:
welcome_email.send(site)
+
+
+@frappe.whitelist(allow_guest=True)
+@rate_limit(limit=5, seconds=60 * 60)
+def unsubscribe(email: str, account_request: str) -> None:
+ """
+ Unsubscribe from drip emails of a site.
+ """
+ if not account_request or not email:
+ return None
+
+ is_unsubscribed = frappe.db.get_value("Account Request", account_request, "unsubscribed_from_drip_emails")
+ if is_unsubscribed is None: # no account request found
+ return None
+
+ site = frappe.db.get_value("Site", {"account_request": account_request}, ["host_name", "name"], as_dict=1)
+ if is_unsubscribed: # already unsubscribed
+ return frappe.respond_as_web_page(
+ _("Already Unsubscribed"),
+ _(
+ f"You have already unsubscribed from receiving emails related to the site {frappe.bold(site.host_name or site.name)}."
+ ),
+ indicator_color="red",
+ )
+
+ frappe.db.set_value("Account Request", account_request, "unsubscribed_from_drip_emails", 1)
+ frappe.db.commit()
+
+ return frappe.respond_as_web_page(
+ _("Unsubscribed"),
+ _(
+ f"You have been unsubscribed from receiving emails related to the site {frappe.bold(site.host_name or site.name)}."
+ ),
+ indicator_color="green",
+ )
diff --git a/press/press/doctype/drip_email/patches/migrate_to_product_trial_field.py b/press/press/doctype/drip_email/patches/migrate_to_product_trial_field.py
new file mode 100644
index 00000000000..536a15af107
--- /dev/null
+++ b/press/press/doctype/drip_email/patches/migrate_to_product_trial_field.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2024, Frappe Technologies Pvt. Ltd. and Contributors
+# For license information, please see license.txt
+
+import frappe
+
+
+def execute():
+ frappe.reload_doctype("Drip Email")
+ frappe.db.sql("UPDATE `tabDrip Email` SET product_trial = saas_app")
diff --git a/press/press/doctype/drip_email/patches/set_correct_field_for_html.py b/press/press/doctype/drip_email/patches/set_correct_field_for_html.py
new file mode 100644
index 00000000000..886140965c7
--- /dev/null
+++ b/press/press/doctype/drip_email/patches/set_correct_field_for_html.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2024, Frappe Technologies Pvt. Ltd. and Contributors
+# For license information, please see license.txt
+
+import frappe
+
+
+def execute():
+ frappe.reload_doctype("Drip Email")
+ frappe.db.sql("UPDATE `tabDrip Email` SET message_html = message, content_type = 'HTML'")
diff --git a/press/press/doctype/drip_email/test_drip_email.py b/press/press/doctype/drip_email/test_drip_email.py
index 89ed5253176..8e65351aaf0 100644
--- a/press/press/doctype/drip_email/test_drip_email.py
+++ b/press/press/doctype/drip_email/test_drip_email.py
@@ -1,58 +1,76 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2015, Web Notes and Contributors
# See license.txt
+from __future__ import annotations
-import frappe
-import unittest
+from datetime import date, timedelta
+from typing import TYPE_CHECKING
+from unittest.mock import patch
+import frappe
+from frappe.tests.utils import FrappeTestCase
-from typing import Optional
-from datetime import date, timedelta
-from press.press.doctype.app.test_app import create_test_app
-from press.press.doctype.site.test_site import create_test_site
from press.press.doctype.account_request.test_account_request import (
create_test_account_request,
)
-from press.press.doctype.marketplace_app.test_marketplace_app import (
- create_test_marketplace_app,
-)
+from press.press.doctype.app.test_app import create_test_app
+from press.press.doctype.site.test_site import create_test_site
+from press.press.doctype.site_plan_change.test_site_plan_change import create_test_plan
+from press.saas.doctype.product_trial.test_product_trial import create_test_product_trial
+
+if TYPE_CHECKING:
+ from press.press.doctype.drip_email.drip_email import DripEmail
-def create_test_drip_email(send_after: int, saas_app: Optional[str] = None):
+def create_test_drip_email(
+ send_after: int,
+ product_trial: str | None = None,
+ skip_sites_with_paid_plan: bool = False,
+ email_type: str = "Drip",
+) -> DripEmail:
drip_email = frappe.get_doc(
{
"doctype": "Drip Email",
+ "enabled": 1,
"sender": "test@test.com",
"sender_name": "Test User",
"subject": "Drip Test",
"message": "Drip Top, Drop Top",
+ "email_type": email_type,
"send_after": send_after,
- "saas_app": saas_app,
+ "product_trial": product_trial,
+ "skip_sites_with_paid_plan": skip_sites_with_paid_plan,
}
).insert(ignore_if_duplicate=True)
drip_email.reload()
return drip_email
-class TestDripEmail(unittest.TestCase):
+class TestDripEmail(FrappeTestCase):
+ def setUp(self) -> None:
+ self.trial_site_plan = create_test_plan("Site", is_trial_plan=True)
+ self.paid_site_plan = create_test_plan("Site", is_trial_plan=False)
+
def tearDown(self):
frappe.db.rollback()
def test_correct_sites_are_selected_for_drip_email(self):
- test_app = create_test_app()
- test_marketplace_app = create_test_marketplace_app(test_app.name)
+ test_app = create_test_app("wiki", "Wiki")
+
+ test_product_trial = create_test_product_trial(test_app)
- drip_email = create_test_drip_email(0, saas_app=test_marketplace_app.name)
+ drip_email = create_test_drip_email(0, product_trial=test_product_trial.name)
- site1 = create_test_site("site1", standby_for=test_marketplace_app.name)
- site1.account_request = create_test_account_request(
- "site1", saas=True, saas_app=test_marketplace_app.name
- ).name
+ site1 = create_test_site(
+ "site1",
+ standby_for_product=test_product_trial.name,
+ account_request=create_test_account_request(
+ "site1", saas=True, product_trial=test_product_trial.name
+ ).name,
+ )
site1.save()
- site2 = create_test_site("site2")
- site2.account_request = create_test_account_request("site2").name
+ site2 = create_test_site("site2", account_request=create_test_account_request("site2").name)
site2.save()
create_test_site("site3") # Note: site is not created
@@ -62,8 +80,77 @@ def test_correct_sites_are_selected_for_drip_email(self):
def test_older_site_isnt_selected(self):
drip_email = create_test_drip_email(0)
site = create_test_site("site1")
- site.account_request = create_test_account_request(
- "site1", creation=date.today() - timedelta(1)
- ).name
+ site.account_request = create_test_account_request("site1", creation=date.today() - timedelta(1)).name
site.save()
self.assertNotEqual(drip_email.sites_to_send_drip, [site.name])
+
+ def test_drip_emails_not_sent_to_sites_with_paid_plan_having_special_flag(self):
+ """
+ If you enable `skip_sites_with_paid_plan` flag, drip emails should not be sent to sites with paid plan set
+ No matter whether they have paid for any invoice or not
+ """
+ test_app = create_test_app("wiki", "Wiki")
+ test_product_trial = create_test_product_trial(test_app)
+
+ drip_email = create_test_drip_email(
+ 0, product_trial=test_product_trial.name, skip_sites_with_paid_plan=True
+ )
+
+ site1 = create_test_site(
+ "site1",
+ standby_for_product=test_product_trial.name,
+ account_request=create_test_account_request(
+ "site1", saas=True, product_trial=test_product_trial.name
+ ).name,
+ plan=self.trial_site_plan.name,
+ )
+ site1.save()
+
+ site2 = create_test_site(
+ "site2",
+ standby_for_product=test_product_trial.name,
+ account_request=create_test_account_request(
+ "site2", saas=True, product_trial=test_product_trial.name
+ ).name,
+ plan=self.paid_site_plan.name,
+ )
+ site2.save()
+
+ site3 = create_test_site(
+ "site3",
+ standby_for_product=test_product_trial.name,
+ account_request=create_test_account_request(
+ "site3", saas=True, product_trial=test_product_trial.name
+ ).name,
+ plan=self.trial_site_plan.name,
+ )
+ site3.save()
+
+ self.assertEqual(drip_email.sites_to_send_drip, [site1.name, site3.name])
+
+ def test_welcome_mail_is_sent_for_new_signups(self):
+ from press.press.doctype.drip_email.drip_email import DripEmail, send_welcome_email
+
+ test_app = create_test_app("wiki", "Wiki")
+ test_product_trial = create_test_product_trial(test_app)
+ create_test_drip_email(
+ 0, product_trial=test_product_trial.name, skip_sites_with_paid_plan=True, email_type="Sign Up"
+ )
+
+ site1 = create_test_site(
+ "site1",
+ standby_for_product=test_product_trial.name,
+ account_request=create_test_account_request(
+ "site1", saas=True, product_trial=test_product_trial.name
+ ).name,
+ plan=self.trial_site_plan.name,
+ )
+ site1.save()
+
+ with patch.object(
+ DripEmail,
+ "send",
+ ) as send_welcome_mail:
+ send_welcome_email()
+
+ send_welcome_mail.assert_called()
diff --git a/press/press/doctype/erpnext_app/erpnext_app.py b/press/press/doctype/erpnext_app/erpnext_app.py
index 19a0bf41460..5992547f99c 100644
--- a/press/press/doctype/erpnext_app/erpnext_app.py
+++ b/press/press/doctype/erpnext_app/erpnext_app.py
@@ -8,4 +8,18 @@
class ERPNextApp(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ app: DF.Link
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/erpnext_consultant/erpnext_consultant.py b/press/press/doctype/erpnext_consultant/erpnext_consultant.py
index 58da86a4c1c..dfa5c7bbf0d 100644
--- a/press/press/doctype/erpnext_consultant/erpnext_consultant.py
+++ b/press/press/doctype/erpnext_consultant/erpnext_consultant.py
@@ -9,6 +9,23 @@
class ERPNextConsultant(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.erpnext_consultant_region.erpnext_consultant_region import (
+ ERPNextConsultantRegion,
+ )
+
+ active: DF.Check
+ territories: DF.TableMultiSelect[ERPNextConsultantRegion]
+ user: DF.Link
+ # end: auto-generated types
+
@property
def full_name(self):
return get_fullname(self.name)
diff --git a/press/press/doctype/erpnext_consultant/test_erpnext_consultant.py b/press/press/doctype/erpnext_consultant/test_erpnext_consultant.py
index c559d3102d1..4132b41fc4c 100644
--- a/press/press/doctype/erpnext_consultant/test_erpnext_consultant.py
+++ b/press/press/doctype/erpnext_consultant/test_erpnext_consultant.py
@@ -1,10 +1,9 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe and Contributors
# See license.txt
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestERPNextConsultant(unittest.TestCase):
+class TestERPNextConsultant(FrappeTestCase):
pass
diff --git a/press/press/doctype/erpnext_consultant_region/erpnext_consultant_region.py b/press/press/doctype/erpnext_consultant_region/erpnext_consultant_region.py
index 95102e8d4f4..a82ec9d17f9 100644
--- a/press/press/doctype/erpnext_consultant_region/erpnext_consultant_region.py
+++ b/press/press/doctype/erpnext_consultant_region/erpnext_consultant_region.py
@@ -8,4 +8,18 @@
class ERPNextConsultantRegion(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ territory: DF.Link
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/erpnext_site_settings/erpnext_site_settings.py b/press/press/doctype/erpnext_site_settings/erpnext_site_settings.py
index 51f76099bc9..3701940260d 100644
--- a/press/press/doctype/erpnext_site_settings/erpnext_site_settings.py
+++ b/press/press/doctype/erpnext_site_settings/erpnext_site_settings.py
@@ -1,12 +1,30 @@
# Copyright (c) 2022, Frappe and contributors
# For license information, please see license.txt
-import frappe
import json
+
+import frappe
from frappe.model.document import Document
class ERPNextSiteSettings(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ emails: DF.Int
+ expiry: DF.Date
+ plan: DF.Data | None
+ site: DF.Link
+ space: DF.Int
+ support_expiry: DF.Date | None
+ users: DF.Int
+ # end: auto-generated types
+
def on_update(self):
config_keys = ("users", "expiry", "emails", "space", "current_plan")
values = (self.users, self.expiry, self.emails, self.space, self.plan)
diff --git a/press/press/doctype/frappe_version/frappe_version.json b/press/press/doctype/frappe_version/frappe_version.json
index 6129424adc0..2ef9a3322ee 100644
--- a/press/press/doctype/frappe_version/frappe_version.json
+++ b/press/press/doctype/frappe_version/frappe_version.json
@@ -9,8 +9,11 @@
"field_order": [
"public",
"number",
+ "column_break_ramy",
"default",
- "status"
+ "status",
+ "dependency_table_section",
+ "dependencies"
],
"fields": [
{
@@ -45,6 +48,21 @@
"label": "Status",
"options": "Develop\nBeta\nStable\nEnd of Life",
"reqd": 1
+ },
+ {
+ "fieldname": "column_break_ramy",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "dependency_table_section",
+ "fieldtype": "Section Break",
+ "label": "Dependency Table"
+ },
+ {
+ "fieldname": "dependencies",
+ "fieldtype": "Table",
+ "label": "Version Dependencies",
+ "options": "Frappe Version Dependency"
}
],
"index_web_pages_for_search": 1,
@@ -54,10 +72,11 @@
"link_fieldname": "version"
}
],
- "modified": "2021-01-12 09:55:49.348217",
+ "modified": "2025-08-20 20:19:03.766199",
"modified_by": "Administrator",
"module": "Press",
"name": "Frappe Version",
+ "naming_rule": "Set by user",
"owner": "Administrator",
"permissions": [
{
@@ -71,9 +90,20 @@
"role": "System Manager",
"share": 1,
"write": 1
+ },
+ {
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Marketplace Manager",
+ "share": 1
}
],
+ "row_format": "Dynamic",
"sort_field": "modified",
"sort_order": "DESC",
+ "states": [],
"track_changes": 1
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/frappe_version/frappe_version.py b/press/press/doctype/frappe_version/frappe_version.py
index 2be15b696a4..eb3574f8208 100644
--- a/press/press/doctype/frappe_version/frappe_version.py
+++ b/press/press/doctype/frappe_version/frappe_version.py
@@ -1,11 +1,45 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
# import frappe
+import copy
+
from frappe.model.document import Document
+DEFAULT_DEPENDENCIES = [
+ {"dependency": "NVM_VERSION", "version": "0.36.0"},
+ {"dependency": "NODE_VERSION", "version": "18.16.0"},
+ {"dependency": "PYTHON_VERSION", "version": "3.11"},
+ {"dependency": "WKHTMLTOPDF_VERSION", "version": "0.12.5"},
+ {"dependency": "BENCH_VERSION", "version": "5.25.1"},
+]
+
class FrappeVersion(Document):
- pass
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.frappe_version_dependency.frappe_version_dependency import (
+ FrappeVersionDependency,
+ )
+
+ default: DF.Check
+ dependencies: DF.Table[FrappeVersionDependency]
+ number: DF.Int
+ public: DF.Check
+ status: DF.Literal["Develop", "Beta", "Stable", "End of Life"]
+ # end: auto-generated types
+
+ def before_insert(self):
+ self.set_dependencies()
+
+ def set_dependencies(self):
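+ # Copy so the child rows never mutate the module-level DEFAULT_DEPENDENCIES.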
+ dependencies = copy.deepcopy(DEFAULT_DEPENDENCIES)
+ if not hasattr(self, "dependencies") or not self.dependencies:
+ self.extend("dependencies", dependencies)
diff --git a/press/press/doctype/frappe_version/test_frappe_version.py b/press/press/doctype/frappe_version/test_frappe_version.py
index ef9643e040b..53cf88ae615 100644
--- a/press/press/doctype/frappe_version/test_frappe_version.py
+++ b/press/press/doctype/frappe_version/test_frappe_version.py
@@ -1,20 +1,19 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-def create_test_frappe_version():
- """Create test Frappe Version doc"""
- frappe_version = frappe.get_doc(
- {"doctype": "Frappe Version", "name": "Version 13", "number": 13, "status": "Stable"}
- ).insert(ignore_if_duplicate=True)
- frappe_version.reload()
- return frappe_version
-
-
-class TestFrappeVersion(unittest.TestCase):
- pass
+class TestFrappeVersion(FrappeTestCase):
+ def test_create_frappe_version_with_default_dependencies(self):
+ number = 99 # version with no fixtures
+ frappe_version = frappe.get_doc(
+ {
+ "doctype": "Frappe Version",
+ "name": f"Version {number}",
+ "number": number,
+ }
+ ).insert()
+ self.assertEqual(len(frappe_version.dependencies), 5)
diff --git a/press/press/doctype/frappe_version_dependency/__init__.py b/press/press/doctype/frappe_version_dependency/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/frappe_version_dependency/frappe_version_dependency.json b/press/press/doctype/frappe_version_dependency/frappe_version_dependency.json
new file mode 100644
index 00000000000..8798889b4c1
--- /dev/null
+++ b/press/press/doctype/frappe_version_dependency/frappe_version_dependency.json
@@ -0,0 +1,41 @@
+{
+ "actions": [],
+ "creation": "2023-07-13 12:18:06.259601",
+ "default_view": "List",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "dependency",
+ "version"
+ ],
+ "fields": [
+ {
+ "fieldname": "dependency",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Dependency",
+ "reqd": 1
+ },
+ {
+ "fieldname": "version",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Version",
+ "reqd": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2023-07-19 15:45:52.544440",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Frappe Version Dependency",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": [],
+ "track_changes": 1
+}
\ No newline at end of file
diff --git a/press/press/doctype/frappe_version_dependency/frappe_version_dependency.py b/press/press/doctype/frappe_version_dependency/frappe_version_dependency.py
new file mode 100644
index 00000000000..2a5b6639167
--- /dev/null
+++ b/press/press/doctype/frappe_version_dependency/frappe_version_dependency.py
@@ -0,0 +1,24 @@
+# Copyright (c) 2023, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class FrappeVersionDependency(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ dependency: DF.Data
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ version: DF.Data
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/github_webhook_log/github_webhook_log.json b/press/press/doctype/github_webhook_log/github_webhook_log.json
index d824e7f5f76..de1034a87fd 100644
--- a/press/press/doctype/github_webhook_log/github_webhook_log.json
+++ b/press/press/doctype/github_webhook_log/github_webhook_log.json
@@ -109,7 +109,7 @@
],
"in_create": 1,
"links": [],
- "modified": "2021-01-18 10:13:12.602898",
+ "modified": "2025-11-22 22:29:45.236252",
"modified_by": "Administrator",
"module": "Press",
"name": "GitHub Webhook Log",
@@ -128,7 +128,8 @@
"write": 1
}
],
+ "row_format": "Dynamic",
"sort_field": "modified",
"sort_order": "DESC",
- "track_changes": 1
-}
\ No newline at end of file
+ "states": []
+}
diff --git a/press/press/doctype/github_webhook_log/github_webhook_log.py b/press/press/doctype/github_webhook_log/github_webhook_log.py
index 6d49c6cbbd8..052e7844d06 100644
--- a/press/press/doctype/github_webhook_log/github_webhook_log.py
+++ b/press/press/doctype/github_webhook_log/github_webhook_log.py
@@ -1,34 +1,55 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
-
-import frappe
-import hmac
import hashlib
+import hmac
import json
+from typing import TYPE_CHECKING, Optional
+
+import frappe
from frappe.model.document import Document
from frappe.query_builder import Interval
from frappe.query_builder.functions import Now
+
from press.utils import log_error
+if TYPE_CHECKING:
+ from press.press.doctype.app_source.app_source import AppSource
+
class GitHubWebhookLog(Document):
- def validate(self):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ branch: DF.Data | None
+ event: DF.Data
+ git_reference_type: DF.Literal["tag", "branch"]
+ github_installation_id: DF.Data | None
+ payload: DF.Code
+ repository: DF.Data | None
+ repository_owner: DF.Data | None
+ signature: DF.Data
+ tag: DF.Data | None
+ # end: auto-generated types
+
+ def validate(self): # noqa: C901
secret = frappe.db.get_single_value("Press Settings", "github_webhook_secret")
digest = hmac.HMAC(secret.encode(), self.payload.encode(), hashlib.sha1)
if not hmac.compare_digest(digest.hexdigest(), self.signature):
frappe.throw("Invalid Signature")
- payload = self.parsed_payload
- repository = payload.repository
- installation = payload.installation
- if installation:
- self.github_installation_id = installation["id"]
+ payload = self.get_parsed_payload()
+ self.github_installation_id = payload.get("installation", {}).get("id")
- if payload.repository:
- self.repository = repository["name"]
- self.repository_owner = repository["owner"]["login"]
+ repository_detail = get_repository_details_from_payload(payload)
+ self.repository = repository_detail["name"]
+ self.repository_owner = repository_detail["owner"]
if self.event == "push":
ref_types = {"tags": "tag", "heads": "branch"}
@@ -44,72 +65,185 @@ def validate(self):
self.tag = payload.ref
elif self.git_reference_type == "branch":
self.branch = payload.ref
+ elif self.event == "release":
+ self.tag = payload.release.get("tag_name")
self.payload = json.dumps(payload, indent=4, sort_keys=True)
- def after_insert(self):
- payload = self.parsed_payload
+ def handle_events(self):
if self.event == "push":
- if self.git_reference_type == "branch":
- self.create_app_release(payload)
- elif self.git_reference_type == "tag":
- self.create_app_tag(payload)
+ self.handle_push_event()
+ elif self.event == "installation":
+ self.handle_installation_event()
+ elif self.event == "installation_repositories":
+ self.handle_repository_installation_event()
+ frappe.db.commit()
+
+ def handle_push_event(self):
+ payload = self.get_parsed_payload()
+ if self.git_reference_type == "branch":
+ self.create_app_releases(payload)
+ elif self.git_reference_type == "tag":
+ self.create_app_tag(payload)
+
+ def handle_installation_event(self):
+ payload = self.get_parsed_payload()
+ action = payload.get("action")
+ if action == "created" or action == "unsuspend":
+ self.handle_installation_created(payload)
+ elif action == "deleted" or action == "suspend":
+ self.handle_installation_deletion(payload)
+
+ def handle_repository_installation_event(self):
+ payload = self.get_parsed_payload()
+ if payload["action"] not in ["added", "removed"]:
+ return
+ owner = payload["installation"]["account"]["login"]
+ self.update_installation_ids(owner)
+
+ for repo in payload.get("repositories_removed", []):
+ set_uninstalled(owner, repo["name"])
+
+ def handle_installation_created(self, payload):
+ owner = payload["installation"]["account"]["login"]
+ self.update_installation_ids(owner)
+
+ def handle_installation_deletion(self, payload):
+ owner = payload["installation"]["account"]["login"]
+ repositories = payload.get("repositories", [])
+
+ for repo in repositories:
+ set_uninstalled(owner, repo["name"])
+
+ if len(repositories) == 0:
+ # Set all sources as uninstalled
+ set_uninstalled(owner)
+
+ def update_installation_ids(self, owner: str):
+ for name in get_sources(owner):
+ doc: "AppSource" = frappe.get_doc("App Source", name)
+ if not self.should_update_app_source(doc):
+ continue
+
+ self.update_app_source_installation_id(doc)
+
+ def update_app_source_installation_id(self, doc: "AppSource"):
+ doc.github_installation_id = self.github_installation_id
+ """
+ These two are assumptions; they will be resolved when
+ `doc.create_release` is called.
- @property
- def parsed_payload(self):
+ It is not called here because it requires polling GitHub,
+ which can get us rate limited if the repository owner has
+ several apps.
+ """
+ doc.uninstalled = False
+ doc.last_github_poll_failed = False
+ doc.db_update()
+
+ def should_update_app_source(self, doc: "AppSource"):
+ if doc.uninstalled or doc.last_github_poll_failed:
+ return True
+
+ return doc.github_installation_id != self.github_installation_id
+
+ def get_parsed_payload(self):
return frappe.parse_json(self.payload)
- def create_app_release(self, payload):
- try:
- source = frappe.get_value(
- "App Source",
- {
- "branch": self.branch,
- "repository": self.repository,
- "repository_owner": self.repository_owner,
- },
- ["name", "app"],
- as_dict=True,
- )
- if source:
- commit = payload.head_commit
- if frappe.db.exists(
- "App Release", {"app": source.app, "source": source.name, "hash": commit["id"]}
- ):
- return
- release = frappe.get_doc(
- {
- "doctype": "App Release",
- "app": source.app,
- "source": source.name,
- "hash": commit["id"],
- "message": commit["message"],
- "author": commit["author"]["name"],
- }
- )
- release.insert()
- except Exception:
- log_error("App Release Creation Error", payload=payload)
+ def create_app_releases(self, payload):
+ sources = frappe.db.get_all(
+ "App Source",
+ filters={
+ "branch": self.branch,
+ "repository": self.repository,
+ "repository_owner": self.repository_owner,
+ "enabled": 1,
+ },
+ fields=["name", "app"],
+ )
+
+ commit = payload.get("head_commit", {})
+ if len(sources) == 0 or not commit or not commit.get("id"):
+ return
+
+ for source in sources:
+ try:
+ create_app_release(source.name, source.app, commit)
+ except Exception:
+ log_error("App Release Creation Error", payload=payload, doc=self)
def create_app_tag(self, payload):
+ commit = payload.get("head_commit", {})
+ if not commit or not commit.get("id"):
+ return
+
+ tag = frappe.get_doc(
+ {
+ "doctype": "App Tag",
+ "tag": self.tag,
+ "hash": commit.get("id"),
+ "timestamp": commit.get("timestamp"),
+ "repository": self.repository,
+ "repository_owner": self.repository_owner,
+ "github_installation_id": self.github_installation_id,
+ }
+ )
+
try:
- commit = payload.head_commit
- tag = frappe.get_doc(
- {
- "doctype": "App Tag",
- "tag": self.tag,
- "hash": commit["id"],
- "timestamp": commit["timestamp"],
- "repository": self.repository,
- "repository_owner": self.repository_owner,
- "github_installation_id": self.github_installation_id,
- }
- )
tag.insert()
except Exception:
- log_error("App Tag Creation Error", payload=payload)
+ log_error("App Tag Creation Error", payload=payload, doc=self)
@staticmethod
def clear_old_logs(days=30):
table = frappe.qb.DocType("GitHub Webhook Log")
frappe.db.delete(table, filters=(table.creation < (Now() - Interval(days=days))))
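+ # Deletes every webhook log whose `creation` timestamp is older than
+ # `days` days; the Now() - Interval cutoff is evaluated by the database,
+ # so one DELETE covers all matching rows.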
+
+
+def set_uninstalled(owner: str, repository: str | None = None):
+ for name in get_sources(owner, repository):
+ frappe.db.set_value("App Source", name, "uninstalled", True)
+
+
+def get_sources(owner: str, repository: str | None = None) -> "list[str]":
+ filters = {"repository_owner": owner}
+ if repository:
+ filters["repository"] = repository
+
+ return frappe.db.get_all(
+ "App Source",
+ filters=filters,
+ pluck="name",
+ )
+
+
+def get_repository_details_from_payload(payload: dict):
+ r = payload.get("repository", {})
+ repo = r.get("name")
+ owner = r.get("owner", {}).get("login")
+
+ repos = payload.get("repositories_added", [])
+ if not repo and len(repos) == 1:
+ repo = repos[0].get("name")
+
+ if not owner and repos:
+ owner = repos[0].get("full_name", "").split("/")[0] or None
+
+ if not owner:
+ owner = payload.get("installation", {}).get("account", {}).get("login")
+
+ return dict(name=repo, owner=owner)
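+
+# Illustrative example (payload shapes assumed from GitHub's webhook docs):
+# a push payload carries the repository inline, so
+#
+# get_repository_details_from_payload(
+# {"repository": {"name": "press", "owner": {"login": "frappe"}}}
+# ) == {"name": "press", "owner": "frappe"}
+#
+# An installation_repositories payload may instead carry only
+# "repositories_added" and the installation account, which the fallbacks
+# above recover the name and owner from.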
+
+
+def create_app_release(source: str, app: str, commit: dict):
+ release = frappe.get_doc(
+ {
+ "doctype": "App Release",
+ "app": app,
+ "source": source,
+ "hash": commit.get("id"),
+ "message": commit.get("message", "MESSAGE NOT FOUND"),
+ "author": commit.get("author", {}).get("name", "AUTHOR NOT FOUND"),
+ }
+ )
+ release.insert(ignore_permissions=True)
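+ # Illustrative commit dict (shape assumed from the push payload's
+ # "head_commit"): {"id": "<sha>", "message": "fix: ...",
+ # "author": {"name": "..."}}; missing keys fall back to the
+ # NOT FOUND placeholders above instead of raising.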
diff --git a/press/press/doctype/github_webhook_log/test_github_webhook_log.py b/press/press/doctype/github_webhook_log/test_github_webhook_log.py
index 1ef60078905..a44b97f458f 100644
--- a/press/press/doctype/github_webhook_log/test_github_webhook_log.py
+++ b/press/press/doctype/github_webhook_log/test_github_webhook_log.py
@@ -1,9 +1,8 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestGitHubWebhookLog(unittest.TestCase):
+class TestGitHubWebhookLog(FrappeTestCase):
pass
diff --git a/press/press/doctype/incident/__init__.py b/press/press/doctype/incident/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/incident/incident.js b/press/press/doctype/incident/incident.js
new file mode 100644
index 00000000000..093800eb507
--- /dev/null
+++ b/press/press/doctype/incident/incident.js
@@ -0,0 +1,30 @@
+// Copyright (c) 2023, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('Incident', {
+ refresh(frm) {
+ [
+ [__('Reboot Database Server'), 'reboot_database_server'],
+ [__('Restart Down Benches'), 'restart_down_benches'],
+ [__('Cancel Stuck Jobs'), 'cancel_stuck_jobs'],
+ [__('Take Grafana screenshots'), 'regather_info_and_screenshots'],
+ ].forEach(([label, method, condition]) => {
+ if (typeof condition === 'undefined' || condition) {
+ frm.add_custom_button(
+ label,
+ () => {
+ frappe.confirm(
+ `Are you sure you want to ${label.toLowerCase()}?`,
+ () => frm.call(method).then((r) => frm.refresh()),
+ );
+ },
+ __('Actions'),
+ );
+ }
+ });
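+ // Each entry above is [label, method, optional condition]; no entry sets
+ // a condition yet, so every button is added, and a click invokes the
+ // whitelisted server-side method of the same name on this Incident.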
+ frm.call('get_down_site').then((r) => {
+ if (!r.message) return;
+ frm.add_web_link(`https://${r.message}`, __('Visit Down Site'));
+ });
+ },
+});
diff --git a/press/press/doctype/incident/incident.json b/press/press/doctype/incident/incident.json
new file mode 100644
index 00000000000..bc5b7a18822
--- /dev/null
+++ b/press/press/doctype/incident/incident.json
@@ -0,0 +1,251 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2023-10-16 18:45:05.744563",
+ "default_view": "List",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "alerts_tab",
+ "phone_call",
+ "alert",
+ "status",
+ "type",
+ "subtype",
+ "acknowledged_by",
+ "column_break_smnd",
+ "server",
+ "resource_type",
+ "resource",
+ "cluster",
+ "resolved_by",
+ "investigation",
+ "section_break_kjey",
+ "subject",
+ "description",
+ "preliminary_investigation_section",
+ "likely_cause",
+ "column_break_jrzi",
+ "corrective_suggestions",
+ "preventive_suggestions",
+ "section_break_bjjy",
+ "updates",
+ "called_customer",
+ "updates_tab",
+ "alerts",
+ "section_break_aevb",
+ "column_break_rbwa",
+ "route",
+ "sms_sent",
+ "show_in_website"
+ ],
+ "fields": [
+ {
+ "fieldname": "server",
+ "fieldtype": "Link",
+ "label": "Server",
+ "options": "Server"
+ },
+ {
+ "fieldname": "cluster",
+ "fieldtype": "Link",
+ "label": "Cluster",
+ "options": "Cluster"
+ },
+ {
+ "fieldname": "column_break_smnd",
+ "fieldtype": "Column Break",
+ "read_only": 1
+ },
+ {
+ "default": "Validating",
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "label": "Status",
+ "options": "Validating\nConfirmed\nAcknowledged\nInvestigating\nResolved\nAuto-Resolved\nPress-Resolved"
+ },
+ {
+ "default": "Database Down",
+ "fieldname": "type",
+ "fieldtype": "Select",
+ "label": "Type",
+ "options": "Database Down\nServer Down\nProxy Down"
+ },
+ {
+ "fieldname": "section_break_bjjy",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "alerts",
+ "fieldtype": "Table",
+ "label": "Alerts",
+ "options": "Incident Alerts"
+ },
+ {
+ "default": "0",
+ "fieldname": "sms_sent",
+ "fieldtype": "Check",
+ "label": "SMS Sent"
+ },
+ {
+ "fieldname": "alerts_tab",
+ "fieldtype": "Tab Break",
+ "label": "Overview"
+ },
+ {
+ "fieldname": "updates_tab",
+ "fieldtype": "Tab Break",
+ "label": "Alerts"
+ },
+ {
+ "fieldname": "acknowledged_by",
+ "fieldtype": "Link",
+ "label": "Acknowledged By",
+ "mandatory_depends_on": "eval: doc.status==\"Acknowledged\";",
+ "options": "User"
+ },
+ {
+ "fieldname": "column_break_rbwa",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "section_break_aevb",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "updates",
+ "fieldtype": "Table",
+ "label": "Updates",
+ "options": "Incident Updates"
+ },
+ {
+ "fieldname": "route",
+ "fieldtype": "Data",
+ "label": "Route"
+ },
+ {
+ "default": "0",
+ "fieldname": "show_in_website",
+ "fieldtype": "Check",
+ "label": "Show in Website"
+ },
+ {
+ "fieldname": "section_break_kjey",
+ "fieldtype": "Section Break",
+ "label": "Details"
+ },
+ {
+ "fieldname": "subject",
+ "fieldtype": "Data",
+ "label": "Subject"
+ },
+ {
+ "fieldname": "description",
+ "fieldtype": "Text Editor",
+ "label": "Description"
+ },
+ {
+ "fieldname": "resolved_by",
+ "fieldtype": "Link",
+ "label": "Resolved By",
+ "options": "User"
+ },
+ {
+ "default": "1",
+ "fieldname": "phone_call",
+ "fieldtype": "Check",
+ "label": "Phone Call"
+ },
+ {
+ "fieldname": "alert",
+ "fieldtype": "Link",
+ "label": "Alert",
+ "options": "Prometheus Alert Rule"
+ },
+ {
+ "fieldname": "resource_type",
+ "fieldtype": "Link",
+ "label": "Resource Type",
+ "options": "DocType"
+ },
+ {
+ "fieldname": "resource",
+ "fieldtype": "Dynamic Link",
+ "label": "Resource",
+ "options": "resource_type"
+ },
+ {
+ "fieldname": "subtype",
+ "fieldtype": "Select",
+ "label": "Subtype",
+ "options": "High CPU: user\nHigh CPU: iowait\nDisk full"
+ },
+ {
+ "fieldname": "preliminary_investigation_section",
+ "fieldtype": "Section Break",
+ "label": "Preliminary Investigation"
+ },
+ {
+ "fieldname": "likely_cause",
+ "fieldtype": "Text",
+ "label": "Likely Causes"
+ },
+ {
+ "fieldname": "column_break_jrzi",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "preventive_suggestions",
+ "fieldtype": "Table",
+ "label": "Preventive Suggestions",
+ "options": "Incident Suggestion"
+ },
+ {
+ "fieldname": "corrective_suggestions",
+ "fieldtype": "Table",
+ "label": "Corrective Suggestions",
+ "options": "Incident Suggestion"
+ },
+ {
+ "fieldname": "investigation",
+ "fieldtype": "Link",
+ "label": "Investigation",
+ "options": "Incident Investigator"
+ },
+ {
+ "default": "0",
+ "fieldname": "called_customer",
+ "fieldtype": "Check",
+ "label": "Called Customer"
+ }
+ ],
+ "has_web_view": 1,
+ "index_web_pages_for_search": 1,
+ "is_published_field": "show_in_website",
+ "links": [],
+ "modified": "2025-09-30 17:33:41.729833",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Incident",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "route": "incidents",
+ "row_format": "Dynamic",
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/press/doctype/incident/incident.py b/press/press/doctype/incident/incident.py
new file mode 100644
index 00000000000..de5b797ca9f
--- /dev/null
+++ b/press/press/doctype/incident/incident.py
@@ -0,0 +1,913 @@
+# Copyright (c) 2023, Frappe and contributors
+# For license information, please see license.txt
+
+from __future__ import annotations
+
+import urllib.parse
+from base64 import b64encode
+from datetime import timedelta
+from functools import cached_property
+from typing import TYPE_CHECKING
+
+import frappe
+import requests
+from frappe.types.DF import Phone
+from frappe.utils import cint
+from frappe.utils.background_jobs import enqueue_doc
+from frappe.utils.synchronization import filelock
+from frappe.website.website_generator import WebsiteGenerator
+from playwright.sync_api import Page, sync_playwright
+from tenacity import RetryError, retry, stop_after_attempt, wait_fixed
+from tenacity.retry import retry_if_not_result
+from twilio.base.exceptions import TwilioRestException
+
+from press.api.server import prometheus_query
+from press.press.doctype.agent_job.agent_job import AgentJob
+from press.press.doctype.bench.bench import Bench
+from press.press.doctype.communication_info.communication_info import get_communication_info
+from press.press.doctype.database_server.database_server import DatabaseServer
+from press.press.doctype.server.server import MARIADB_DATA_MNT_POINT
+from press.press.doctype.telegram_message.telegram_message import TelegramMessage
+from press.utils import log_error
+
+if TYPE_CHECKING:
+ from frappe.types import DF
+ from twilio.rest.api.v2010.account.call import CallInstance
+
+ from press.press.doctype.alertmanager_webhook_log.alertmanager_webhook_log import AlertmanagerWebhookLog
+ from press.press.doctype.incident_settings.incident_settings import IncidentSettings
+ from press.press.doctype.incident_settings_self_hosted_user.incident_settings_self_hosted_user import (
+ IncidentSettingsSelfHostedUser,
+ )
+ from press.press.doctype.incident_settings_user.incident_settings_user import (
+ IncidentSettingsUser,
+ )
+ from press.press.doctype.monitor_server.monitor_server import MonitorServer
+ from press.press.doctype.press_settings.press_settings import PressSettings
+ from press.press.doctype.server.server import Server
+
+INCIDENT_ALERT = "Sites Down" # TODO: make it a field or child table somewhere #
+INCIDENT_SCOPE = (
+ "server" # can be bench, cluster, server, etc. Not site, minor code changes required for that
+)
+
+MIN_FIRING_INSTANCES = 15 # minimum instances that should have fired for an incident to be valid
+MIN_FIRING_INSTANCES_FRACTION = (
+ 0.4 # 40%; minimum percentage of instances that should have fired for an incident to be valid
+)
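+# Rough worked example: an alert counts as widespread enough (see
+# is_enough_firing on Alertmanager Webhook Log) when firing instances exceed
+# min(MIN_FIRING_INSTANCES, MIN_FIRING_INSTANCES_FRACTION * total). With 100
+# sites in scope that is min(15, 40) = 15; with only 10 sites, min(15, 4) = 4.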
+
+DAY_HOURS = range(9, 18)
+CONFIRMATION_THRESHOLD_SECONDS_DAY = 5 * 60 # 5 minutes; time after which the incident is confirmed
+CONFIRMATION_THRESHOLD_SECONDS_NIGHT = (
+ 10 * 60 # 10 minutes; time after which the incident is confirmed
+)
+CALL_THRESHOLD_SECONDS_DAY = 0 # 0 minutes; time after confirmation after which humans are called
+CALL_THRESHOLD_SECONDS_NIGHT = (
+ 15 * 60 # 15 minutes; time after confirmation after which humans are called
+)
+CALL_REPEAT_INTERVAL_DAY = 15 * 60
+CALL_REPEAT_INTERVAL_NIGHT = 20 * 60
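+# Worked escalation timeline from the constants above: during day hours
+# (09:00-18:00) an incident is confirmed after 5 minutes and humans are
+# called immediately after confirmation; at night it is confirmed after
+# 10 minutes and humans are called 15 minutes after confirmation. Calls
+# repeat every 15 minutes (day) / 20 minutes (night) while the incident
+# stays acknowledged but unresolved.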
+
+
+class Incident(WebsiteGenerator):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.incident_alerts.incident_alerts import IncidentAlerts
+ from press.press.doctype.incident_suggestion.incident_suggestion import IncidentSuggestion
+ from press.press.doctype.incident_updates.incident_updates import IncidentUpdates
+
+ acknowledged_by: DF.Link | None
+ alert: DF.Link | None
+ alerts: DF.Table[IncidentAlerts]
+ called_customer: DF.Check
+ cluster: DF.Link | None
+ corrective_suggestions: DF.Table[IncidentSuggestion]
+ description: DF.TextEditor | None
+ investigation: DF.Link | None
+ likely_cause: DF.Text | None
+ phone_call: DF.Check
+ preventive_suggestions: DF.Table[IncidentSuggestion]
+ resolved_by: DF.Link | None
+ resource: DF.DynamicLink | None
+ resource_type: DF.Link | None
+ route: DF.Data | None
+ server: DF.Link | None
+ show_in_website: DF.Check
+ sms_sent: DF.Check
+ status: DF.Literal[
+ "Validating",
+ "Confirmed",
+ "Acknowledged",
+ "Investigating",
+ "Resolved",
+ "Auto-Resolved",
+ "Press-Resolved",
+ ]
+ subject: DF.Data | None
+ subtype: DF.Literal["High CPU: user", "High CPU: iowait", "Disk full"]
+ type: DF.Literal["Database Down", "Server Down", "Proxy Down"]
+ updates: DF.Table[IncidentUpdates]
+ # end: auto-generated types
+
+ def validate(self):
+ if not hasattr(self, "phone_call") and self.global_phone_call_enabled:
+ self.phone_call = True
+
+ @property
+ def global_phone_call_enabled(self) -> bool:
+ return bool(frappe.db.get_single_value("Incident Settings", "phone_call_alerts", cache=True))
+
+ @property
+ def global_email_alerts_enabled(self) -> bool:
+ return bool(frappe.db.get_single_value("Incident Settings", "email_alerts", cache=True))
+
+ def after_insert(self):
+ """
+ Start investigating the incident, since we have already waited 5m before creating it,
+ and send SMS and email notifications.
+ """
+ try:
+ incident_investigator = frappe.get_doc(
+ {"doctype": "Incident Investigator", "incident": self.name, "server": self.server}
+ )
+ incident_investigator.insert(ignore_permissions=True)
+ self.investigation = incident_investigator.name
+ except frappe.ValidationError:
+ # Investigator in cool off period
+ pass
+ self.send_sms_via_twilio()
+ self.send_email_notification()
+ self.identify_affected_resource()
+
+ def on_update(self):
+ if self.has_value_changed("status"):
+ self.send_email_notification()
+ if self.status == "Confirmed" and not self.called_customer:
+ self.call_customers()
+
+ def vcpu(self, server_type, server_name):
+ vm_name = str(frappe.db.get_value(server_type, server_name, "virtual_machine"))
+ return int(
+ frappe.db.get_value("Virtual Machine", vm_name, "vcpu") or 16 # type: ignore
+ ) # 16 as DO and Scaleway servers have high CPU; Add a CPU field everywhere later
+
+ @cached_property
+ def database_server(self):
+ return str(frappe.db.get_value("Server", self.server, "database_server"))
+
+ @cached_property
+ def proxy_server(self):
+ return str(frappe.db.get_value("Server", self.server, "proxy_server"))
+
+ def get_load(self, name) -> float:
+ timespan = get_confirmation_threshold_duration()
+ load = prometheus_query(
+ f"""avg_over_time(node_load5{{instance="{name}", job="node"}}[{timespan}s])""",
+ lambda x: x,
+ "Asia/Kolkata",
+ timespan,
+ timespan + 1,
+ )["datasets"]
+ if load:
+ ret = load[0]["values"][-1]
+ else:
+ ret = -1 # no response
+ self.add_description(f"{name} load avg(5m): {ret if ret != -1 else 'No data'}")
+ return ret
+
+ def check_high_load(self, resource_type: str, resource: str):
+ load = self.get_load(resource)
+ if load < 0: # no response, likely down
+ return resource_type, resource
+ if load > 3 * self.vcpu(resource_type, resource):
+ return resource_type, resource
+ return False, False
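+ # With the 3x-vCPU cutoff an 8 vCPU server, for example, is flagged once
+ # its 5-minute load average exceeds 24; a load of -1 (no Prometheus data)
+ # also flags it, since the box is likely down.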
+
+ def get_sites_down_list(self):
+ return "\n".join(self.sites_down)
+
+ def identify_affected_resource(self):
+ """
+ Identify the affected resource and set the resource field
+ """
+ self.add_description(f"{len(self.sites_down)} / {self.total_instances} sites down:")
+ self.add_description(self.get_sites_down_list())
+
+ for resource_type, resource in [
+ ("Database Server", self.database_server),
+ ("Server", self.server),
+ ("Proxy Server", self.proxy_server),
+ ]:
+ if self.check_high_load(resource_type, resource) != (False, False):
+ self.resource_type = resource_type
+ self.resource = resource
+ return
+
+ def confirm(self):
+ self.status = "Confirmed"
+ self.identify_affected_resource() # assume 1 resource; Occam's razor
+ self.identify_problem()
+ self.take_grafana_screenshots()
+ if self.down_bench:
+ self.comment_bench_web_err_log(self.down_bench)
+ self.save()
+
+ def get_last_n_lines_of_log(self, log: str, n: int = 100) -> str:
+ # get last n lines of log
+ lines = log.splitlines()
+ return "\n".join(lines[-n:]) if len(lines) > n else log
+
+ def comment_bench_web_err_log(self, bench_name: str):
+ # get last 100 lines of web.error.log from the bench
+ bench: Bench = Bench("Bench", bench_name)
+ try:
+ log = bench.get_server_log("web.error.log")["web.error.log"]
+ except Exception as e:
+ log = f"Error fetching web.error.log: {e!s}"
+
+ self.add_comment(
+ "Comment",
+ f"""Last 100 lines of web.error.log for bench {bench_name}:
+
+{self.get_last_n_lines_of_log(log)}
+
+""",
+ )
+
+ @frappe.whitelist()
+ def regather_info_and_screenshots(self):
+ self.identify_affected_resource()
+ self.identify_problem()
+ self.take_grafana_screenshots()
+
+ def get_cpu_state(self, resource: str) -> tuple[str, float]:
+ """
+ Returns the prominent CPU state and its percentage
+ """
+ timespan = get_confirmation_threshold_duration()
+ cpu_info = prometheus_query(
+ f"""avg by (mode)(rate(node_cpu_seconds_total{{instance="{resource}", job="node"}}[{timespan}s])) * 100""",
+ lambda x: x["mode"],
+ "Asia/Kolkata",
+ timespan,
+ timespan + 1,
+ )["datasets"]
+ mode_cpus: dict[str, float] = {x["name"]: x["values"][-1] for x in cpu_info} or {
+ "user": -1,
+ "idle": -1,
+ "softirq": -1,
+ "iowait": -1,
+ } # no info;
+ max_mode: str = max(mode_cpus, key=lambda k: mode_cpus[k])
+ max_cpu = mode_cpus[max_mode]
+ self.add_description(f"CPU Usage: {max_mode} {max_cpu if max_cpu > 0 else 'No data'}")
+ return max_mode, mode_cpus[max_mode]
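+ # For example, ("iowait", 85.0) means the box mostly waits on disk I/O;
+ # identify_problem below treats "idle" or anything under 70% as
+ # inconclusive and skips CPU-based categorization.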
+
+ def add_description(self, description: str):
+ if not self.description:
+ self.description = ""
+ self.description += "" + description + "
"
+
+ def add_corrective_suggestion(self, suggestion):
+ self.append(
+ "corrective_suggestions",
+ {
+ "suggestion": suggestion,
+ },
+ )
+
+ def add_preventive_suggestion(self, suggestion):
+ self.append(
+ "preventive_suggestions",
+ {
+ "suggestion": suggestion,
+ },
+ )
+
+ def update_user_db_issue(self):
+ self.subtype = "High CPU: user"
+ self.likely_causes = "Likely slow queries or many queries."
+ self.add_corrective_suggestion("Kill long running queries")
+ self.add_preventive_suggestion("Contact user to reduce queries")
+
+ def update_high_io_db_issue(self):
+ self.subtype = "High CPU: iowait"
+ self.likely_causes = "Not enough memory"
+ self.add_corrective_suggestion("Reboot Server")
+ self.add_preventive_suggestion("Upgrade database server for more memory")
+
+ def categorize_db_cpu_issues(self, cpu_state):
+ self.type = "Database Down"
+ if cpu_state == "user":
+ self.update_user_db_issue()
+ elif cpu_state == "iowait":
+ self.update_high_io_db_issue()
+
+ def update_user_server_issue(self):
+ pass
+
+ def update_high_io_server_issue(self):
+ pass
+
+ def categorize_server_cpu_issues(self, cpu_state):
+ self.type = "Server Down"
+ if cpu_state == "user":
+ self.update_user_server_issue()
+ elif cpu_state == "iowait":
+ self.update_high_io_server_issue()
+
+ def ping_sample_site(self):
+ if not (site := self.get_down_site()):
+ return None
+ try:
+ ret = requests.get(f"https://{site}/api/method/ping", timeout=10)
+ except requests.RequestException as e:
+ self.add_description(f"Error pinging sample site {site}: {e!s}")
+ return None
+ else:
+ self.add_description(f"Ping response for sample site {site}: {ret.status_code} {ret.reason}")
+ return ret.status_code
+
+ def categorize_disk_full_issue(self):
+ self.likely_cause = "Disk is full"
+ self.add_corrective_suggestion("Add more storage")
+ self.add_preventive_suggestion("Enable automatic addition of storage")
+
+ def identify_problem(self):
+ pong = self.ping_sample_site()
+ if not self.resource and pong and pong == 500:
+ db: DatabaseServer = frappe.get_doc("Database Server", self.database_server)
+ if db.is_disk_full(MARIADB_DATA_MNT_POINT):
+ self.resource_type = "Database Server"
+ self.resource = self.database_server
+ self.type = "Database Down"
+ self.subtype = "Disk full"
+ self.categorize_disk_full_issue()
+ self.send_disk_full_mail()
+ return
+ # TODO: Try more heuristics if resource isn't identified
+ # Eg: Check mysql up/ docker up/ container up
+ # Ping site for error code to guess more accurately
+ # 500 would mean mysql down or bug in app/config
+ # 502 would mean server/bench down
+ # 504 overloaded workers
+
+ state, percent = self.get_cpu_state(self.resource)
+ if state == "idle" or percent < 70:
+ return
+
+ if self.resource_type == "Database Server":
+ self.categorize_db_cpu_issues(state)
+ elif self.resource_type == "Server":
+ self.categorize_server_cpu_issues(state)
+
+ # TODO: categorize proxy issues #
+
+ @property
+ def other_resource(self):
+ if self.resource_type == "Database Server":
+ return str(self.server)
+ if self.resource_type == "Server":
+ return str(frappe.db.get_value("Server", self.resource, "database_server"))
+ return None
+
+ def add_node_exporter_screenshot(self, page: Page, instance: str | None):
+ if not instance:
+ return
+
+ page.goto(
+ f"https://{self.monitor_server.name}{self.monitor_server.node_exporter_dashboard_path}&refresh=5m&var-DS_PROMETHEUS=Prometheus&var-job=node&var-node={instance}&from=now-1h&to=now"
+ )
+ page.wait_for_load_state("networkidle")
+
+ image = b64encode(page.screenshot()).decode("ascii")
+ self.add_description(f'<img src="data:image/png;base64,{image}">')
+
+ @cached_property
+ def monitor_server(self) -> MonitorServer:
+ press_settings: PressSettings = frappe.get_cached_doc("Press Settings")
+ if not (monitor_url := press_settings.monitor_server):
+ frappe.throw("Monitor Server not set in Press Settings")
+ return frappe.get_cached_doc("Monitor Server", monitor_url)
+
+ def get_grafana_auth_header(self):
+ username = str(self.monitor_server.grafana_username)
+ password = str(self.monitor_server.get_password("grafana_password"))
+ token = b64encode(f"{username}:{password}".encode()).decode("ascii")
+ return f"Basic {token}"
+
+ @filelock("grafana_screenshots") # prevent 100 chromes from opening
+ def take_grafana_screenshots(self):
+ if not frappe.db.get_single_value("Incident Settings", "grafana_screenshots"):
+ return
+ with sync_playwright() as p:
+ browser = p.chromium.launch(headless=True, channel="chromium")
+ page = browser.new_page(locale="en-IN", timezone_id="Asia/Kolkata")
+ page.set_extra_http_headers({"Authorization": self.get_grafana_auth_header()})
+
+ self.add_node_exporter_screenshot(page, self.resource or self.server)
+ self.add_node_exporter_screenshot(page, self.other_resource)
+
+ self.save()
+
+ @frappe.whitelist()
+ def reboot_database_server(self):
+ db_server_name = frappe.db.get_value("Server", self.server, "database_server")
+ if not db_server_name:
+ frappe.throw("No database server found for this server")
+ db_server = DatabaseServer("Database Server", db_server_name)
+ try:
+ db_server.reboot_with_serial_console()
+ except NotImplementedError:
+ db_server.reboot()
+ self.add_likely_cause("Rebooted database server.")
+ self.save()
+
+ @frappe.whitelist()
+ def cancel_stuck_jobs(self):
+ """
+ During db reboot/upgrade some jobs tend to get stuck. This is a hack to cancel those jobs.
+ """
+ stuck_jobs = frappe.get_all(
+ "Agent Job",
+ {
+ "status": "Running",
+ INCIDENT_SCOPE: self.incident_scope,
+ "job_type": (
+ "in",
+ ["Fetch Database Table Schema", "Backup Site", "Restore Site"],
+ ), # to be safe
+ },
+ ["name", "job_type"],
+ limit=2,
+ ) # only 2 workers
+ for stuck_job in stuck_jobs:
+ job = AgentJob("Agent Job", stuck_job.name)
+ job.cancel_job()
+ self.add_likely_cause(f"Cancelled stuck {stuck_job.job_type} job {stuck_job.name}")
+ self.save()
+
+ def add_likely_cause(self, cause: str):
+ self.likely_cause = self.likely_cause + cause + "\n" if self.likely_cause else cause + "\n"
+
+ @cached_property
+ def down_bench(self):
+ down_benches = self.monitor_server.get_benches_down_for_server(str(self.server))
+ return down_benches[0] if down_benches else None
+
+ @frappe.whitelist()
+ def restart_down_benches(self):
+ """
+ Restart all benches on the server that are down
+ """
+ down_benches = self.monitor_server.get_benches_down_for_server(str(self.server))
+ if not down_benches:
+ frappe.throw("No down benches found for this server")
+ return
+ for bench_name in down_benches:
+ bench: Bench = Bench("Bench", bench_name)
+ bench.restart()
+ self.add_likely_cause(f"Restarted bench {bench_name}")
+ self.save()
+
+ def call_humans(self):
+ enqueue_doc(
+ self.doctype,
+ self.name,
+ "_call_humans",
+ queue="default",
+ timeout=1800,
+ enqueue_after_commit=True,
+ at_front=True,
+ job_id=f"call_humans:{self.name}",
+ deduplicate=True,
+ )
+
+ def get_humans(
+ self,
+ ):
+ """
+ Returns a list of users who are in the incident team
+ """
+ incident_settings: IncidentSettings = frappe.get_cached_doc("Incident Settings") # type: ignore
+ users = incident_settings.users
+ if frappe.db.exists("Self Hosted Server", {"server": self.server}) or frappe.db.get_value(
+ "Server", self.server, "is_self_hosted"
+ ):
+ users = incident_settings.self_hosted_users
+ ret: DF.Table = users
+ if self.status == "Acknowledged": # repeat the acknowledged user to be the first
+ for user in users:
+ if user.user == self.acknowledged_by:
+ ret.remove(user)
+ ret.insert(0, user)
+ return ret
+
+ @property
+ def twilio_phone_number(self):
+ press_settings: PressSettings = frappe.get_cached_doc("Press Settings")
+ return Phone(press_settings.twilio_phone_number)
+
+ @property
+ def twilio_client(self):
+ press_settings: PressSettings = frappe.get_cached_doc("Press Settings")
+ try:
+ return press_settings.twilio_client
+ except Exception:
+ log_error("Twilio Client not configured in Press Settings")
+ if not frappe.flags.in_test:
+ frappe.db.commit()
+ raise
+
+ @retry(
+ retry=retry_if_not_result(
+ lambda result: result in ["canceled", "completed", "failed", "busy", "no-answer", "in-progress"]
+ ),
+ wait=wait_fixed(1),
+ stop=stop_after_attempt(30),
+ )
+ def wait_for_pickup(self, call: CallInstance):
+ return call.fetch().status # will eventually be no-answer
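+ # The tenacity decorator above polls fetch() once a second, up to 30
+ # attempts, until the call reaches one of the listed terminal statuses;
+ # if it never does, RetryError propagates and _call_humans records the
+ # synthetic "timeout" status.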
+
+ def notify_unable_to_reach_twilio(self):
+ TelegramMessage.enqueue(
+ f"""Unable to reach Twilio for Incident in {self.server}
+
+Likely due to insufficient balance or incorrect credentials""",
+ reraise=True,
+ )
+
+ def call_human(self, human: IncidentSettingsUser | IncidentSettingsSelfHostedUser):
+ try:
+ return self.twilio_client.calls.create(
+ url="http://demo.twilio.com/docs/voice.xml",
+ to=human.phone,
+ from_=self.twilio_phone_number,
+ )
+ except TwilioRestException:
+ self.notify_unable_to_reach_twilio()
+ raise
+
+ def _call_humans(self):
+ if not self.phone_call or not self.global_phone_call_enabled:
+ return
+ if (
+ ignore_till := frappe.db.get_value("Server", self.server, "ignore_incidents_till")
+ ) and ignore_till > frappe.utils.now_datetime():
+ return
+ for human in self.get_humans():
+ if not (call := self.call_human(human)):
+ return # can't twilio
+ acknowledged = False
+ status = str(call.status)
+ try:
+ status = str(self.wait_for_pickup(call))
+ except RetryError:
+ status = "timeout" # not Twilio's status; mostly translates to no-answer
+ else:
+ if status in ["in-progress", "completed"]: # call was picked up
+ acknowledged = True
+ self.status = "Acknowledged"
+ self.acknowledged_by = human.user
+ break
+ finally:
+ self.add_acknowledgment_update(human, acknowledged=acknowledged, call_status=status)
+
+ def send_sms_via_twilio(self):
+ """
+ Sends an SMS to the members of the incident team.
+ Uses Twilio for sending the SMS.
+ Loops over the numbers and sends one message per recipient,
+ since Twilio requires a separate API call for each SMS.
+ Ref: https://support.twilio.com/hc/en-us/articles/223181548-Can-I-set-up-one-API-call-to-send-messages-to-a-list-of-people-
+ """
+ if (
+ ignore_till := frappe.db.get_value("Server", self.server, "ignore_incidents_till")
+ ) and ignore_till > frappe.utils.now_datetime():
+ return
+ domain = frappe.db.get_value("Press Settings", None, "domain")
+ incident_link = f"https://{domain}{self.get_url()}"
+ message = f"Incident on server: {self.server}\n\nURL: {incident_link}\n\nID: {self.name}"
+ for human in self.get_humans():
+ self.twilio_client.messages.create(to=human.phone, from_=self.twilio_phone_number, body=message)
+ self.reload() # In case the phone call status is modified by the investigator before the sms is sent
+ self.sms_sent = 1
+ self.save()
+
+ def send_disk_full_mail(self):
+ title = str(frappe.db.get_value("Server", self.server, "title"))
+ if self.resource_type:
+ title = str(frappe.db.get_value(self.resource_type, self.resource, "title"))
+ subject = f"Disk Full Incident on {title}"
+ message = f"""
+ Dear User,
+ You are receiving this mail because the storage on your server {self.resource} is full and automatic addition of storage is disabled.
+ Please enable automatic addition of storage or add more storage manually to resolve the issue.
+ Best regards, Frappe Cloud Team
+ """
+ self.send_mail(subject, message)
+
+ def send_mail(self, subject: str, message: str):
+ try:
+ frappe.sendmail(
+ recipients=get_communication_info("Email", "Server Activity", "Server", self.server),
+ subject=subject,
+ reference_doctype=self.doctype,
+ reference_name=self.name,
+ template="incident",
+ args={
+ "message": message,
+ "link": f"dashboard/servers/{self.server}/analytics/",
+ },
+ now=True,
+ )
+
+ except Exception:
+ # Swallow the exception to avoid breaking the Incident creation
+ log_error("Incident Notification Email Failed")
+
+ def send_email_notification(self):
+ if not self.global_email_alerts_enabled:
+ return
+
+ if self.status == "Investigating":
+ return
+
+ # Notifications are only meaningful for incidents that are linked to a server and a team
+ team = frappe.db.get_value("Server", self.server, "team")
+ if (not self.server) or (not team):
+ return
+ subject = self.get_email_subject()
+ message = self.get_email_message()
+ self.send_mail(subject, message)
+
+ def get_email_subject(self):
+ title = str(frappe.db.get_value("Server", self.server, "title"))
+ name = title.removesuffix(" - Application") or self.server
+ return f"Incident on {name} - {self.alert}"
+
+ def get_email_message(self):
+ acknowledged_by = "An engineer"
+ if self.acknowledged_by:
+ acknowledged_by = frappe.db.get_value("User", self.acknowledged_by, "first_name")
+ return {
+ "Validating": "We are noticing some issues with sites on your server. We are giving it a few minutes to confirm before escalating this incident to our engineers.",
+ "Auto-Resolved": "Your sites are now up! This incident has resolved on its own. We will keep monitoring your sites for any further issues.",
+ "Confirmed": "We are still noticing issues with your sites. We are escalating this incident to our engineers.",
+ "Acknowledged": f"{acknowledged_by} from our team has acknowledged the incident and is actively investigating. Please allow them some time to diagnose and address the issue.",
+ "Resolved": f"Your sites are now up! {acknowledged_by} has resolved this incident. We will keep monitoring your sites for any further issues",
+ }[self.status]
+
+ def call_customers(self):
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "_call_customers",
+ queue="default",
+ enqueue_after_commit=True,
+ at_front=True,
+ job_id=f"incident||call_customers||{self.name}",
+ deduplicate=True,
+ )
+
+ def _call_customers(self):
+ if not self.phone_call:
+ return
+
+ phone_nos = get_communication_info("Phone Call", "Incident", "Server", self.server)
+ if not phone_nos:
+ return
+
+ for phone_no in phone_nos:
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "_call_customer",
+ queue="default",
+ timeout=1800,
+ enqueue_after_commit=True,
+ phone_no=phone_no,
+ )
+
+ self.add_comment("Comment", f"Called customers at {', '.join(phone_nos)}")
+
+ self.called_customer = 1
+ self.save()
+
+ def _call_customer(self, phone_no: str):
+ twilio_client = self.twilio_client
+ if not twilio_client:
+ return
+ from_phone = self.twilio_phone_number
+ server_title = frappe.db.get_value("Server", self.server, "title") or self.server
+ if not from_phone or not server_title:
+ return
+
+ server_title_encoded = urllib.parse.quote(server_title)
+
+ press_public_base_url = frappe.utils.get_url()
+ twilio_client.calls.create(
+ url=f"{press_public_base_url}/api/method/press.api.message.confirmed_incident?server_title={server_title_encoded}",
+ to=phone_no,
+ from_=from_phone,
+ )
+
+ def add_acknowledgment_update(
+ self,
+ human: IncidentSettingsUser | IncidentSettingsSelfHostedUser,
+ call_status: str | None = None,
+ acknowledged=False,
+ ):
+ """
+ Adds a new update to the Incident Document
+ """
+ if acknowledged:
+ update_note = f"Acknowledged by {human.user}"
+ else:
+ update_note = f"Acknowledgement failed for {human.user}"
+ if call_status:
+ update_note += f" with call status {call_status}"
+ self.append(
+ "updates",
+ {
+ "update_note": update_note,
+ "update_time": frappe.utils.now(),
+ },
+ )
+ self.save()
+
+ def set_acknowledgement(self, acknowledged_by):
+ """
+ Sets the Incident status to Acknowledged
+ """
+ self.status = "Acknowledged"
+ self.acknowledged_by = acknowledged_by
+ self.save()
+
+ @property
+ def incident_scope(self):
+ return getattr(self, INCIDENT_SCOPE)
+
+ @property
+ def total_instances(self) -> int:
+ return frappe.db.count(
+ "Site",
+ {"status": "Active", INCIDENT_SCOPE: self.incident_scope},
+ )
+
+ def check_resolved(self):
+ try:
+ last_resolved: AlertmanagerWebhookLog = frappe.get_last_doc(
+ "Alertmanager Webhook Log",
+ {
+ "status": "Resolved",
+ "group_key": ("like", f"%{self.incident_scope}%"),
+ "alert": self.alert,
+ },
+ )
+ except frappe.DoesNotExistError:
+ return
+ else:
+ if not last_resolved.is_enough_firing:
+ self.create_log_for_server(is_resolved=True)
+ self.resolve()
+
+ def resolve(self):
+ if self.status == "Validating":
+ self.status = "Auto-Resolved"
+ else:
+ self.status = "Resolved"
+ self.save()
+
+ def create_log_for_server(self, is_resolved: bool = False):
+ """We will create a incident log on the server activity for confirmed incidents and their resolution"""
+ try:
+ incidence_server: Server | DatabaseServer = frappe.get_cached_doc(
+ self.resource_type, self.resource
+ )
+ except Exception:
+ if not self.server:
+ return
+ incidence_server = frappe.get_cached_doc("Server", self.server)
+
+ incidence_server.create_log(
+ "Incident",
+ f"{self.alert} resolved" if is_resolved else f"{self.alert} reported",
+ )
+
+ @property
+ def time_to_call_for_help(self) -> bool:
+ return self.status == "Confirmed" and frappe.utils.now_datetime() - self.creation > timedelta(
+ seconds=get_confirmation_threshold_duration() + get_call_threshold_duration()
+ )
+
+ @property
+ def time_to_call_for_help_again(self) -> bool:
+ return self.status == "Acknowledged" and frappe.utils.now_datetime() - self.modified > timedelta(
+ seconds=get_call_repeat_interval()
+ )
+
+ @cached_property
+ def sites_down(self) -> list[str]:
+ return self.monitor_server.get_sites_down_for_server(str(self.server))
+
+ @frappe.whitelist()
+ def get_down_site(self):
+ return self.sites_down[0] if self.sites_down else None
+
+
+def get_confirmation_threshold_duration():
+ if frappe.utils.now_datetime().hour in DAY_HOURS:
+ return (
+ cint(frappe.db.get_value("Incident Settings", None, "confirmation_threshold_day"))
+ or CONFIRMATION_THRESHOLD_SECONDS_DAY
+ )
+ return (
+ cint(frappe.db.get_value("Incident Settings", None, "confirmation_threshold_night"))
+ or CONFIRMATION_THRESHOLD_SECONDS_NIGHT
+ )
+
+
+def get_call_threshold_duration():
+ if frappe.utils.now_datetime().hour in DAY_HOURS:
+ return (
+ cint(frappe.db.get_value("Incident Settings", None, "call_threshold_day"))
+ or CALL_THRESHOLD_SECONDS_DAY
+ )
+ return (
+ cint(frappe.db.get_value("Incident Settings", None, "call_threshold_night"))
+ or CALL_THRESHOLD_SECONDS_NIGHT
+ )
+
+
+def get_call_repeat_interval():
+ if frappe.utils.now_datetime().hour in DAY_HOURS:
+ return (
+ cint(frappe.db.get_value("Incident Settings", None, "call_repeat_interval_day"))
+ or CALL_REPEAT_INTERVAL_DAY
+ )
+ return (
+ cint(frappe.db.get_value("Incident Settings", None, "call_repeat_interval_night"))
+ or CALL_REPEAT_INTERVAL_NIGHT
+ )
+
+
+def validate_incidents():
+ validating_incidents = frappe.get_all(
+ "Incident",
+ filters={
+ "status": "Validating",
+ },
+ fields=["name", "creation"],
+ )
+ for incident_dict in validating_incidents:
+ if frappe.utils.now_datetime() - incident_dict.creation > timedelta(
+ seconds=get_confirmation_threshold_duration()
+ ):
+ incident = Incident("Incident", incident_dict.name)
+ incident.confirm()
+
+
+def resolve_incidents():
+ ongoing_incidents = frappe.get_all(
+ "Incident",
+ filters={
+ "status": ("in", ["Validating", "Confirmed", "Acknowledged"]),
+ },
+ pluck="name",
+ )
+ for incident_name in ongoing_incidents:
+ incident = Incident("Incident", incident_name)
+ incident.check_resolved()
+ if incident.time_to_call_for_help or incident.time_to_call_for_help_again:
+ incident.create_log_for_server()
+ incident.call_humans()
+
+
+def notify_ignored_servers():
+ servers = frappe.qb.DocType("Server")
+ if not (
+ ignored_servers := frappe.qb.from_(servers)
+ .select(servers.name, servers.ignore_incidents_till)
+ .where(servers.status == "Active")
+ .where(servers.ignore_incidents_till.isnotnull())
+ .run(as_dict=True)
+ ):
+ return
+
+ message = "The following servers are being ignored for incidents:\n\n"
+ for server in ignored_servers:
+ message += f"{server.name} till {frappe.utils.pretty_date(server.ignore_incidents_till)}\n"
+ message += "\n@adityahase @balamurali27 @saurabh6790\n"
+ TelegramMessage.enqueue(message)
+
+
+def on_doctype_update():
+ frappe.db.add_index("Incident", ["alert", "server", "status"])
diff --git a/press/press/doctype/incident/templates/incident.html b/press/press/doctype/incident/templates/incident.html
new file mode 100644
index 00000000000..de77f579bd6
--- /dev/null
+++ b/press/press/doctype/incident/templates/incident.html
@@ -0,0 +1,30 @@
+{% extends "templates/web.html" %}
+
+{% block page_content %}
+<table class="table">
+ <thead>
+ <tr>
+ <th>Time</th>
+ <th>Update</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for update in updates %}
+ <tr>
+ <td>{{ update.update_time }}</td>
+ <td>{{ update.update_note }}</td>
+ </tr>
+ {% endfor %}
+ </tbody>
+</table>
+{% endblock %}
\ No newline at end of file
diff --git a/press/press/doctype/incident/templates/incident_row.html b/press/press/doctype/incident/templates/incident_row.html
new file mode 100644
index 00000000000..d7014b453ab
--- /dev/null
+++ b/press/press/doctype/incident/templates/incident_row.html
@@ -0,0 +1,4 @@
+
+
diff --git a/press/press/doctype/incident/test_incident.py b/press/press/doctype/incident/test_incident.py
new file mode 100644
index 00000000000..bbb0355ce20
--- /dev/null
+++ b/press/press/doctype/incident/test_incident.py
@@ -0,0 +1,550 @@
+# Copyright (c) 2023, Frappe and Contributors
+# See license.txt
+from __future__ import annotations
+
+import math
+import zoneinfo
+from contextlib import suppress
+from datetime import datetime, timedelta
+from unittest.mock import Mock, patch
+
+import frappe
+from frappe.tests.utils import FrappeTestCase
+from hypothesis import given, settings
+from hypothesis import strategies as st
+from twilio.base.exceptions import TwilioRestException
+
+from press.press.doctype.agent_job.agent_job import AgentJob
+from press.press.doctype.alertmanager_webhook_log.alertmanager_webhook_log import (
+ AlertmanagerWebhookLog,
+)
+from press.press.doctype.alertmanager_webhook_log.test_alertmanager_webhook_log import (
+ create_test_alertmanager_webhook_log,
+)
+from press.press.doctype.incident.incident import (
+ CALL_REPEAT_INTERVAL_NIGHT,
+ CALL_THRESHOLD_SECONDS_NIGHT,
+ CONFIRMATION_THRESHOLD_SECONDS_NIGHT,
+ MIN_FIRING_INSTANCES,
+ MIN_FIRING_INSTANCES_FRACTION,
+ Incident,
+ resolve_incidents,
+ validate_incidents,
+)
+from press.press.doctype.prometheus_alert_rule.test_prometheus_alert_rule import (
+ create_test_prometheus_alert_rule,
+)
+from press.press.doctype.site.test_site import create_test_site
+from press.press.doctype.team.test_team import create_test_press_admin_team
+from press.press.doctype.telegram_message.telegram_message import TelegramMessage
+from press.utils.test import foreground_enqueue_doc
+
+
+class MockTwilioCallInstance:
+ def __init__(self, sid="test", status="queued"):
+ self.sid = sid
+ self.status = status
+
+ def fetch(self):
+ return self
+
+
+class MockTwilioCallList:
+ def __init__(self, status="queued", *args, **kwargs):
+ self.status = status
+
+ def create(self, *args, **kwargs):
+ return MockTwilioCallInstance(status=self.status)
+
+
+class MockTwilioMessageInstance:
+ def __init__(self, *args, **kwargs):
+ pass
+
+
+class MockTwilioMessageList:
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def create(self, *args, **kwargs):
+ return MockTwilioMessageInstance()
+
+
+class MockTwilioClient:
+ def __init__(self, *args, **kwargs):
+ pass
+
+ @property
+ def calls(self):
+ return MockTwilioCallList()
+
+ @property
+ def messages(self):
+ return MockTwilioMessageList()
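+
+# These mocks mirror just enough of twilio.rest.Client's surface
+# (client.calls.create(...).fetch().status and client.messages.create(...))
+# for the code under test; the status given to MockTwilioCallList decides
+# whether a simulated call counts as picked up.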
+
+
+@st.composite
+def get_total_and_firing_for_ongoing_incident(draw) -> tuple[int, int]:
+ total = draw(st.integers(min_value=1, max_value=50))
+ firing = draw(
+ st.integers(
+ min_value=min(
+ MIN_FIRING_INSTANCES + 1, math.floor(MIN_FIRING_INSTANCES_FRACTION * total) + 1, total
+ ),
+ max_value=total,
+ )
+ )
+ return total, firing
+
+
+@st.composite
+def get_total_firing_and_resolved_for_resolved_incident(draw) -> tuple[int, int, int]:
+ """Generate a tuple of total and resolved instances such that incident is resolved."""
+ total = draw(st.integers(min_value=1, max_value=50))
+ firing = draw(
+ st.integers(
+ min_value=min(
+ MIN_FIRING_INSTANCES + 1, math.floor(MIN_FIRING_INSTANCES_FRACTION * total) + 1, total
+ ), # enough instances to trigger incident
+ max_value=total,
+ )
+ )
+ resolved = draw(
+ st.integers(
+ min_value=firing - min(MIN_FIRING_INSTANCES, math.floor(MIN_FIRING_INSTANCES_FRACTION * total)),
+ max_value=firing, # at least 1 firing and at most all firing instances should be resolved
+ )
+ )
+ return total, firing, resolved
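+
+# Worked example for the strategies above: with total=50 the firing lower
+# bound is min(15 + 1, floor(0.4 * 50) + 1, 50) = 16, so any firing count in
+# [16, 50] confirms an incident; resolved is then drawn so that at most
+# min(15, 20) = 15 instances remain firing, below the incident threshold.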
+
+
+@patch(
+ "press.press.doctype.alertmanager_webhook_log.alertmanager_webhook_log.enqueue_doc",
+ new=foreground_enqueue_doc,
+)
+@patch.object(AlertmanagerWebhookLog, "send_telegram_notification", new=Mock())
+@patch.object(AlertmanagerWebhookLog, "react", new=Mock())
+@patch("press.press.doctype.incident.incident.frappe.db.commit", new=Mock())
+@patch.object(AgentJob, "enqueue_http_request", new=Mock())
+@patch("press.press.doctype.site.site._change_dns_record", new=Mock())
+@patch("press.press.doctype.press_settings.press_settings.Client", new=MockTwilioClient)
+@patch("press.press.doctype.incident.incident.enqueue_doc", new=foreground_enqueue_doc)
+@patch("tenacity.nap.time", new=Mock()) # no sleep
+@patch.object(Incident, "sites_down", new=[])
+@patch.object(Incident, "down_bench", new=[])
+class TestIncident(FrappeTestCase):
+ def setUp(self):
+ super().setUp()
+
+ self.from_ = "+911234567892"
+ frappe.db.set_single_value("Press Settings", "twilio_account_sid", "test")
+ frappe.db.set_single_value("Press Settings", "twilio_api_key_sid", "test")
+ frappe.db.set_single_value("Press Settings", "twilio_api_key_secret", "test")
+ frappe.db.set_single_value("Press Settings", "twilio_phone_number", self.from_)
+
+ self._create_test_incident_settings()
+
+ def tearDown(self):
+ frappe.db.rollback()
+
+ def _create_test_incident_settings(self):
+ user1 = create_test_press_admin_team().user
+ user2 = create_test_press_admin_team().user
+ self.test_phno_1 = "+911234567890"
+ self.test_phno_2 = "+911234567891"
+
+ # Purge Incident Settings if exists
+ if frappe.db.exists("Incident Settings"):
+ frappe.delete_doc("Incident Settings", "Incident Settings")
+
+ frappe.get_doc(
+ {
+ "doctype": "Incident Settings",
+ "users": [
+ {
+ "user": user1,
+ "phone": self.test_phno_1,
+ },
+ {
+ "user": user2,
+ "phone": self.test_phno_2,
+ },
+ ],
+ }
+ ).insert()
+
+ @patch(
+ "press.press.doctype.incident.test_incident.MockTwilioCallList.create",
+ wraps=MockTwilioCallList("busy").create,
+ )
+ def test_incident_creation_places_phone_call_to_all_humans_in_incident_team_if_no_one_picks_up(
+ self, mock_calls_create: Mock
+ ):
+ frappe.get_doc(
+ {
+ "doctype": "Incident",
+ "alertname": "Test Alert",
+ }
+ ).insert().call_humans()
+ self.assertEqual(mock_calls_create.call_count, 2)
+ mock_calls_create.assert_any_call(
+ from_=self.from_,
+ to=self.test_phno_1,
+ url="http://demo.twilio.com/docs/voice.xml",
+ )
+ mock_calls_create.assert_any_call(
+ from_=self.from_,
+ to=self.test_phno_2,
+ url="http://demo.twilio.com/docs/voice.xml",
+ )
+
+ @patch(
+ "press.press.doctype.incident.test_incident.MockTwilioCallList.create",
+ wraps=MockTwilioCallList("completed").create,
+ )
+ def test_incident_calls_only_one_person_if_first_person_picks_up(self, mock_calls_create: Mock):
+ frappe.get_doc(
+ {
+ "doctype": "Incident",
+ "alertname": "Test Alert",
+ }
+ ).insert().call_humans()
+ self.assertEqual(mock_calls_create.call_count, 1)
+
+ @patch(
+ "press.press.doctype.incident.test_incident.MockTwilioCallList.create",
+ wraps=MockTwilioCallList("completed").create,
+ )
+ def test_incident_calls_stop_for_in_progress_state(self, mock_calls_create):
+ incident = frappe.get_doc(
+ {
+ "doctype": "Incident",
+ "alertname": "Test Alert",
+ }
+ ).insert()
+ incident.call_humans()
+ self.assertEqual(mock_calls_create.call_count, 1)
+ incident.reload()
+ self.assertEqual(len(incident.updates), 1)
+
+ @patch(
+ "press.press.doctype.incident.test_incident.MockTwilioCallList.create",
+ wraps=MockTwilioCallList("ringing").create,
+ )
+ def test_incident_calls_next_person_after_retry_limit(self, mock_calls_create):
+ frappe.get_doc(
+ {
+ "doctype": "Incident",
+ "alertname": "Test Alert",
+ }
+ ).insert().call_humans()
+ self.assertEqual(mock_calls_create.call_count, 2)
+
+ @patch("press.press.doctype.incident.incident.Incident.wait_for_pickup", new=Mock())
+ def test_incident_gets_created_on_alert_that_meets_conditions(self):
+ incident_count = frappe.db.count("Incident")
+ create_test_alertmanager_webhook_log()
+ self.assertEqual(frappe.db.count("Incident") - incident_count, 1)
+
+ def test_incident_not_created_when_sites_very_less_than_scope_is_down(self):
+ """1 out of 3 sites on server down"""
+ incident_count_before = frappe.db.count("Incident")
+ site = create_test_site()
+ create_test_site(server=site.server)
+ create_test_site(server=site.server)
+ create_test_alertmanager_webhook_log(site=site)
+ self.assertEqual(frappe.db.count("Incident"), incident_count_before)
+
+ def test_incident_created_when_sites_within_scope_is_down(self):
+ """3 out of 3 sites on server down"""
+ incident_count_before = frappe.db.count("Incident")
+ site = create_test_site()
+ site2 = create_test_site(server=site.server)
+ site3 = create_test_site(server=site.server)
+ create_test_alertmanager_webhook_log(site=site)
+ create_test_alertmanager_webhook_log(site=site2)
+ create_test_alertmanager_webhook_log(site=site3)
+ self.assertEqual(frappe.db.count("Incident") - incident_count_before, 1)
+
+ def test_call_event_creates_acknowledgement_update(self):
+ with patch.object(MockTwilioCallList, "create", new=MockTwilioCallList("completed").create):
+ incident = frappe.get_doc(
+ {
+ "doctype": "Incident",
+ "alertname": "Test Alert",
+ }
+ ).insert()
+ incident.call_humans()
+ incident.reload()
+ self.assertEqual(incident.status, "Acknowledged")
+ self.assertEqual(len(incident.updates), 1)
+ with patch.object(MockTwilioCallList, "create", new=MockTwilioCallList("no-answer").create):
+ incident = frappe.get_doc(
+ {
+ "doctype": "Incident",
+ "alertname": "Test Alert",
+ }
+ ).insert()
+ incident.call_humans()
+ incident.reload()
+ self.assertEqual(len(incident.updates), 2)
+
+ @patch(
+ "press.press.doctype.incident.test_incident.MockTwilioCallList.create",
+ wraps=MockTwilioCallList("completed").create,
+ )
+ def test_global_phone_call_alerts_disabled_wont_create_phone_calls(self, mock_calls_create):
+ frappe.db.set_single_value("Incident Settings", "phone_call_alerts", 0)
+ frappe.get_doc(
+ {
+ "doctype": "Incident",
+ "alertname": "Test Alert",
+ }
+ ).insert().call_humans()
+ mock_calls_create.assert_not_called()
+ frappe.get_doc(
+ {
+ "doctype": "Incident",
+ "alertname": "Test Alert",
+ "phone_call": False,
+ }
+ ).insert().call_humans()
+ mock_calls_create.assert_not_called()
+ frappe.db.set_single_value("Incident Settings", "phone_call_alerts", 1)
+ frappe.get_doc(
+ {
+ "doctype": "Incident",
+ "alertname": "Test Alert",
+ "phone_call": False,
+ }
+ ).insert().call_humans()
+ mock_calls_create.assert_not_called()
+
+ def test_duplicate_incidents_arent_created_for_same_alert(self):
+ incident_count_before = frappe.db.count("Incident")
+ site = create_test_site()
+ site2 = create_test_site(server=site.server)
+ create_test_alertmanager_webhook_log(site=site)
+ create_test_alertmanager_webhook_log(site=site2)
+ self.assertEqual(frappe.db.count("Incident") - 1, incident_count_before)
+ site3 = create_test_site() # new server
+ create_test_alertmanager_webhook_log(site=site3)
+ self.assertEqual(frappe.db.count("Incident") - 2, incident_count_before)
+
+ @patch(
+ "press.press.doctype.incident.test_incident.MockTwilioMessageList.create",
+ wraps=MockTwilioMessageList().create,
+ )
+ def test_incident_creation_sends_text_message(self, mock_messages_create: Mock):
+ frappe.get_doc(
+ {
+ "doctype": "Incident",
+ "alertname": "Test Alert",
+ }
+ ).insert()
+ self.assertEqual(mock_messages_create.call_count, 2)
+
+ def test_incident_gets_auto_resolved_when_resolved_alerts_fire(self):
+ site = create_test_site()
+ alert = create_test_prometheus_alert_rule()
+ create_test_alertmanager_webhook_log(site=site, alert=alert, status="firing")
+ incident = frappe.get_last_doc("Incident")
+ self.assertEqual(incident.status, "Validating")
+ create_test_alertmanager_webhook_log(site=site, alert=alert, status="resolved")
+ resolve_incidents()
+ incident.reload()
+ self.assertEqual(incident.status, "Auto-Resolved")
+
+ @given(get_total_and_firing_for_ongoing_incident())
+ @settings(max_examples=20, deadline=timedelta(seconds=5))
+ def test_is_enough_firing_is_true_for_ongoing_incident(self, total_firing):
+ alert = create_test_alertmanager_webhook_log()
+ total, firing = total_firing
+ firing_instances = [0] * firing
+ with (
+ patch.object(AlertmanagerWebhookLog, "total_instances", new=total),
+ patch.object(
+ AlertmanagerWebhookLog,
+ "past_alert_instances",
+ new=lambda x, y: firing_instances,
+ ),
+ ):
+ self.assertTrue(alert.is_enough_firing)
+
+ @given(get_total_firing_and_resolved_for_resolved_incident())
+ @settings(max_examples=20, deadline=timedelta(seconds=5))
+ def test_is_enough_firing_is_false_for_resolved_incident(self, total_firing_resolved):
+ alert = create_test_alertmanager_webhook_log(status="resolved")
+ total, firing, resolved = total_firing_resolved
+ firing_instances = set(range(firing))
+ resolved_instances = set(range(resolved))
+
+ with (
+ patch.object(AlertmanagerWebhookLog, "total_instances", new=total),
+ patch.object(
+ AlertmanagerWebhookLog,
+ "past_alert_instances",
+ side_effect=[firing_instances, resolved_instances],
+ ),
+ ):
+ self.assertFalse(alert.is_enough_firing)
+
+ def test_incident_does_not_resolve_when_other_alerts_are_still_firing_but_does_when_less_than_required_sites_are_down(
+ self,
+ ):
+ site = create_test_site()
+ site2 = create_test_site(server=site.server)
+ site3 = create_test_site(server=site.server)
+ alert = create_test_prometheus_alert_rule()
+
+ create_test_alertmanager_webhook_log(site=site, alert=alert, status="firing") # 33% sites down
+ create_test_alertmanager_webhook_log(site=site2, alert=alert, status="firing") # 66% sites down
+ incident: Incident = frappe.get_last_doc("Incident")
+ self.assertEqual(incident.status, "Validating")
+
+ create_test_alertmanager_webhook_log(site=site3, status="firing") # 3rd site down, nothing resolved
+ resolve_incidents()
+ incident.reload()
+ self.assertEqual(incident.status, "Validating")
+
+ create_test_alertmanager_webhook_log(site=site3, status="resolved") # 66% sites down, 1 resolved
+ resolve_incidents()
+ incident.reload()
+ self.assertEqual(incident.status, "Validating")
+
+ create_test_alertmanager_webhook_log(
+ site=site2, status="resolved"
+ ) # 33% sites down, 2 resolved # minimum resolved
+ resolve_incidents()
+ incident.reload()
+ self.assertEqual(incident.status, "Auto-Resolved")
+
+ def test_threshold_field_is_checked_before_calling(self):
+ create_test_alertmanager_webhook_log()
+ incident = frappe.get_last_doc("Incident")
+ incident.db_set("creation", frappe.utils.add_to_date(frappe.utils.now(), minutes=-1))
+ validate_incidents()
+ incident.reload()
+ self.assertEqual(incident.status, "Validating") # default min threshold is 5 mins
+ incident.db_set("creation", frappe.utils.add_to_date(frappe.utils.now(), minutes=-17))
+ validate_incidents()
+ incident.reload()
+ self.assertEqual(incident.status, "Confirmed")
+ incident.db_set("status", "Validating")
+ incident.db_set("creation", frappe.utils.add_to_date(frappe.utils.now(), minutes=-19))
+ frappe.db.set_single_value("Incident Settings", "confirmation_threshold_day", str(21 * 60))
+ frappe.db.set_single_value("Incident Settings", "confirmation_threshold_night", str(21 * 60))
+ validate_incidents()
+ incident.reload()
+ self.assertEqual(incident.status, "Validating")
+
+ @patch(
+ "press.press.doctype.incident.test_incident.MockTwilioCallList.create",
+ wraps=MockTwilioCallList("completed").create,
+ )
+ def test_calls_repeated_for_acknowledged_incidents(self, mock_calls_create):
+ create_test_alertmanager_webhook_log()
+ incident = frappe.get_last_doc("Incident")
+ incident.db_set("status", "Acknowledged")
+ resolve_incidents()
+ mock_calls_create.assert_not_called()
+ incident.reload() # datetime conversion
+ incident.db_set(
+ "modified",
+ incident.modified - timedelta(seconds=CALL_REPEAT_INTERVAL_NIGHT + 10),
+ update_modified=False,
+ ) # assume night interval is longer
+ resolve_incidents()
+ mock_calls_create.assert_called_once()
+
+ def test_repeat_call_calls_acknowledging_person_first(self):
+ create_test_alertmanager_webhook_log(
+ creation=frappe.utils.add_to_date(
+ frappe.utils.now(), minutes=-CONFIRMATION_THRESHOLD_SECONDS_NIGHT
+ )
+ )
+ incident = frappe.get_last_doc("Incident")
+ incident.db_set("status", "Confirmed")
+ incident.db_set(
+ "creation",
+ incident.creation
+ - timedelta(seconds=CONFIRMATION_THRESHOLD_SECONDS_NIGHT + CALL_THRESHOLD_SECONDS_NIGHT + 10),
+ )
+
+ with patch(
+ "press.press.doctype.incident.test_incident.MockTwilioCallList.create",
+ side_effect=[
+ MockTwilioCallList("busy").create(),
+ MockTwilioCallList("completed").create(),
+ ],
+ ):
+ resolve_incidents() # second guy picks up
+
+ incident.reload()
+ incident.db_set(
+ "modified",
+ incident.modified - timedelta(seconds=CALL_REPEAT_INTERVAL_NIGHT + 10),
+ update_modified=False,
+ )
+ with patch(
+ "press.press.doctype.incident.test_incident.MockTwilioCallList.create",
+ wraps=MockTwilioCallList("completed").create,
+ ) as mock_calls_create:
+ resolve_incidents()
+ mock_calls_create.assert_called_with(
+ to=self.test_phno_2, from_=self.from_, url="http://demo.twilio.com/docs/voice.xml"
+ )
+
+ @patch.object(TelegramMessage, "enqueue")
+ def test_telegram_message_is_sent_when_unable_to_reach_twilio(self, mock_telegram_send):
+ create_test_alertmanager_webhook_log()
+ incident = frappe.get_last_doc("Incident")
+ with (
+ patch.object(MockTwilioCallList, "create", side_effect=TwilioRestException("test", 500)),
+ suppress(TwilioRestException),
+ ):
+ incident.call_humans()
+ mock_telegram_send.assert_called_once()
+
+ def get_5_min_load_avg_prometheus_response(self, load_avg: float):
+ return {
+ "datasets": [
+ {
+ "name": {
+ "__name__": "node_load5",
+ "cluster": "Default",
+ "instance": "n1.local.frappe.dev",
+ "job": "node",
+ },
+ "values": [load_avg],
+ }
+ ],
+ "labels": [
+ datetime(2025, 1, 17, 12, 40, 41, 241000, tzinfo=zoneinfo.ZoneInfo(key="Asia/Kolkata")),
+ ],
+ }
+
+ def test_high_load_avg_on_resource_makes_it_affected(self):
+ create_test_alertmanager_webhook_log()
+ incident: Incident = frappe.get_last_doc("Incident")
+ with patch(
+ "press.press.doctype.incident.incident.prometheus_query",
+ side_effect=[
+ self.get_5_min_load_avg_prometheus_response(2.0),
+ self.get_5_min_load_avg_prometheus_response(32.0),
+ self.get_5_min_load_avg_prometheus_response(2.0),
+ ],
+ ):
+ incident.identify_affected_resource()
+ self.assertEqual(incident.resource, incident.server)
+ self.assertEqual(incident.resource_type, "Server")
+
+ def test_no_response_from_monitor_on_resource_makes_it_affected(self):
+ create_test_alertmanager_webhook_log()
+ incident: Incident = frappe.get_last_doc("Incident")
+ incident.identify_affected_resource()
+ self.assertEqual(
+ incident.resource, frappe.get_value("Server", incident.server, "database_server")
+ ) # database is checked first because, historically, it's the likelier culprit
+ self.assertEqual(incident.resource_type, "Database Server")
diff --git a/press/press/doctype/incident_alerts/__init__.py b/press/press/doctype/incident_alerts/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/incident_alerts/incident_alerts.json b/press/press/doctype/incident_alerts/incident_alerts.json
new file mode 100644
index 00000000000..f5acc304bbf
--- /dev/null
+++ b/press/press/doctype/incident_alerts/incident_alerts.json
@@ -0,0 +1,48 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2023-10-17 13:25:42.597749",
+ "default_view": "List",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "alert",
+ "alert_type",
+ "combined_alerts"
+ ],
+ "fields": [
+ {
+ "fieldname": "alert",
+ "fieldtype": "Link",
+ "label": "Alert",
+ "options": "Alertmanager Webhook Log"
+ },
+ {
+ "fetch_from": "alert.combined_alerts",
+ "fieldname": "combined_alerts",
+ "fieldtype": "Data",
+ "label": "Combined Alerts",
+ "read_only": 1
+ },
+ {
+ "fetch_from": "alert.alert",
+ "fieldname": "alert_type",
+ "fieldtype": "Data",
+ "label": "Alert Type",
+ "read_only": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2023-10-18 10:40:59.417560",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Incident Alerts",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/incident_alerts/incident_alerts.py b/press/press/doctype/incident_alerts/incident_alerts.py
new file mode 100644
index 00000000000..49cf86f5403
--- /dev/null
+++ b/press/press/doctype/incident_alerts/incident_alerts.py
@@ -0,0 +1,25 @@
+# Copyright (c) 2023, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class IncidentAlerts(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ alert: DF.Link | None
+ alert_type: DF.Data | None
+ combined_alerts: DF.Data | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/incident_settings/__init__.py b/press/press/doctype/incident_settings/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/incident_settings/incident_settings.js b/press/press/doctype/incident_settings/incident_settings.js
new file mode 100644
index 00000000000..2a7fabb499f
--- /dev/null
+++ b/press/press/doctype/incident_settings/incident_settings.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2023, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Incident Settings", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/incident_settings/incident_settings.json b/press/press/doctype/incident_settings/incident_settings.json
new file mode 100644
index 00000000000..79d012083ed
--- /dev/null
+++ b/press/press/doctype/incident_settings/incident_settings.json
@@ -0,0 +1,135 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2023-12-14 09:23:55.912233",
+ "default_view": "List",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "users",
+ "self_hosted_users",
+ "section_break_rnxb",
+ "enable_incident_detection",
+ "phone_call_alerts",
+ "email_alerts",
+ "grafana_screenshots",
+ "column_break_ehby",
+ "confirmation_threshold_day",
+ "call_threshold_day",
+ "call_repeat_interval_day",
+ "column_break_voyg",
+ "confirmation_threshold_night",
+ "call_threshold_night",
+ "call_repeat_interval_night"
+ ],
+ "fields": [
+ {
+ "fieldname": "users",
+ "fieldtype": "Table",
+ "label": "Users",
+ "options": "Incident Settings User"
+ },
+ {
+ "default": "1",
+ "fieldname": "phone_call_alerts",
+ "fieldtype": "Check",
+ "label": "Phone Call Alerts"
+ },
+ {
+ "default": "1",
+ "fieldname": "enable_incident_detection",
+ "fieldtype": "Check",
+ "label": "Enable Incident Detection"
+ },
+ {
+ "fieldname": "self_hosted_users",
+ "fieldtype": "Table",
+ "label": "Self Hosted Users",
+ "options": "Incident Settings Self Hosted User"
+ },
+ {
+ "fieldname": "section_break_rnxb",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "column_break_voyg",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "confirmation_threshold_day",
+ "fieldtype": "Duration",
+ "hide_days": 1,
+ "label": "Confirmation Threshold Day"
+ },
+ {
+ "fieldname": "confirmation_threshold_night",
+ "fieldtype": "Duration",
+ "hide_days": 1,
+ "label": "Confirmation Threshold Night"
+ },
+ {
+ "fieldname": "call_threshold_day",
+ "fieldtype": "Duration",
+ "hide_days": 1,
+ "label": "Call Threshold Day"
+ },
+ {
+ "fieldname": "call_threshold_night",
+ "fieldtype": "Duration",
+ "hide_days": 1,
+ "label": "Call Threshold Night"
+ },
+ {
+ "fieldname": "call_repeat_interval_day",
+ "fieldtype": "Duration",
+ "hide_days": 1,
+ "label": "Call Repeat Interval Day"
+ },
+ {
+ "fieldname": "call_repeat_interval_night",
+ "fieldtype": "Duration",
+ "hide_days": 1,
+ "label": "Call Repeat Interval Night"
+ },
+ {
+ "fieldname": "column_break_ehby",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "email_alerts",
+ "fieldtype": "Check",
+ "label": "Email Alerts"
+ },
+ {
+ "default": "0",
+ "fieldname": "grafana_screenshots",
+ "fieldtype": "Check",
+ "label": "Grafana Screenshots"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "issingle": 1,
+ "links": [],
+ "modified": "2025-01-24 11:07:55.944210",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Incident Settings",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "print": 1,
+ "read": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/incident_settings/incident_settings.py b/press/press/doctype/incident_settings/incident_settings.py
new file mode 100644
index 00000000000..d7d741d911a
--- /dev/null
+++ b/press/press/doctype/incident_settings/incident_settings.py
@@ -0,0 +1,38 @@
+# Copyright (c) 2023, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from __future__ import annotations
+
+from frappe.model.document import Document
+
+
+class IncidentSettings(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.incident_settings_self_hosted_user.incident_settings_self_hosted_user import (
+ IncidentSettingsSelfHostedUser,
+ )
+ from press.press.doctype.incident_settings_user.incident_settings_user import IncidentSettingsUser
+
+ call_repeat_interval_day: DF.Duration | None
+ call_repeat_interval_night: DF.Duration | None
+ call_threshold_day: DF.Duration | None
+ call_threshold_night: DF.Duration | None
+ confirmation_threshold_day: DF.Duration | None
+ confirmation_threshold_night: DF.Duration | None
+ email_alerts: DF.Check
+ enable_incident_detection: DF.Check
+ grafana_screenshots: DF.Check
+ phone_call_alerts: DF.Check
+ self_hosted_users: DF.Table[IncidentSettingsSelfHostedUser]
+ users: DF.Table[IncidentSettingsUser]
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/incident_settings/test_incident_settings.py b/press/press/doctype/incident_settings/test_incident_settings.py
new file mode 100644
index 00000000000..08dcaf2b2a4
--- /dev/null
+++ b/press/press/doctype/incident_settings/test_incident_settings.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2023, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class TestIncidentSettings(FrappeTestCase):
+ pass
diff --git a/press/press/doctype/incident_settings_self_hosted_user/__init__.py b/press/press/doctype/incident_settings_self_hosted_user/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/incident_settings_self_hosted_user/incident_settings_self_hosted_user.json b/press/press/doctype/incident_settings_self_hosted_user/incident_settings_self_hosted_user.json
new file mode 100644
index 00000000000..79461be93ed
--- /dev/null
+++ b/press/press/doctype/incident_settings_self_hosted_user/incident_settings_self_hosted_user.json
@@ -0,0 +1,48 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2024-01-11 19:05:21.521739",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "user",
+ "column_break_ggho",
+ "phone"
+ ],
+ "fields": [
+ {
+ "fieldname": "user",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "User",
+ "options": "User",
+ "reqd": 1
+ },
+ {
+ "fetch_from": "user.phone",
+ "fetch_if_empty": 1,
+ "fieldname": "phone",
+ "fieldtype": "Phone",
+ "in_list_view": 1,
+ "label": "Phone",
+ "reqd": 1
+ },
+ {
+ "fieldname": "column_break_ggho",
+ "fieldtype": "Column Break"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2024-01-11 19:09:17.571549",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Incident Settings Self Hosted User",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/incident_settings_self_hosted_user/incident_settings_self_hosted_user.py b/press/press/doctype/incident_settings_self_hosted_user/incident_settings_self_hosted_user.py
new file mode 100644
index 00000000000..c0945331130
--- /dev/null
+++ b/press/press/doctype/incident_settings_self_hosted_user/incident_settings_self_hosted_user.py
@@ -0,0 +1,24 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class IncidentSettingsSelfHostedUser(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ phone: DF.Phone
+ user: DF.Link
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/incident_settings_user/__init__.py b/press/press/doctype/incident_settings_user/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/incident_settings_user/incident_settings_user.json b/press/press/doctype/incident_settings_user/incident_settings_user.json
new file mode 100644
index 00000000000..01d91c26ea7
--- /dev/null
+++ b/press/press/doctype/incident_settings_user/incident_settings_user.json
@@ -0,0 +1,49 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2023-12-14 09:27:19.932797",
+ "default_view": "List",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "user",
+ "column_break_fmmd",
+ "phone"
+ ],
+ "fields": [
+ {
+ "fieldname": "user",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "User",
+ "options": "User",
+ "reqd": 1
+ },
+ {
+ "fieldname": "column_break_fmmd",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fetch_from": "user.phone",
+ "fetch_if_empty": 1,
+ "fieldname": "phone",
+ "fieldtype": "Phone",
+ "in_list_view": 1,
+ "label": "Phone",
+ "reqd": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2024-01-11 19:09:25.518329",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Incident Settings User",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/incident_settings_user/incident_settings_user.py b/press/press/doctype/incident_settings_user/incident_settings_user.py
new file mode 100644
index 00000000000..0244ca715af
--- /dev/null
+++ b/press/press/doctype/incident_settings_user/incident_settings_user.py
@@ -0,0 +1,24 @@
+# Copyright (c) 2023, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class IncidentSettingsUser(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ phone: DF.Phone
+ user: DF.Link
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/incident_suggestion/__init__.py b/press/press/doctype/incident_suggestion/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/incident_suggestion/incident_suggestion.json b/press/press/doctype/incident_suggestion/incident_suggestion.json
new file mode 100644
index 00000000000..b15f215d9c2
--- /dev/null
+++ b/press/press/doctype/incident_suggestion/incident_suggestion.json
@@ -0,0 +1,44 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-01-17 18:18:35.669380",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "title",
+ "method_name",
+ "apply"
+ ],
+ "fields": [
+ {
+ "fieldname": "title",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Title"
+ },
+ {
+ "fieldname": "apply",
+ "fieldtype": "Button",
+ "in_list_view": 1,
+ "label": "Apply"
+ },
+ {
+ "fieldname": "method_name",
+ "fieldtype": "Data",
+ "label": "Method Name"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2025-01-23 22:23:23.513166",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Incident Suggestion",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/incident_suggestion/incident_suggestion.py b/press/press/doctype/incident_suggestion/incident_suggestion.py
new file mode 100644
index 00000000000..9c4a5d77951
--- /dev/null
+++ b/press/press/doctype/incident_suggestion/incident_suggestion.py
@@ -0,0 +1,26 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from __future__ import annotations
+
+from frappe.model.document import Document
+
+
+class IncidentSuggestion(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ method_name: DF.Data | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ title: DF.Data | None
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/incident_updates/__init__.py b/press/press/doctype/incident_updates/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/incident_updates/incident_updates.json b/press/press/doctype/incident_updates/incident_updates.json
new file mode 100644
index 00000000000..4d98ee1d97c
--- /dev/null
+++ b/press/press/doctype/incident_updates/incident_updates.json
@@ -0,0 +1,41 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2023-10-17 18:01:33.787818",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "update_note",
+ "update_time"
+ ],
+ "fields": [
+ {
+ "columns": 6,
+ "fieldname": "update_note",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Update Note"
+ },
+ {
+ "columns": 2,
+ "default": "now",
+ "fieldname": "update_time",
+ "fieldtype": "Datetime",
+ "in_list_view": 1,
+ "label": "Update Time"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2023-12-22 07:17:00.449557",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Incident Updates",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/incident_updates/incident_updates.py b/press/press/doctype/incident_updates/incident_updates.py
new file mode 100644
index 00000000000..ad13f942a75
--- /dev/null
+++ b/press/press/doctype/incident_updates/incident_updates.py
@@ -0,0 +1,24 @@
+# Copyright (c) 2023, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class IncidentUpdates(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ update_note: DF.Data | None
+ update_time: DF.Datetime | None
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/inspect_trace_id/__init__.py b/press/press/doctype/inspect_trace_id/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/inspect_trace_id/inspect_trace_id.js b/press/press/doctype/inspect_trace_id/inspect_trace_id.js
new file mode 100644
index 00000000000..73296644eb4
--- /dev/null
+++ b/press/press/doctype/inspect_trace_id/inspect_trace_id.js
@@ -0,0 +1,13 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('Inspect Trace ID', {
+ refresh(frm) {
+ frm.disable_save();
+ },
+ trace_id(frm) {
+ if (frm.doc.trace_id) {
+ frm.call('fetch');
+ }
+ },
+});
diff --git a/press/press/doctype/inspect_trace_id/inspect_trace_id.json b/press/press/doctype/inspect_trace_id/inspect_trace_id.json
new file mode 100644
index 00000000000..c750a4d7efd
--- /dev/null
+++ b/press/press/doctype/inspect_trace_id/inspect_trace_id.json
@@ -0,0 +1,51 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-05-12 18:24:34.545167",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "trace_id",
+ "data"
+ ],
+ "fields": [
+ {
+ "fieldname": "trace_id",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Trace ID",
+ "reqd": 1
+ },
+ {
+ "fieldname": "data",
+ "fieldtype": "Code",
+ "label": "Data",
+ "read_only": 1
+ }
+ ],
+ "grid_page_length": 50,
+ "hide_toolbar": 1,
+ "index_web_pages_for_search": 1,
+ "is_virtual": 1,
+ "issingle": 1,
+ "links": [],
+ "modified": "2025-05-12 18:28:47.694295",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Inspect Trace ID",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "email": 1,
+ "print": 1,
+ "read": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/inspect_trace_id/inspect_trace_id.py b/press/press/doctype/inspect_trace_id/inspect_trace_id.py
new file mode 100644
index 00000000000..dca8e4280fb
--- /dev/null
+++ b/press/press/doctype/inspect_trace_id/inspect_trace_id.py
@@ -0,0 +1,83 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+from __future__ import annotations
+
+import frappe
+import requests
+from frappe.model.document import Document
+from frappe.utils import add_to_date, now_datetime
+from frappe.utils.password import get_decrypted_password
+
+from press.utils import convert_user_timezone_to_utc
+
+
+class InspectTraceID(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ data: DF.Code | None
+ trace_id: DF.Data
+ # end: auto-generated types
+
+ def load_from_db(self):
+ frappe.only_for("Desk User")
+
+ @frappe.whitelist()
+ def fetch(self):
+ frappe.only_for("Desk User")
+
+ if not self.trace_id:
+ return
+
+ log_server = frappe.db.get_single_value("Press Settings", "log_server")
+ if not log_server:
+ return
+
+ url = f"https://{log_server}/elasticsearch/filebeat-*/_search"
+ password = get_decrypted_password("Log Server", log_server, "kibana_password")
+
+ start_datetime = convert_user_timezone_to_utc(add_to_date(days=-30))
+ end_datetime = convert_user_timezone_to_utc(now_datetime())
+
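+ # search the last 30 days of filebeat logs for the single record matching this trace id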
+ query = {
+ "query": {
+ "bool": {
+ "filter": [
+ {"match_phrase": {"json.uuid": self.trace_id}},
+ {"range": {"@timestamp": {"gt": start_datetime, "lte": end_datetime}}},
+ ],
+ }
+ },
+ "size": 1,
+ }
+
+ response = requests.post(url, json=query, auth=("frappe", password)).json()
+ records = response.get("hits", {}).get("hits", [])
+ self.data = frappe.as_json(records[0] if records else "")
+
+ # Not relevant
+ def db_update(self):
+ raise NotImplementedError
+
+ def delete(self):
+ raise NotImplementedError
+
+ def db_insert(self, *args, **kwargs):
+ raise NotImplementedError
+
+ @staticmethod
+ def get_list(filters=None, page_length=20, **kwargs):
+ pass
+
+ @staticmethod
+ def get_count(filters=None, **kwargs):
+ pass
+
+ @staticmethod
+ def get_stats(**kwargs):
+ pass
diff --git a/press/press/doctype/inspect_trace_id/test_inspect_trace_id.py b/press/press/doctype/inspect_trace_id/test_inspect_trace_id.py
new file mode 100644
index 00000000000..551577ba216
--- /dev/null
+++ b/press/press/doctype/inspect_trace_id/test_inspect_trace_id.py
@@ -0,0 +1,13 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+from frappe.tests.utils import FrappeTestCase
+
+
+class IntegrationTestInspectTraceID(FrappeTestCase):
+ """
+ Integration tests for InspectTraceID.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/invoice/fixtures/stripe_payment_intent_succeeded_webhook.json b/press/press/doctype/invoice/fixtures/stripe_payment_intent_succeeded_webhook.json
index a18f7853b18..cc2b84b6247 100644
--- a/press/press/doctype/invoice/fixtures/stripe_payment_intent_succeeded_webhook.json
+++ b/press/press/doctype/invoice/fixtures/stripe_payment_intent_succeeded_webhook.json
@@ -50,7 +50,10 @@
"id": "ch_1HtDyjGjnxV0XKmrM13MbJf2",
"invoice": null,
"livemode": false,
- "metadata": {},
+ "metadata": {
+ "gst": "144.36",
+ "payment_for": "prepaid_credits"
+ },
"object": "charge",
"on_behalf_of": null,
"order": null,
@@ -123,7 +126,10 @@
"invoice": null,
"last_payment_error": null,
"livemode": false,
- "metadata": {},
+ "metadata": {
+ "gst": "144.36",
+ "payment_for": "prepaid_credits"
+ },
"next_action": null,
"object": "payment_intent",
"on_behalf_of": null,
diff --git a/press/press/doctype/invoice/invoice.js b/press/press/doctype/invoice/invoice.js
index 0fc090b6e36..a1ff9f357a9 100644
--- a/press/press/doctype/invoice/invoice.js
+++ b/press/press/doctype/invoice/invoice.js
@@ -43,24 +43,42 @@ frappe.ui.form.on('Invoice', {
}
if (frm.doc.status == 'Paid' && frm.doc.stripe_invoice_id) {
- let btn = frm.add_custom_button('Refund Invoice', () =>
- frappe.confirm(
- 'This will refund the total amount paid on this invoice from Stripe. Continue?',
- () =>
- frm
+ frm.add_custom_button('Refund Invoice', () => {
+ let d = new frappe.ui.Dialog({
+ title: 'Refund Invoice',
+ fields: [
+ {
+ label: 'Reason',
+ fieldname: 'reason',
+ fieldtype: 'Data',
+ },
+ ],
+ primary_action({ reason }) {
+ if (!reason) {
+ frappe.msgprint('Please enter a reason for the refund.');
+ return;
+ }
+ d.hide();
+ frappe
.call({
doc: frm.doc,
method: 'refund',
- btn,
+ args: {
+ reason,
+ },
+ btn: d.get_primary_btn(),
})
.then((r) => {
if (r.message) {
- frappe.msgprint(`Refunded successfully.`);
+ frappe.msgprint('Refunded successfully.');
+ d.hide();
}
frm.refresh();
- }),
- ),
- );
+ });
+ },
+ });
+ d.show();
+ });
}
if (frm.doc.status == 'Invoice Created') {
diff --git a/press/press/doctype/invoice/invoice.json b/press/press/doctype/invoice/invoice.json
index 97aae929968..39f1583a7fa 100644
--- a/press/press/doctype/invoice/invoice.json
+++ b/press/press/doctype/invoice/invoice.json
@@ -11,32 +11,42 @@
"customer_email",
"currency",
"partner_email",
+ "frappe_partnership_date",
+ "customer_partnership_date",
+ "billing_email",
"column_break_4",
"status",
+ "refund_reason",
"marketplace",
"type",
"period_start",
"period_end",
"due_date",
"amended_from",
+ "budget_alert_sent",
"section_break_8",
"items",
+ "discounts_section",
+ "discount_note",
+ "column_break_mqqa",
+ "total_before_discount",
+ "total_discount_amount",
"section_break_10",
- "starting_balance",
- "ending_balance",
"payment_date",
"payment_attempt_count",
"payment_attempt_date",
+ "next_payment_attempt_date",
"payment_mode",
- "total_before_discount",
- "total_discount_amount",
+ "write_off_amount",
"column_break_15",
"total",
+ "total_before_tax",
"applied_credits",
"free_credits",
"amount_due",
+ "gst",
+ "amount_due_with_tax",
"amount_paid",
- "payout",
"section_break_15",
"credit_allocations",
"stripe_billing_section",
@@ -58,6 +68,14 @@
"column_break_44",
"razorpay_payment_id",
"razorpay_payment_method",
+ "mpesa_billing_section",
+ "mpesa_payment_record",
+ "mpesa_receipt_number",
+ "mpesa_invoice",
+ "mpesa_invoice_pdf",
+ "column_break_ouox",
+ "mpesa_request_id",
+ "mpesa_merchant_id",
"section_break_47",
"discounts"
],
@@ -69,7 +87,8 @@
"in_standard_filter": 1,
"label": "Team",
"options": "Team",
- "reqd": 1
+ "reqd": 1,
+ "search_index": 1
},
{
"fieldname": "customer_name",
@@ -77,7 +96,6 @@
"label": "Customer Name"
},
{
- "fetch_from": "team.user",
"fieldname": "customer_email",
"fieldtype": "Data",
"label": "Customer Email",
@@ -161,7 +179,8 @@
"fieldname": "stripe_invoice_id",
"fieldtype": "Data",
"label": "Stripe Invoice ID",
- "no_copy": 1
+ "no_copy": 1,
+ "search_index": 1
},
{
"allow_on_submit": 1,
@@ -179,20 +198,6 @@
"label": "Currency",
"options": "Currency"
},
- {
- "fieldname": "starting_balance",
- "fieldtype": "Currency",
- "hidden": 1,
- "label": "Starting Balance",
- "options": "currency"
- },
- {
- "fieldname": "ending_balance",
- "fieldtype": "Currency",
- "hidden": 1,
- "label": "Ending Balance",
- "options": "currency"
- },
{
"fieldname": "column_break_15",
"fieldtype": "Column Break"
@@ -230,6 +235,7 @@
"no_copy": 1
},
{
+ "default": "0",
"fieldname": "applied_credits",
"fieldtype": "Currency",
"label": "Applied Credits",
@@ -305,7 +311,7 @@
"fieldname": "type",
"fieldtype": "Select",
"label": "Type",
- "options": "Subscription\nPrepaid Credits\nService\nSummary"
+ "options": "Subscription\nPrepaid Credits\nService\nSummary\nPartnership Fees"
},
{
"depends_on": "eval:doc.type == 'Prepaid Credits'",
@@ -324,7 +330,7 @@
"fieldname": "payment_mode",
"fieldtype": "Select",
"label": "Payment Mode",
- "options": "\nCard\nPrepaid Credits\nNEFT\nPartner Credits"
+ "options": "\nCard\nPrepaid Credits\nNEFT\nPartner Credits\nPaid By Partner"
},
{
"fieldname": "frappe_partner_order",
@@ -389,11 +395,6 @@
"label": "Partner Email",
"read_only": 1
},
- {
- "fieldname": "payout",
- "fieldtype": "Currency",
- "label": "Saas Developer Payout"
- },
{
"fetch_from": "razorpay_payment_record.payment_id",
"fieldname": "razorpay_payment_id",
@@ -405,8 +406,127 @@
"fieldname": "marketplace",
"fieldtype": "Check",
"label": "Marketplace"
+ },
+ {
+ "fetch_from": "team.partnership_date",
+ "fieldname": "customer_partnership_date",
+ "fieldtype": "Date",
+ "label": "Customer Partnership Date",
+ "read_only": 1
+ },
+ {
+ "fetch_from": "team.frappe_partnership_date",
+ "fieldname": "frappe_partnership_date",
+ "fieldtype": "Date",
+ "label": "Frappe Partnership Date",
+ "read_only": 1
+ },
+ {
+ "fieldname": "gst",
+ "fieldtype": "Currency",
+ "label": "GST",
+ "options": "currency"
+ },
+ {
+ "fieldname": "total_before_tax",
+ "fieldtype": "Currency",
+ "hidden": 1,
+ "label": "Total Before Tax",
+ "options": "currency"
+ },
+ {
+ "fieldname": "write_off_amount",
+ "fieldtype": "Float",
+ "label": "Write-Off Amount",
+ "precision": "9"
+ },
+ {
+ "fieldname": "amount_due_with_tax",
+ "fieldtype": "Currency",
+ "label": "Amount Due (Including Tax)",
+ "no_copy": 1,
+ "options": "currency"
+ },
+ {
+ "fieldname": "discounts_section",
+ "fieldtype": "Section Break",
+ "label": "Discounts"
+ },
+ {
+ "fieldname": "discount_note",
+ "fieldtype": "Data",
+ "label": "Discount Note"
+ },
+ {
+ "fieldname": "column_break_mqqa",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "billing_email",
+ "fieldtype": "Data",
+ "label": "Billing Email"
+ },
+ {
+ "depends_on": "eval:doc.payment_mode == \"Card\" && doc.status == \"Unpaid\"",
+ "fieldname": "next_payment_attempt_date",
+ "fieldtype": "Date",
+ "label": "Next Payment Attempt Date"
+ },
+ {
+ "depends_on": "eval: doc.status=='Refunded'",
+ "fieldname": "refund_reason",
+ "fieldtype": "Data",
+ "label": "Refund Reason"
+ },
+ {
+ "fieldname": "mpesa_billing_section",
+ "fieldtype": "Section Break",
+ "label": "Mpesa Billing"
+ },
+ {
+ "fieldname": "mpesa_payment_record",
+ "fieldtype": "Data",
+ "label": "Mpesa Payment Record"
+ },
+ {
+ "fieldname": "mpesa_request_id",
+ "fieldtype": "Data",
+ "label": "Mpesa Request ID"
+ },
+ {
+ "fieldname": "mpesa_receipt_number",
+ "fieldtype": "Data",
+ "label": "Mpesa Receipt Number"
+ },
+ {
+ "fieldname": "column_break_ouox",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "mpesa_merchant_id",
+ "fieldtype": "Data",
+ "label": "Mpesa Merchant ID"
+ },
+ {
+ "fieldname": "mpesa_invoice",
+ "fieldtype": "Data",
+ "label": "Mpesa Invoice"
+ },
+ {
+ "allow_on_submit": 1,
+ "fieldname": "mpesa_invoice_pdf",
+ "fieldtype": "Attach",
+ "label": "Mpesa Invoice PDF"
+ },
+ {
+ "default": "0",
+ "fieldname": "budget_alert_sent",
+ "fieldtype": "Check",
+ "hidden": 1,
+ "label": "Budget Alert Sent"
}
],
+ "grid_page_length": 50,
"is_submittable": 1,
"links": [
{
@@ -423,9 +543,14 @@
"group": "Documents",
"link_doctype": "Stripe Payment Event",
"link_fieldname": "invoice"
+ },
+ {
+ "group": "Webhook Logs",
+ "link_doctype": "Stripe Webhook Log",
+ "link_fieldname": "invoice"
}
],
- "modified": "2023-02-27 23:46:40.091022",
+ "modified": "2026-01-05 21:33:27.298700",
"modified_by": "Administrator",
"module": "Press",
"name": "Invoice",
@@ -456,10 +581,20 @@
"role": "Press Admin",
"submit": 1,
"write": 1
+ },
+ {
+ "read": 1,
+ "role": "Press Member"
+ },
+ {
+ "read": 1,
+ "report": 1,
+ "role": "Site Manager"
}
],
+ "row_format": "Dynamic",
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"track_changes": 1
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/invoice/invoice.py b/press/press/doctype/invoice/invoice.py
index 9d7113f8f5c..483261d8750 100644
--- a/press/press/doctype/invoice/invoice.py
+++ b/press/press/doctype/invoice/invoice.py
@@ -1,128 +1,381 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
-import frappe
+from __future__ import annotations
+
+import typing
+import frappe
from frappe import _
-from enum import Enum
-from press.utils import log_error
-from frappe.core.utils import find_all
-from frappe.utils import getdate, cint
+from frappe.model.document import Document
+from frappe.utils import cint, flt, getdate
from frappe.utils.data import fmt_money
+
from press.api.billing import get_stripe
-from frappe.model.document import Document
+from press.api.client import dashboard_whitelist
+from press.press.doctype.auto_scale_record.auto_scale_record import calculate_secondary_server_price
+from press.press.doctype.communication_info.communication_info import get_communication_info
+from press.utils import log_error
+from press.utils.billing import (
+ convert_stripe_money,
+ get_frappe_io_connection,
+ get_gateway_details,
+ get_partner_external_connection,
+ is_frappe_auth_disabled,
+)
+
+if typing.TYPE_CHECKING:
+ from press.press.doctype.usage_record.usage_record import UsageRecord
-from press.overrides import get_permission_query_conditions_for_doctype
-from press.utils.billing import get_frappe_io_connection, convert_stripe_money
+class Invoice(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
-class InvoiceDiscountType(Enum):
- FLAT_ON_TOTAL = "Flat On Total"
+ from typing import TYPE_CHECKING
+ if TYPE_CHECKING:
+ from frappe.types import DF
-discount_type_string_to_enum = {"Flat On Total": InvoiceDiscountType.FLAT_ON_TOTAL}
+ from press.press.doctype.invoice_credit_allocation.invoice_credit_allocation import (
+ InvoiceCreditAllocation,
+ )
+ from press.press.doctype.invoice_discount.invoice_discount import InvoiceDiscount
+ from press.press.doctype.invoice_item.invoice_item import InvoiceItem
+ from press.press.doctype.invoice_transaction_fee.invoice_transaction_fee import InvoiceTransactionFee
+
+ amended_from: DF.Link | None
+ amount_due: DF.Currency
+ amount_due_with_tax: DF.Currency
+ amount_paid: DF.Currency
+ applied_credits: DF.Currency
+ billing_email: DF.Data | None
+ budget_alert_sent: DF.Check
+ credit_allocations: DF.Table[InvoiceCreditAllocation]
+ currency: DF.Link | None
+ customer_email: DF.Data | None
+ customer_name: DF.Data | None
+ customer_partnership_date: DF.Date | None
+ discount_note: DF.Data | None
+ discounts: DF.Table[InvoiceDiscount]
+ due_date: DF.Date | None
+ exchange_rate: DF.Float
+ frappe_invoice: DF.Data | None
+ frappe_partner_order: DF.Data | None
+ frappe_partnership_date: DF.Date | None
+ free_credits: DF.Currency
+ gst: DF.Currency
+ invoice_pdf: DF.Attach | None
+ items: DF.Table[InvoiceItem]
+ marketplace: DF.Check
+ mpesa_invoice: DF.Data | None
+ mpesa_invoice_pdf: DF.Attach | None
+ mpesa_merchant_id: DF.Data | None
+ mpesa_payment_record: DF.Data | None
+ mpesa_receipt_number: DF.Data | None
+ mpesa_request_id: DF.Data | None
+ next_payment_attempt_date: DF.Date | None
+ partner_email: DF.Data | None
+ payment_attempt_count: DF.Int
+ payment_attempt_date: DF.Date | None
+ payment_date: DF.Date | None
+ payment_mode: DF.Literal["", "Card", "Prepaid Credits", "NEFT", "Partner Credits", "Paid By Partner"]
+ period_end: DF.Date | None
+ period_start: DF.Date | None
+ razorpay_order_id: DF.Data | None
+ razorpay_payment_id: DF.Data | None
+ razorpay_payment_method: DF.Data | None
+ razorpay_payment_record: DF.Link | None
+ refund_reason: DF.Data | None
+ status: DF.Literal[
+ "Draft", "Invoice Created", "Unpaid", "Paid", "Refunded", "Uncollectible", "Collected", "Empty"
+ ]
+ stripe_invoice_id: DF.Data | None
+ stripe_invoice_url: DF.Text | None
+ stripe_payment_intent_id: DF.Data | None
+ team: DF.Link
+ total: DF.Currency
+ total_before_discount: DF.Currency
+ total_before_tax: DF.Currency
+ total_discount_amount: DF.Currency
+ transaction_amount: DF.Currency
+ transaction_fee: DF.Currency
+ transaction_fee_details: DF.Table[InvoiceTransactionFee]
+ transaction_net: DF.Currency
+ type: DF.Literal["Subscription", "Prepaid Credits", "Service", "Summary", "Partnership Fees"]
+ write_off_amount: DF.Float
+ # end: auto-generated types
+
+ dashboard_fields = (
+ "period_start",
+ "period_end",
+ "team",
+ "items",
+ "currency",
+ "type",
+ "payment_mode",
+ "total",
+ "total_before_discount",
+ "total_before_tax",
+ "partner_email",
+ "amount_due",
+ "amount_paid",
+ "docstatus",
+ "gst",
+ "applied_credits",
+ "status",
+ "due_date",
+ "total_discount_amount",
+ "invoice_pdf",
+ "stripe_invoice_url",
+ "amount_due_with_tax",
+ "mpesa_invoice",
+ "mpesa_invoice_pdf",
+ "customer_name",
+ )
+ @staticmethod
+ def get_list_query(query, filters=None, **list_args):
+ StripeWebhookLog = frappe.qb.DocType("Stripe Webhook Log")
+ Invoice = frappe.qb.DocType("Invoice")
+
+ partner_customer = filters.get("partner_customer")
+ if partner_customer:
+ team_name = filters.get("team")
+ due_date = filters.get("due_date")
+ filters.pop("partner_customer")
+ query = (
+ frappe.qb.from_(Invoice)
+ .select(Invoice.name, Invoice.total, Invoice.amount_due, Invoice.status, Invoice.due_date)
+ .where(
+ (Invoice.team == team_name)
+ & (Invoice.due_date >= due_date[1])
+ & (Invoice.type == "Subscription")
+ )
+ )
+
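+ # left join any payment_intent.payment_failed webhook log so the dashboard can surface the card error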
+ invoices = (
+ query.select(StripeWebhookLog.name.as_("stripe_payment_failed"))
+ .left_join(StripeWebhookLog)
+ .on(
+ (Invoice.name == StripeWebhookLog.invoice)
+ & (StripeWebhookLog.event_type == "payment_intent.payment_failed")
+ )
+ .groupby(Invoice.name)
+ ).run(as_dict=True)
+
+ for invoice in invoices:
+ if stripe_log := invoice.stripe_payment_failed:
+ payload, failed_payment_method = frappe.db.get_value(
+ "Stripe Webhook Log", stripe_log, ["payload", "stripe_payment_method"]
+ )
+ payload = frappe.parse_json(payload)
+ invoice.stripe_payment_error = (
+ payload.get("data", {}).get("object", {}).get("last_payment_error", {}).get("message")
+ )
+ invoice.stripe_payment_failed_card = frappe.db.get_value(
+ "Stripe Payment Method", failed_payment_method, "last_4"
+ )
+
+ return invoices
+
+ def get_doc(self, doc):
+ doc.invoice_pdf = self.invoice_pdf or (self.currency == "USD" and self.get_pdf())
+ currency = frappe.get_value("Team", self.team, "currency")
+ price_field = "price_inr" if currency == "INR" else "price_usd"
+ currency_symbol = "₹" if currency == "INR" else "$"
+
+ for item in doc["items"]:
+ if item.document_type in ("Server", "Database Server"):
+ is_primary = frappe.get_value(item.document_type, item.document_name, "is_primary")
+ item.document_name = frappe.get_value(item.document_type, item.document_name, "title")
+ if server_plan := frappe.get_value("Server Plan", item.plan, price_field):
+ if not is_primary and item.document_type == "Server":
+ item.plan = (
+ f"{currency_symbol}{calculate_secondary_server_price(self.team, item.plan)}/hour"
+ )
+ else:
+ item.plan = f"{currency_symbol}{server_plan}/mo"
+ elif server_plan := frappe.get_value("Server Storage Plan", item.plan, price_field):
+ item.plan = f"Storage Add-on {currency_symbol}{server_plan}/GB"
+
+ elif item.document_type == "Marketplace App":
+ item.document_name = frappe.get_value(item.document_type, item.document_name, "title")
+ item.plan = (
+ f"{currency_symbol}{frappe.get_value('Marketplace App Plan', item.plan, price_field)}"
+ )
+ elif item.document_type == "Site":
+ hostname = frappe.get_value(item.document_type, item.document_name, "host_name")
+ if hostname:
+ item.document_name = hostname
+
+ @dashboard_whitelist()
+ def stripe_payment_url(self):
+ if not self.stripe_invoice_id:
+ return
+ frappe.response.location = self.get_stripe_payment_url()
+ frappe.response.type = "redirect"
+
+ def get_stripe_payment_url(self):
+ stripe_link_expired = (
+ self.status == "Unpaid" and frappe.utils.date_diff(frappe.utils.now(), self.due_date) > 30
+ )
+ if stripe_link_expired:
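+ # the stored hosted-invoice link may have expired; fetch a fresh URL from Stripe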
+ stripe = get_stripe()
+ stripe_invoice = stripe.Invoice.retrieve(self.stripe_invoice_id)
+ url = stripe_invoice.hosted_invoice_url
+ else:
+ url = self.stripe_invoice_url
+ return url
-class Invoice(Document):
def validate(self):
self.validate_team()
self.validate_dates()
self.validate_duplicate()
self.validate_items()
- self.validate_amount()
+ self.calculate_values()
self.compute_free_credits()
def before_submit(self):
if self.total > 0 and self.status != "Paid":
frappe.throw("Invoice must be Paid to be submitted")
+ def calculate_values(self):
+ if self.status == "Paid" and self.docstatus == 1:
+ # don't recalculate if the invoice is already paid and submitted
+ return
+ self.calculate_total()
+ self.calculate_discounts()
+ self.calculate_amount_due()
+ self.apply_taxes_if_applicable()
+
@frappe.whitelist()
- def finalize_invoice(self):
+ def finalize_invoice(self): # noqa: C901
if self.type == "Prepaid Credits":
return
+ self.calculate_values()
+
if self.total == 0:
self.status = "Empty"
self.submit()
return
- team_enabled = frappe.db.get_value("Team", self.team, "enabled")
- if not team_enabled:
+ team = frappe.get_doc("Team", self.team)
+ if not team.enabled:
self.add_comment("Info", "Skipping finalize invoice because team is disabled")
+ self.save()
return
- if self.partner_email:
- self.apply_partner_discount()
+ if self.stripe_invoice_id:
+ # if stripe invoice is already created and paid,
+ # then update status and return early
+ stripe = get_stripe()
+ invoice = stripe.Invoice.retrieve(self.stripe_invoice_id)
+ if invoice.status == "paid":
+ self.status = "Paid"
+ self.update_transaction_details(invoice.charge)
+ self.submit()
+ self.unsuspend_sites_if_applicable()
+ return
# set as unpaid by default
self.status = "Unpaid"
+ self.update_item_descriptions()
- self.amount_due = self.total
-
- if self.payment_mode == "Partner Credits":
- self.payment_attempt_count += 1
- self.save()
- frappe.db.commit()
-
- self.cancel_applied_credits()
- self.apply_partner_credits()
- return
+ if self.amount_due > 0:
+ self.apply_credit_balance()
- self.apply_credit_balance()
if self.amount_due == 0:
self.status = "Paid"
- self.update_item_descriptions()
+ if self.status == "Paid" and self.stripe_invoice_id and self.amount_paid == 0:
+ stripe = get_stripe()
+ invoice = stripe.Invoice.retrieve(self.stripe_invoice_id)
+ payment_intent = stripe.PaymentIntent.retrieve(invoice.payment_intent)
+ if payment_intent.status == "processing":
+ # mark the fc invoice as Paid
+ # if the payment intent is processing, it means the invoice cannot be voided yet
+ # wait for invoice to be updated and then mark it as void if payment failed
+ # or issue a refund if succeeded
+ self.save() # status is already Paid, so no need to set again
+ else:
+ self.change_stripe_invoice_status("Void")
+ self.add_comment(
+ text=(
+ f"Stripe Invoice {self.stripe_invoice_id} voided because payment is done via credits."
+ )
+ )
- if self.payment_mode == "Prepaid Credits" and self.amount_due > 0:
- self.payment_attempt_count += 1
- self.save()
- frappe.db.commit()
+ self.save()
- frappe.throw(
- "Not enough credits for this invoice. Change payment mode to Card to"
- " pay using Stripe."
- )
+ if self.amount_due > 0:
+ if self.payment_mode == "Prepaid Credits":
+ self.add_comment(
+ "Comment",
+ "Not enough credits for this invoice. Change payment mode to Card to pay using Stripe.",
+ )
+ # we shouldn't depend on payment_mode to decide whether to create stripe invoice or not
+ # there should be a separate field in team to decide whether to create automatic invoices or not
+ if self.payment_mode == "Card":
+ self.create_stripe_invoice()
- try:
- self.create_stripe_invoice()
- except Exception:
- frappe.db.rollback()
- self.reload()
+ if self.status == "Paid":
+ self.submit()
+ self.unsuspend_sites_if_applicable()
- # log the traceback as comment
- msg = "" + frappe.get_traceback() + " "
- self.add_comment("Comment", _("Stripe Invoice Creation Failed") + " " + msg)
+ def unsuspend_sites_if_applicable(self):
+ if (
+ frappe.db.count(
+ "Invoice",
+ {
+ "status": "Unpaid",
+ "team": self.team,
+ "type": "Subscription",
+ "docstatus": ("<", 2),
+ },
+ )
+ == 0
+ ):
+ # unsuspend sites only if all invoices are paid
+ team = frappe.get_cached_doc("Team", self.team)
+ team.unsuspend_sites(f"Invoice {self.name} Payment Successful.")
- if not self.stripe_invoice_id:
- # if stripe invoice was created, find it and set it
- # so that we avoid scenarios where Stripe Invoice was created but not set in Frappe Cloud
- stripe_invoice_id = self.find_stripe_invoice()
- if stripe_invoice_id:
- self.stripe_invoice_id = stripe_invoice_id
- self.status = "Invoice Created"
- self.save()
+ def calculate_total(self):
+ total = 0
+ for item in self.items:
+ total += item.amount
+ self.total = flt(total, 2)
- frappe.db.commit()
+ def apply_taxes_if_applicable(self):
+ self.amount_due_with_tax = self.amount_due
+ self.gst = 0
- raise
+ if self.payment_mode == "Prepaid Credits":
+ return
- self.save()
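+ # GST is added only for INR subscription invoices, at the rate configured in Press Settings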
+ if self.currency == "INR" and self.type == "Subscription":
+ gst_rate = frappe.db.get_single_value("Press Settings", "gst_percentage")
+ self.gst = flt(self.amount_due * gst_rate, 2)
+ self.amount_due_with_tax = flt(self.amount_due + self.gst, 2)
- if self.status == "Paid":
- self.submit()
+ def calculate_amount_due(self):
+ self.amount_due = flt(self.total - self.applied_credits, 2)
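+ # absorb rounding residues within ±0.1 as a write-off instead of leaving a tiny balance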
+ if self.amount_due < 0 and self.amount_due > -0.1:
+ self.write_off_amount = self.amount_due
+ self.amount_due = 0
- if frappe.db.count("Invoice", {"status": "Unpaid", "team": self.team}) < 2:
- # unsuspend sites only if all invoices are paid
- team = frappe.get_cached_doc("Team", self.team)
- team.unsuspend_sites(f"Invoice {self.name} Payment Successful.")
+ if self.amount_due > 0 and self.amount_due < 0.1:
+ self.write_off_amount = self.amount_due
+ self.amount_due = 0
def on_submit(self):
self.create_invoice_on_frappeio()
+ self.fetch_mpesa_invoice_pdf()
def on_update_after_submit(self):
self.create_invoice_on_frappeio()
+ self.fetch_mpesa_invoice_pdf()
def after_insert(self):
if self.get("amended_from"):
@@ -148,75 +401,99 @@ def after_insert(self):
)
def create_stripe_invoice(self):
- if self.payment_mode != "Card":
- return
-
- stripe = get_stripe()
-
- if self.type == "Prepaid Credits":
- return
-
- if self.status == "Paid":
- # void an existing invoice if payment was done via credits
- if self.stripe_invoice_id:
- stripe.Invoice.void_invoice(self.stripe_invoice_id)
- self.add_comment(
- text=(
- f"Stripe Invoice {self.stripe_invoice_id} voided because"
- " payment is done via credits."
- )
- )
- return
-
if self.stripe_invoice_id:
- invoice = stripe.Invoice.retrieve(self.stripe_invoice_id)
+ invoice = self.get_stripe_invoice()
stripe_invoice_total = convert_stripe_money(invoice.total)
- if self.amount_due == stripe_invoice_total:
+ if self.amount_due_with_tax == stripe_invoice_total:
# return if an invoice with the same amount is already created
return
- else:
- # if the amount is changed, void the stripe invoice and create a new one
- stripe.Invoice.void_invoice(self.stripe_invoice_id)
- formatted_amount = fmt_money(stripe_invoice_total, currency=self.currency)
- self.add_comment(
- text=(
- f"Stripe Invoice {self.stripe_invoice_id} of amount {formatted_amount} voided."
- )
- )
- self.stripe_invoice_id = ""
- self.stripe_invoice_url = ""
+ # if the amount is changed, void the stripe invoice and create a new one
+ self.change_stripe_invoice_status("Void")
+ formatted_amount = fmt_money(stripe_invoice_total, currency=self.currency)
+ self.add_comment(
+ text=(f"Stripe Invoice {self.stripe_invoice_id} of amount {formatted_amount} voided.")
+ )
+ self.stripe_invoice_id = ""
+ self.stripe_invoice_url = ""
+ self.save()
- if self.amount_due <= 0:
+ if self.amount_due_with_tax <= 0:
return
customer_id = frappe.db.get_value("Team", self.team, "stripe_customer_id")
- amount = int(self.amount_due * 100)
- stripe.InvoiceItem.create(
- customer=customer_id,
- description=self.get_stripe_invoice_item_description(),
- amount=amount,
- currency=self.currency.lower(),
- idempotency_key=f"invoiceitem:{self.name}:{amount}",
- )
- invoice = stripe.Invoice.create(
- customer=customer_id,
- collection_method="charge_automatically",
- auto_advance=True,
- idempotency_key=f"invoice:{self.name}:{amount}",
- )
- self.stripe_invoice_id = invoice["id"]
- self.status = "Invoice Created"
- self.save()
+ amount = int(self.amount_due_with_tax * 100)
+ self._make_stripe_invoice(customer_id, amount)
- def find_stripe_invoice(self):
+ def mandate_inactive(self, mandate_id):
stripe = get_stripe()
- invoices = stripe.Invoice.list(
- customer=frappe.db.get_value("Team", self.team, "stripe_customer_id")
+ mandate = stripe.Mandate.retrieve(mandate_id)
+ return mandate.status in ("inactive", "pending")
+
+ def _make_stripe_invoice(self, customer_id, amount):
+ mandate_id = self.get_mandate_id(customer_id)
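+ # a mandate that is inactive or pending can't be charged; fall back to Prepaid Credits mode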
+ if mandate_id and self.mandate_inactive(mandate_id):
+ frappe.db.set_value("Invoice", self.name, "payment_mode", "Prepaid Credits")
+ self.reload()
+ return None
+ try:
+ stripe = get_stripe()
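+ # amount-scoped idempotency keys let Stripe deduplicate retries of the same invoice creation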
+ invoice = stripe.Invoice.create(
+ customer=customer_id,
+ pending_invoice_items_behavior="exclude",
+ collection_method="charge_automatically",
+ auto_advance=True,
+ currency=self.currency.lower(),
+ payment_settings={"default_mandate": mandate_id},
+ idempotency_key=f"invoice:{self.name}:amount:{amount}",
+ )
+ stripe.InvoiceItem.create(
+ customer=customer_id,
+ invoice=invoice["id"],
+ description=self.get_stripe_invoice_item_description(),
+ amount=amount,
+ currency=self.currency.lower(),
+ idempotency_key=f"invoiceitem:{self.name}:amount:{amount}",
+ )
+ self.db_set(
+ {
+ "stripe_invoice_id": invoice["id"],
+ "status": "Invoice Created",
+ },
+ commit=True,
+ )
+ self.reload()
+ return invoice
+ except Exception:
+ frappe.db.rollback()
+ self.reload()
+
+ # log the traceback as comment
+ msg = "" + frappe.get_traceback() + " "
+ self.add_comment("Comment", _("Stripe Invoice Creation Failed") + " " + msg)
+ frappe.db.commit()
+
+ def get_mandate_id(self, customer_id):
+ mandate_id = frappe.get_value(
+ "Stripe Payment Method", {"team": self.team, "is_default": 1}, "stripe_mandate_id"
)
+ if not mandate_id:
+ return ""
+ return mandate_id
+
+ def find_stripe_invoice_if_not_set(self):
+ if self.stripe_invoice_id:
+ return
+ # if stripe invoice was created, find it and set it
+ # so that we avoid scenarios where Stripe Invoice was created but not set in Frappe Cloud
+ stripe = get_stripe()
+ invoices = stripe.Invoice.list(customer=frappe.db.get_value("Team", self.team, "stripe_customer_id"))
description = self.get_stripe_invoice_item_description()
for invoice in invoices.data:
- if invoice.lines.data[0].description == description and invoice.status != "void":
- return invoice["id"]
+ line_items = invoice.lines.data
+ if line_items and line_items[0].description == description and invoice.status != "void":
+ self.stripe_invoice_id = invoice["id"]
+ self.status = "Invoice Created"
+ self.save()
def get_stripe_invoice_item_description(self):
start = getdate(self.period_start)
@@ -230,13 +507,21 @@ def finalize_stripe_invoice(self):
stripe.Invoice.finalize_invoice(self.stripe_invoice_id)
def validate_duplicate(self):
- if self.type != "Subscription":
- return
+ invoice_exists = frappe.db.exists(
+ "Invoice",
+ {
+ "stripe_payment_intent_id": self.stripe_payment_intent_id,
+ "type": "Prepaid Credits",
+ "name": ("!=", self.name),
+ },
+ )
+ if self.type == "Prepaid Credits" and self.stripe_payment_intent_id and invoice_exists:
+ frappe.throw("Invoice with same Stripe payment intent exists", frappe.DuplicateEntryError)
- if self.period_start and self.period_end and self.is_new():
+ if self.type == "Subscription" and self.period_start and self.period_end and self.is_new():
query = (
f"select `name` from `tabInvoice` where team = '{self.team}' and"
- f" docstatus < 2 and ('{self.period_start}' between `period_start` and"
+ f" status = 'Draft' and ('{self.period_start}' between `period_start` and"
f" `period_end` or '{self.period_end}' between `period_start` and"
" `period_end`)"
)
@@ -250,43 +535,20 @@ def validate_duplicate(self):
)
def validate_team(self):
- team = frappe.get_cached_doc("Team", self.team)
+ team = frappe.get_doc("Team", self.team)
self.customer_name = team.billing_name or frappe.utils.get_fullname(self.team)
- self.customer_email = (
- frappe.db.get_value(
- "Communication Email", {"parent": team.user, "type": "invoices"}, ["value"]
- )
- or team.user
- )
+ self.customer_email = team.user
+ billing_emails = get_communication_info("Email", "Billing", "Team", self.team)
+ if billing_emails:
+ self.billing_email = billing_emails[0]
+ else:
+ self.billing_email = self.customer_email
self.currency = team.currency
if not self.payment_mode:
self.payment_mode = team.payment_mode
if not self.currency:
- frappe.throw(
- f"Cannot create Invoice because Currency is not set in Team {self.team}"
- )
-
- # To prevent copying of team level discounts again
- self.remove_previous_team_discounts()
-
- for invoice_discount in team.discounts:
- self.append(
- "discounts",
- {
- "discount_type": invoice_discount.discount_type,
- "based_on": invoice_discount.based_on,
- "percent": invoice_discount.percent,
- "amount": invoice_discount.amount,
- "via_team": True,
- },
- )
-
- def remove_previous_team_discounts(self):
- team_discounts = find_all(self.discounts, lambda x: x.via_team)
-
- for discount in team_discounts:
- self.remove(discount)
+ frappe.throw(f"Cannot create Invoice because Currency is not set in Team {self.team}")
def validate_dates(self):
if not self.period_start:
@@ -301,11 +563,62 @@ def validate_dates(self):
def update_item_descriptions(self):
for item in self.items:
- if not item.description and item.document_type == "Site" and item.plan:
- site_name = item.document_name.split(".archived")[0]
- plan = frappe.get_cached_value("Plan", item.plan, "plan_title")
+ if not item.description:
how_many_days = f"{cint(item.quantity)} day{'s' if item.quantity > 1 else ''}"
- item.description = f"{site_name} active for {how_many_days} on {plan} plan"
+ if item.document_type == "Site" and item.plan:
+ site_name = item.document_name.split(".archived")[0]
+ plan = frappe.get_cached_value("Site Plan", item.plan, "plan_title")
+ item.description = f"{site_name} active for {how_many_days} on {plan} plan"
+ elif item.document_type in ["Server", "Database Server"]:
+ server_title = frappe.get_cached_value(item.document_type, item.document_name, "title")
+ if item.plan == "Add-on Storage plan":
+ item.description = f"{server_title} Storage Add-on for {how_many_days}"
+ else:
+ item.description = f"{server_title} active for {how_many_days}"
+ elif item.document_type == "Server Snapshot":
+ item.description = f"{item.document_name} stored for {how_many_days}"
+ elif item.document_type == "Marketplace App":
+ app_title = frappe.get_cached_value("Marketplace App", item.document_name, "title")
+ item.description = f"Marketplace app {app_title} active for {how_many_days}"
+ else:
+ item.description = "Prepaid Credits"
+
+ def is_auto_scale_invoice_item(self, usage_record: UsageRecord) -> bool:
+ """Check if this a secondary server usage record"""
+ if usage_record.document_type != "Server":
+ return False
+
+ is_primary = frappe.db.get_value("Server", usage_record.document_name, "is_primary")
+ if is_primary:
+ return False
+
+ return True
+
+ def get_auto_scale_quantity(self, usage_record: UsageRecord) -> float:
+ """Get the duration the server was auto scaled for"""
+ last_up_scale_at = frappe.db.get_value(
+ "Auto Scale Record",
+ {
+ "secondary_server": usage_record.document_name,
+ "status": "Success",
+ "action": "Scale Up",
+ },
+ "modified",
+ )
+
+ last_down_scale_at = frappe.db.get_value(
+ "Auto Scale Record",
+ {
+ "secondary_server": usage_record.document_name,
+ "status": "Success",
+ "action": "Scale Down",
+ },
+ "modified",
+ )
+
+ # Since a successful scale down always follows a scale up, the gap between
+ # the two timestamps is the duration the secondary server stayed scaled up
+ if not (last_up_scale_at and last_down_scale_at):
+ return 0.0
+ scale_duration = last_down_scale_at - last_up_scale_at
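+ # e.g. scale up at 10:00 and scale down at 13:30 -> quantity of 3.5 hours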
+ return round(scale_duration.total_seconds() / 3600, 2)
def add_usage_record(self, usage_record):
if self.type != "Subscription":
@@ -336,7 +649,15 @@ def add_usage_record(self, usage_record):
},
)
- invoice_item.quantity = (invoice_item.quantity or 0) + 1
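+ # Auto-scaled secondary servers accumulate quantity in scaled-up hours,
+ # not in the usual one-unit-per-day counts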
+ if self.is_auto_scale_invoice_item(usage_record):
+ invoice_item.quantity = (
+ self.get_auto_scale_quantity(usage_record)
+ if not invoice_item.quantity
+ else invoice_item.quantity + self.get_auto_scale_quantity(usage_record)
+ )
+
+ else:
+ invoice_item.quantity = (invoice_item.quantity or 0) + 1
if usage_record.payout:
self.payout += usage_record.payout
@@ -369,12 +690,15 @@ def remove_usage_record(self, usage_record):
def get_invoice_item_for_usage_record(self, usage_record):
invoice_item = None
for row in self.items:
- if (
+ conditions = (
row.document_type == usage_record.document_type
and row.document_name == usage_record.document_name
and row.plan == usage_record.plan
and row.rate == usage_record.amount
- ):
+ )
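+ # Marketplace app usage is billed per site, so the site must match as well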
+ if row.document_type == "Marketplace App":
+ conditions = conditions and row.site == usage_record.site
+ if conditions:
invoice_item = row
return invoice_item
@@ -384,87 +708,31 @@ def validate_items(self):
if row.quantity == 0:
items_to_remove.append(row)
else:
- row.amount = row.quantity * row.rate
+ row.amount = flt((row.quantity * row.rate), 2)
for item in items_to_remove:
self.remove(item)
- def validate_amount(self):
- # Already Submitted
- if self.docstatus == 1:
- return
-
- total = 0
- for item in self.items:
- total += item.amount
-
- self.total_before_discount = total
- self.set_total_and_discount()
-
def compute_free_credits(self):
- self.free_credits = sum(
- [d.amount for d in self.credit_allocations if d.source == "Free Credits"]
- )
+ self.free_credits = sum([d.amount for d in self.credit_allocations if d.source == "Free Credits"])
- def apply_partner_discount(self):
- # check if discount is already added
- for discount in self.discounts:
- if discount.note == "Flat Partner Discount":
- return
-
- # give 10% discount for partners
- total_partner_discount = 0
+ def calculate_discounts(self):
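+ # Item-level percentage discounts are computed first, then summed with
+ # flat invoice-level discounts from the discounts child table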
for item in self.items:
- if item.document_type in ("Site", "Server", "Database Server"):
- item.discount = item.amount * 0.1
- total_partner_discount += item.discount
-
- if total_partner_discount > 0:
- self.append(
- "discounts",
- {
- "discount_type": "Flat On Total",
- "based_on": "Amount",
- "percent": 0,
- "amount": total_partner_discount,
- "note": "Flat Partner Discount",
- "via_team": False,
- },
- )
-
- self.save()
- self.reload()
+ if item.discount_percentage:
+ item.discount = flt(item.amount * (item.discount_percentage / 100), 2)
- def set_total_and_discount(self):
- total_discount_amount = 0
-
- for invoice_discount in self.discounts:
- discount_type = discount_type_string_to_enum[invoice_discount.discount_type]
- if discount_type == InvoiceDiscountType.FLAT_ON_TOTAL:
- total_discount_amount += self.get_flat_on_total_discount_amount(invoice_discount)
-
- self.total_discount_amount = total_discount_amount
- self.total = self.total_before_discount - total_discount_amount
-
- def get_flat_on_total_discount_amount(self, invoice_discount):
- discount_amount = 0
-
- if invoice_discount.based_on == "Amount":
- if invoice_discount.amount > self.total_before_discount:
- frappe.throw(
- f"Discount amount {invoice_discount.amount} cannot be"
- f" greater than total amount {self.total_before_discount}"
- )
+ self.total_discount_amount = sum([item.discount for item in self.items]) + sum(
+ [d.amount for d in self.discounts]
+ )
- discount_amount = invoice_discount.amount
- elif invoice_discount.based_on == "Percent":
- if invoice_discount.percent > 100:
- frappe.throw(
- f"Discount percentage {invoice_discount.percent} cannot be greater than 100%"
- )
- discount_amount = self.total_before_discount * (invoice_discount.percent / 100)
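+ # Teams flagged as non-profits get an extra percentage discount configured in Press Settings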
+ npo_discount_applicable = frappe.db.get_value("Team", self.team, "apply_npo_discount")
+ if npo_discount_applicable:
+ npo_discount = frappe.db.get_single_value("Press Settings", "npo_discount")
+ if npo_discount:
+ self.total_discount_amount += flt(self.total * (npo_discount / 100), 2)
- return discount_amount
+ self.total_before_discount = self.total
+ self.total = flt(self.total_before_discount - self.total_discount_amount, 2)
def on_cancel(self):
# make reverse entries for credit allocations
@@ -481,32 +749,9 @@ def on_cancel(self):
doc.insert()
doc.submit()
- def apply_partner_credits(self):
- client = self.get_frappeio_connection()
- response = client.session.post(
- f"{client.url}/api/method/consume_credits_against_fc_invoice",
- headers=client.headers,
- data={"invoice": self.as_json()},
- )
-
- if response.ok:
- res = response.json()
- partner_order = res.get("message")
-
- if partner_order:
- self.frappe_partner_order = partner_order
- self.amount_paid = self.amount_due
- self.status = "Paid"
- self.save()
- self.submit()
- else:
- self.add_comment(
- text="Failed to pay via Partner credits" + " " + response.text
- )
-
def apply_credit_balance(self):
- # cancel applied credits to re-apply available credits
- self.cancel_applied_credits()
+ # Previously we would cancel and re-apply credits, but that corrupted the balance
+ # transaction history, so applying credits is now an append-only operation
balance = frappe.get_cached_doc("Team", self.team).get_balance()
if balance <= 0:
@@ -527,7 +772,7 @@ def apply_credit_balance(self):
unallocated_balances.reverse()
total_allocated = 0
- due = self.total
+ due = self.amount_due
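+ # Consume unallocated balances in the reversed (assumed oldest-first) order
+ # until the due amount is covered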
for balance in unallocated_balances:
if due == 0:
break
@@ -559,30 +804,8 @@ def apply_credit_balance(self):
).insert()
balance_transaction.submit()
- self.applied_credits = total_allocated
- self.amount_due = self.total - self.applied_credits
-
- def cancel_applied_credits(self):
- for row in self.credit_allocations:
- doc = frappe.get_doc(
- doctype="Balance Transaction",
- type="Adjustment",
- source=row.source,
- team=self.team,
- amount=row.amount,
- description=(
- f"Reverse amount {row.get_formatted('amount')} of {row.transaction}"
- f" from invoice {self.name}"
- ),
- ).insert()
- doc.submit()
- self.applied_credits -= row.amount
-
- self.clear_credit_allocation_table()
- self.save()
-
- def clear_credit_allocation_table(self):
- self.set("credit_allocations", [])
+ self.applied_credits = sum(row.amount for row in self.credit_allocations)
+ self.calculate_values()
def create_next(self):
# the next invoice's period starts after this invoice ends
@@ -597,10 +820,10 @@ def create_next(self):
}, # Adding type 'Subscription' to ensure no other type messes with this
)
- if not already_exists:
- return frappe.get_doc(
- doctype="Invoice", team=self.team, period_start=next_start
- ).insert()
+ if already_exists:
+ return None
+
+ return frappe.get_doc(doctype="Invoice", team=self.team, period_start=next_start).insert()
def get_pdf(self):
print_format = self.meta.default_print_format
@@ -609,28 +832,35 @@ def get_pdf(self):
)
@frappe.whitelist()
- def create_invoice_on_frappeio(self):
+ def create_invoice_on_frappeio(self): # noqa: C901
if self.flags.skip_frappe_invoice:
- return
+ return None
if self.status != "Paid":
- return
+ return None
if self.amount_paid == 0:
- return
- if self.frappe_invoice or self.frappe_partner_order:
- return
+ return None
+ if self.frappe_invoice or self.frappe_partner_order or self.mpesa_receipt_number:
+ return None
+
+ if is_frappe_auth_disabled():
+ return None
try:
team = frappe.get_doc("Team", self.team)
- address = (
- frappe.get_doc("Address", team.billing_address) if team.billing_address else None
- )
+ address = frappe.get_doc("Address", team.billing_address) if team.billing_address else None
+ if not address:
+ # don't create invoice if address is not set
+ return None
+ if team.country != address.country:
+ # don't create invoice if team country and address country don't match
+ return None
client = self.get_frappeio_connection()
response = client.session.post(
f"{client.url}/api/method/create-fc-invoice",
headers=client.headers,
data={
"team": team.as_json(),
- "address": address.as_json(),
+ "address": address.as_json() if address else '""',
"invoice": self.as_json(),
},
)
@@ -657,9 +887,7 @@ def create_invoice_on_frappeio(self):
)
except Exception:
traceback = "" + frappe.get_traceback() + " "
- self.add_comment(
- text="Failed to create invoice on frappe.io" + " " + traceback
- )
+ self.add_comment(text="Failed to create invoice on frappe.io" + " " + traceback)
log_error(
"Frappe.io Invoice Creation Error",
@@ -669,12 +897,22 @@ def create_invoice_on_frappeio(self):
@frappe.whitelist()
def fetch_invoice_pdf(self):
if self.frappe_invoice:
+ from urllib.parse import urlencode
+
+ if is_frappe_auth_disabled():
+ return
+
client = self.get_frappeio_connection()
- url = (
- client.url + "/api/method/frappe.utils.print_format.download_pdf?"
- f"doctype=Sales%20Invoice&name={self.frappe_invoice}&"
- "format=Frappe%20Cloud&no_letterhead=0"
+ print_format = frappe.db.get_single_value("Press Settings", "print_format")
+ params = urlencode(
+ {
+ "doctype": "Sales Invoice",
+ "name": self.frappe_invoice,
+ "format": print_format,
+ "no_letterhead": 0,
+ }
)
+ url = client.url + "/api/method/frappe.utils.print_format.download_pdf?" + params
with client.session.get(url, headers=client.headers, stream=True) as r:
r.raise_for_status()
@@ -721,9 +959,11 @@ def update_transaction_details(self, stripe_charge=None):
},
)
self.save()
- return True
def update_razorpay_transaction_details(self, payment):
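+ # Fee and tax are assumed to be absent until Razorpay settles the payment;
+ # skip updating transaction details until at least one is present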
+ if not (payment["fee"] or payment["tax"]):
+ return
+
self.transaction_amount = convert_stripe_money(payment["amount"])
self.transaction_net = convert_stripe_money(payment["amount"] - payment["fee"])
self.transaction_fee = convert_stripe_money(payment["fee"])
@@ -753,6 +993,47 @@ def update_razorpay_transaction_details(self, payment):
self.save()
+ @frappe.whitelist()
+ def fetch_mpesa_invoice_pdf(self):
+ if not (self.mpesa_payment_record and self.mpesa_invoice):
+ return
+ gateway_info = get_gateway_details(self.mpesa_payment_record)
+ client = get_partner_external_connection(gateway_info[0])
+ try:
+ print_format = gateway_info[1]
+ from urllib.parse import urlencode
+
+ params = urlencode(
+ {
+ "doctype": "Sales Invoice",
+ "name": self.mpesa_invoice,
+ "format": print_format,
+ "no_letterhead": 0,
+ }
+ )
+ url = f"{client.url}/api/method/frappe.utils.print_format.download_pdf?{params}"
+
+ with client.session.get(url, headers=client.headers, stream=True) as r:
+ r.raise_for_status()
+ file_doc = frappe.get_doc(
+ {
+ "doctype": "File",
+ "attached_to_doctype": "Invoice",
+ "attached_to_name": self.name,
+ "attached_to_field": "mpesa_invoice_pdf",
+ "folder": "Home/Attachments",
+ "file_name": self.mpesa_invoice + ".pdf",
+ "is_private": 1,
+ "content": r.content,
+ }
+ )
+ file_doc.save(ignore_permissions=True)
+ self.mpesa_invoice_pdf = file_doc.file_url
+ self.save(ignore_permissions=True)
+
+ except Exception as e:
+ frappe.log_error(str(e), "Error fetching Sales Invoice PDF on external site")
+
@frappe.whitelist()
def refund(self, reason):
stripe = get_stripe()
@@ -765,48 +1046,14 @@ def refund(self, reason):
charge = payment_intent["charges"]["data"][0]["id"]
if not charge:
- frappe.throw(
- "Cannot refund payment because Stripe Charge not found for this invoice"
- )
+ frappe.throw("Cannot refund payment because Stripe Charge not found for this invoice")
stripe.Refund.create(charge=charge)
self.status = "Refunded"
+ self.refund_reason = reason
self.save()
self.add_comment(text=f"Refund reason: {reason}")
- def consume_credits_and_mark_as_paid(self, reason=None):
- if self.amount_due <= 0:
- frappe.throw("Amount due is less than or equal to 0")
-
- team = frappe.get_doc("Team", self.team)
- available_credits = team.get_balance()
- if available_credits < self.amount_due:
- available = frappe.utils.fmt_money(available_credits, 2, self.currency)
- frappe.throw(
- f"Available credits ({available}) is less than amount due"
- f" ({self.get_formatted('amount_due')})"
- )
-
- remark = "Manually consuming credits and marking the unpaid invoice as paid."
- if reason:
- remark += f" Reason: {reason}"
-
- self.change_stripe_invoice_status("Paid")
-
- # negative value to reduce balance by amount
- amount = self.amount_due * -1
- balance_transaction = team.allocate_credit_amount(
- amount, source="", remark=f"{remark}, Ref: Invoice {self.name}"
- )
-
- self.add_comment(
- text=(
- "Manually consuming credits and marking the unpaid invoice as paid."
- f" {frappe.utils.get_link_to_form('Balance Transaction', balance_transaction.name)}"
- )
- )
- self.db_set("status", "Paid")
-
@frappe.whitelist()
def change_stripe_invoice_status(self, status):
stripe = get_stripe()
@@ -827,6 +1074,12 @@ def refresh_stripe_payment_link(self):
# Also send back the updated payment link
return self.stripe_invoice_url
+ def get_stripe_invoice(self):
+ if not self.stripe_invoice_id:
+ return None
+ stripe = get_stripe()
+ return stripe.Invoice.retrieve(self.stripe_invoice_id)
+
def finalize_draft_invoices():
"""
@@ -877,7 +1130,7 @@ def finalize_unpaid_prepaid_credit_invoices():
"status": "Unpaid",
"type": "Subscription",
"period_end": ("<=", today),
- "payment_mode": ("in", ["Prepaid Credits", "Partner Credits"]),
+ "payment_mode": "Prepaid Credits",
},
pluck="name",
)
@@ -912,8 +1165,82 @@ def finalize_draft_invoice(invoice):
log_error("Invoice creation for next month failed", invoice=invoice.name)
-get_permission_query_conditions = get_permission_query_conditions_for_doctype("Invoice")
-
-
def calculate_gst(amount):
return amount * 0.18
+
+
+def get_permission_query_conditions(user):
+ from press.utils import get_current_team
+
+ if not user:
+ user = frappe.session.user
+
+ user_type = frappe.db.get_value("User", user, "user_type", cache=True)
+ if user_type == "System User":
+ return ""
+
+ team = get_current_team()
+
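+ # Non-system users may only list invoices belonging to their current team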
+ return f"(`tabInvoice`.`team` = {frappe.db.escape(team)})"
+
+
+def has_permission(doc, ptype, user):
+ from press.utils import get_current_team
+
+ if not user:
+ user = frappe.session.user
+
+ user_type = frappe.db.get_value("User", user, "user_type", cache=True)
+ if user_type == "System User":
+ return True
+
+ if ptype == "create":
+ return True
+
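+ # Otherwise allow access only when the invoice belongs to the current team,
+ # or the current team's user is a member of the invoice's team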
+ team = get_current_team(True)
+ team_members = [
+ d.user for d in frappe.db.get_all("Team Member", {"parenttype": "Team", "parent": doc.team}, ["user"])
+ ]
+ if doc.team == team.name or team.user in team_members:
+ return True
+ return False
+
+
+# Create a Sales Invoice on the partner's external site (called from the M-Pesa webhook)
+def create_sales_invoice_on_external_site(transaction_response):
+ client = get_partner_external_connection()
+ try:
+ # Define the necessary data for the Sales Invoice creation
+ data = {
+ "customer": transaction_response.get("team"),
+ "posting_date": frappe.utils.nowdate(),
+ "due_date": frappe.utils.add_days(frappe.utils.nowdate(), 30),
+ "items": [
+ {
+ "item_code": "Frappe Cloud Payment",
+ "qty": 1,
+ "rate": transaction_response.get("Amount"),
+ "description": "Payment for Mpesa transaction",
+ }
+ ],
+ "paid_amount": transaction_response.get("Amount"),
+ "status": "Paid",
+ }
+
+ # Post to the external site's sales invoice creation API
+ response = client.session.post(
+ f"{client.url}/api/method/frappe.client.insert",
+ headers=client.headers,
+ json={"doc": data},
+ )
+
+ if response.ok:
+ res = response.json()
+ sales_invoice = res.get("message")
+ if sales_invoice:
+ frappe.msgprint(_("Sales Invoice created successfully on external site."))
+ return sales_invoice
+ else:
+ frappe.throw(_("Failed to create Sales Invoice on external site."))
+ except Exception as e:
+ frappe.log_error(str(e), "Error creating Sales Invoice on external site")
diff --git a/press/press/doctype/invoice/stripe_webhook_handler.py b/press/press/doctype/invoice/stripe_webhook_handler.py
index 6651c94b385..66dbe5e6149 100644
--- a/press/press/doctype/invoice/stripe_webhook_handler.py
+++ b/press/press/doctype/invoice/stripe_webhook_handler.py
@@ -11,26 +11,60 @@
"invoice.payment_failed": "Failed",
}
+DISPUTE_EVENT_TYPE_MAP = {
+ "charge.dispute.created": "Created",
+ "charge.dispute.updated": "Updated",
+ "charge.dispute.closed": "Closed",
+}
+
-class StripeInvoiceWebhookHandler:
+class StripeWebhookHandler:
"""This class handles Stripe Invoice Webhook Events"""
def __init__(self, webhook_log):
self.webhook_log = webhook_log
def process(self):
- if self.webhook_log.event_type not in EVENT_TYPE_MAP.keys():
+ if self.webhook_log.event_type in DISPUTE_EVENT_TYPE_MAP:
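+ # Persist Stripe charge disputes as Payment Dispute documents for manual follow-up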
+ event = frappe.parse_json(self.webhook_log.payload)
+ id = event["data"]["object"]["id"]
+ payment_intent = event["data"]["object"]["payment_intent"]
+ email = event["data"]["object"]["evidence"]["customer_email_address"]
+ reason = event["data"]["object"]["reason"]
+ status = event["data"]["object"]["status"]
+
+ try:
+ frappe.get_doc(
+ {
+ "doctype": "Payment Dispute",
+ "event_type": DISPUTE_EVENT_TYPE_MAP[self.webhook_log.event_type],
+ "dispute_id": id,
+ "payment_intent": payment_intent,
+ "email": email,
+ "reason": reason,
+ "status": status,
+ }
+ ).insert()
+ except Exception:
+ log_error("Stripe Payment Dispute Event Error", event=event)
+ raise
+
+ if self.webhook_log.event_type not in EVENT_TYPE_MAP:
return
event = frappe.parse_json(self.webhook_log.payload)
stripe_invoice = event["data"]["object"]
+
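+ # Ignore events for Stripe invoices that have no matching Invoice in Frappe Cloud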
+ if not frappe.db.exists("Invoice", {"stripe_invoice_id": stripe_invoice["id"]}):
+ return
+
self.invoice = frappe.get_doc("Invoice", {"stripe_invoice_id": stripe_invoice["id"]})
event_type = self.webhook_log.event_type
payment_status = "Unpaid"
- if event_type == "invoice.payment_succeeded":
- payment_status = "Paid"
- elif event_type == "invoice.finalized" and stripe_invoice["status"] == "paid":
+ if event_type == "invoice.payment_succeeded" or (
+ event_type == "invoice.finalized" and stripe_invoice["status"] == "paid"
+ ):
payment_status = "Paid"
try:
@@ -50,5 +84,5 @@ def process(self):
raise
-def handle_stripe_invoice_webhook_events(doc, method):
- StripeInvoiceWebhookHandler(webhook_log=doc).process()
+def handle_stripe_webhook_events(doc, method):
+ StripeWebhookHandler(webhook_log=doc).process()
diff --git a/press/press/doctype/invoice/test_invoice.py b/press/press/doctype/invoice/test_invoice.py
index c22374c0018..32584ecc560 100644
--- a/press/press/doctype/invoice/test_invoice.py
+++ b/press/press/doctype/invoice/test_invoice.py
@@ -1,12 +1,11 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
-import unittest
-from unittest.mock import patch, Mock
+from unittest.mock import Mock, patch
import frappe
+from frappe.tests.utils import FrappeTestCase
from frappe.utils.data import add_days, today
from press.press.doctype.team.test_team import create_test_team
@@ -15,8 +14,10 @@
@patch.object(Invoice, "create_invoice_on_frappeio", new=Mock())
-class TestInvoice(unittest.TestCase):
+class TestInvoice(FrappeTestCase):
def setUp(self):
+ super().setUp()
+
self.team = create_test_team()
def tearDown(self):
@@ -31,9 +32,7 @@ def test_invoice_add_usage_record(self):
).insert()
for amount in [10, 20, 30]:
- usage_record = frappe.get_doc(
- doctype="Usage Record", team=self.team.name, amount=amount
- )
+ usage_record = frappe.get_doc(doctype="Usage Record", team=self.team.name, amount=amount)
usage_record.insert()
usage_record.submit()
@@ -48,7 +47,6 @@ def test_invoice_add_usage_record(self):
self.assertEqual(invoice.amount_due, 60)
def test_invoice_cancel_usage_record(self):
-
invoice = frappe.get_doc(
doctype="Invoice",
team=self.team.name,
@@ -58,9 +56,7 @@ def test_invoice_cancel_usage_record(self):
usage_records = []
for amount in [10, 20, 30, 40]:
- usage_record = frappe.get_doc(
- doctype="Usage Record", team=self.team.name, amount=amount
- )
+ usage_record = frappe.get_doc(doctype="Usage Record", team=self.team.name, amount=amount)
usage_record.insert()
usage_record.submit()
usage_records.append(usage_record)
@@ -79,7 +75,6 @@ def test_invoice_cancel_usage_record(self):
self.assertEqual(usage_records[0].invoice, None)
def test_invoice_with_credits_less_than_total(self):
-
invoice = frappe.get_doc(
doctype="Invoice",
team=self.team.name,
@@ -88,9 +83,7 @@ def test_invoice_with_credits_less_than_total(self):
).insert()
for amount in [10, 20, 30]:
- usage_record = frappe.get_doc(
- doctype="Usage Record", team=self.team.name, amount=amount
- )
+ usage_record = frappe.get_doc(doctype="Usage Record", team=self.team.name, amount=amount)
usage_record.insert()
usage_record.submit()
@@ -106,8 +99,7 @@ def test_invoice_with_credits_less_than_total(self):
except Exception as e:
self.assertEqual(
str(e),
- "Not enough credits for this invoice. Change payment mode to Card to"
- " pay using Stripe.",
+ "Not enough credits for this invoice. Change payment mode to Card to pay using Stripe.",
)
self.assertEqual(self.team.get_balance(), 0)
@@ -116,7 +108,6 @@ def test_invoice_with_credits_less_than_total(self):
self.assertEqual(invoice.applied_credits, 10)
def test_invoice_with_credits_more_than_total(self):
-
invoice = frappe.get_doc(
doctype="Invoice",
team=self.team.name,
@@ -125,9 +116,7 @@ def test_invoice_with_credits_more_than_total(self):
).insert()
for amount in [10, 20, 30]:
- usage_record = frappe.get_doc(
- doctype="Usage Record", team=self.team.name, amount=amount
- )
+ usage_record = frappe.get_doc(doctype="Usage Record", team=self.team.name, amount=amount)
usage_record.insert()
usage_record.submit()
@@ -146,7 +135,6 @@ def test_invoice_with_credits_more_than_total(self):
self.assertEqual(invoice.applied_credits, 60)
def test_invoice_credit_allocation(self):
-
# First Invoice
# Total: 600
# Team has 100 Free Credits and 1000 Prepaid Credits
@@ -197,8 +185,7 @@ def test_invoice_credit_allocation(self):
except Exception as e:
self.assertEqual(
str(e),
- "Not enough credits for this invoice. Change payment mode to Card to"
- " pay using Stripe.",
+ "Not enough credits for this invoice. Change payment mode to Card to pay using Stripe.",
)
self.assertEqual(invoice2.total, 700)
@@ -210,7 +197,6 @@ def test_invoice_credit_allocation(self):
)
def test_invoice_cancel_reverse_credit_allocation(self):
-
# First Invoice
# Total: 600
# Team has 100 Free Credits and 1000 Prepaid Credits
@@ -247,7 +233,6 @@ def test_invoice_cancel_reverse_credit_allocation(self):
self.assertEqual(self.team.get_balance(), 1100)
def test_intersecting_invoices(self):
-
invoice1 = frappe.get_doc(
doctype="Invoice",
team=self.team.name,
@@ -300,18 +285,15 @@ def test_prepaid_credits(self):
with open(
Path(__file__).parent / "fixtures/stripe_payment_intent_succeeded_webhook.json", "r"
) as payload:
- doc = frappe._dict(
- {"event_type": "payment_intent.succeeded", "payload": payload.read()}
- )
+ doc = frappe._dict({"event_type": "payment_intent.succeeded", "payload": payload.read()})
with patch.object(Invoice, "update_transaction_details", return_value=None):
process_stripe_webhook(doc, "")
- # balance should 900 after buying prepaid credits
- self.assertEqual(self.team.get_balance(), 900)
-
- def test_single_x_percent_flat_on_total(self):
+ # balance should be 755.64 after buying prepaid credits, with GST applied
+ self.assertEqual(self.team.get_balance(), 755.64)
+ def test_discount_amount(self):
invoice = frappe.get_doc(
doctype="Invoice",
team=self.team.name,
@@ -319,26 +301,16 @@ def test_single_x_percent_flat_on_total(self):
period_end=add_days(today(), 10),
).insert()
+ invoice.append("items", {"quantity": 1, "rate": 1000, "amount": 1000, "discount": 10})
invoice.append("items", {"quantity": 1, "rate": 1000, "amount": 1000})
invoice.save()
-
- # Before discount
- self.assertEqual(invoice.total, 1000)
-
- # Apply 10% discount
- invoice.append(
- "discounts", {"percent": 10, "discount_type": "Flat On Total", "based_on": "Percent"}
- )
- invoice.save()
-
- # After discount
invoice.reload()
- self.assertEqual(invoice.total_before_discount, 1000)
- self.assertEqual(invoice.total_discount_amount, 100)
- self.assertEqual(invoice.total, 900)
- def test_multiple_discounts_flat_on_total(self):
+ self.assertEqual(invoice.total_before_discount, 2000)
+ self.assertEqual(invoice.total_discount_amount, 10)
+ self.assertEqual(invoice.total, 2000 - 10)
+ def test_discount_percentage(self):
invoice = frappe.get_doc(
doctype="Invoice",
team=self.team.name,
@@ -346,83 +318,14 @@ def test_multiple_discounts_flat_on_total(self):
period_end=add_days(today(), 10),
).insert()
+ invoice.append("items", {"quantity": 1, "rate": 1000, "amount": 1000, "discount_percentage": 10})
invoice.append("items", {"quantity": 1, "rate": 1000, "amount": 1000})
- invoice.save()
-
- # Apply 10% discount
- invoice.append(
- "discounts", {"percent": 10, "discount_type": "Flat On Total", "based_on": "Percent"}
- )
-
- # Apply another 10%
- invoice.append(
- "discounts", {"percent": 10, "discount_type": "Flat On Total", "based_on": "Percent"}
- )
-
- invoice.save()
-
- # After discount
- invoice.reload()
- self.assertEqual(invoice.total_before_discount, 1000)
- self.assertEqual(invoice.total_discount_amount, 200)
- self.assertEqual(invoice.total, 800)
-
- def test_discount_borrowed_from_team(self):
-
- # Give 30% to team
- self.team.append(
- "discounts", {"percent": 30, "discount_type": "Flat On Total", "based_on": "Percent"}
- )
- self.team.save()
-
- invoice = frappe.get_doc(
- doctype="Invoice",
- team=self.team.name,
- period_start=today(),
- period_end=add_days(today(), 10),
- ).insert()
-
- # Add line items
- invoice.append("items", {"quantity": 1, "rate": 1000, "amount": 1000})
- invoice.save()
- invoice.reload()
-
- # After discount
- self.assertEqual(invoice.total_before_discount, 1000)
- self.assertEqual(invoice.total_discount_amount, 300)
- self.assertEqual(invoice.total, 700)
-
- def test_mix_discounts_flat_on_total_and_percent(self):
-
- # Give 30% to team
- self.team.append(
- "discounts", {"percent": 30, "discount_type": "Flat On Total", "based_on": "Percent"}
- )
- self.team.save()
-
- invoice = frappe.get_doc(
- doctype="Invoice",
- team=self.team.name,
- period_start=today(),
- period_end=add_days(today(), 10),
- ).insert()
-
- # Add line items
- invoice.append("items", {"quantity": 1, "rate": 500, "amount": 500})
- invoice.append("items", {"quantity": 1, "rate": 500, "amount": 500})
-
- # Apply 100 units discount
- invoice.append(
- "discounts", {"amount": 100, "discount_type": "Flat On Total", "based_on": "Amount"}
- )
-
invoice.save()
invoice.reload()
-
- # After discount
- self.assertEqual(invoice.total_before_discount, 1000)
- self.assertEqual(invoice.total_discount_amount, 400)
- self.assertEqual(invoice.total, 600)
+ self.assertEqual(invoice.items[0].discount, 100)
+ self.assertEqual(invoice.total_before_discount, 2000)
+ self.assertEqual(invoice.total_discount_amount, 100)
+ self.assertEqual(invoice.total, 2000 - 100)
def test_finalize_invoice_with_total_zero(self):
invoice = frappe.get_doc(
@@ -474,3 +377,186 @@ def test_create_stripe_invoice_with_prepaid_credits(self, mock_stripe):
).insert()
invoice.finalize_invoice()
self.assertEqual(invoice.stripe_invoice_id, None)
+
+ def test_negative_balance_case(self):
+ team = create_test_team("test22@example.com")
+
+ # add 10 credits
+ team.allocate_credit_amount(10, source="Prepaid Credits")
+ # transfer 5 credits
+ team.allocate_credit_amount(-5, source="Transferred Credits")
+ team.payment_mode = "Prepaid Credits"
+ team.save()
+
+ # consume 10 credits
+ invoice = frappe.get_doc(doctype="Invoice", team=team.name)
+ invoice.append("items", {"quantity": 1, "rate": 10, "amount": 10})
+ invoice.insert()
+
+ # finalize invoice
+ invoice.finalize_invoice()
+ self.assertTrue(invoice.status == "Unpaid")
+ self.assertTrue(invoice.amount_due > 0)
+
+ def test_negative_balance_case_2(self):
+ team = create_test_team("test22@example.com")
+ team.allocate_credit_amount(10, source="Prepaid Credits")
+
+ invoice = frappe.get_doc(doctype="Invoice", team=team.name)
+ invoice.append("items", {"quantity": 1, "rate": 8, "amount": 8})
+ invoice.insert()
+ invoice.finalize_invoice()
+
+ with self.assertRaises(frappe.ValidationError) as err:
+ team.allocate_credit_amount(-5, source="Transferred Credits")
+ self.assertTrue("is less than" in str(err.exception))
+
+ def test_negative_balance_allocation(self):
+ team = create_test_team("test22@example.com")
+ team.allocate_credit_amount(10, source="Prepaid Credits")
+ team.allocate_credit_amount(30, source="Prepaid Credits")
+
+ with self.assertRaises(frappe.ValidationError) as err:
+ team.allocate_credit_amount(-50, source="Transferred Credits")
+ self.assertTrue("is less than" in str(err.exception))
+
+ team.allocate_credit_amount(-35, source="Transferred Credits")
+ self.assertEqual(team.get_balance(), 5)
+ transactions = frappe.get_all(
+ "Balance Transaction",
+ filters={
+ "team": team.name,
+ "docstatus": 1,
+ "unallocated_amount": (">=", 0),
+ "source": "Prepaid Credits",
+ },
+ fields=["name", "unallocated_amount"],
+ order_by="creation asc",
+ )
+ self.assertEqual(len(transactions), 2)
+ self.assertEqual(transactions[0].unallocated_amount, 0)
+ self.assertEqual(transactions[1].unallocated_amount, 5)
+
+ def test_settle_negative_balance(self):
+ # create team
+ # allocate -100 credits
+ # try to settle by adding 200 credits
+ # the new unallocated amount should be 100
+
+ team = create_test_team("test22@example.com")
+ bt = frappe.new_doc("Balance Transaction")
+ bt.team = team.name
+ bt.amount = -100
+ bt.source = "Transferred Credits"
+ bt.type = "Adjustment"
+ bt.docstatus = 1
+ bt.db_insert()
+
+ settling_transaction = team.allocate_credit_amount(200, source="Prepaid Credits")
+ self.assertEqual(team.get_balance(), 100)
+
+ settling_transaction.reload()
+ self.assertEqual(settling_transaction.unallocated_amount, 100)
+
+ def test_invoice_for_update_after_submit_error(self):
+ team = create_test_team("jondoe@example.com")
+ team.allocate_credit_amount(10, source="Free Credits")
+ team.payment_mode = "Prepaid Credits"
+ team.save()
+
+ invoice = frappe.new_doc("Invoice", team=team.name)
+ invoice.append("items", {"quantity": 5, "rate": 0.33, "amount": 1.65})
+ invoice.append("items", {"quantity": 3, "rate": 2, "amount": 6, "discount_percentage": 10})
+ invoice.insert()
+ invoice.finalize_invoice() # finalize invoice submits the doc if invoice gets settled
+ self.assertEqual(invoice.status, "Paid")
+
+ before_total = invoice.total
+ before_total_before_discount = invoice.total_before_discount
+ before_total_discount_amount = invoice.total_discount_amount
+ invoice.validate()
+ invoice.save()
+ invoice.reload()
+
+ after_total = invoice.total
+ after_total_before_discount = invoice.total_before_discount
+ after_total_discount_amount = invoice.total_discount_amount
+ self.assertEqual(before_total, after_total)
+ self.assertEqual(before_total_before_discount, after_total_before_discount)
+ self.assertEqual(before_total_discount_amount, after_total_discount_amount)
+
+ def test_tax_without_credits(self):
+ team = create_test_team("tax_without_credits@example.com")
+ frappe.db.set_single_value("Press Settings", "gst_percentage", 0.18)
+
+ invoice = frappe.get_doc(doctype="Invoice", team=team.name)
+ invoice.append("items", {"quantity": 1, "rate": 10, "amount": 10})
+ invoice.insert()
+
+ invoice.finalize_invoice()
+ self.assertEqual(invoice.amount_due, 10)
+ self.assertEqual(invoice.amount_due_with_tax, 11.8)
+
+ def test_tax_with_credits(self):
+ """Test invoice with tax when payment mode is prepaid credits"""
+ team = create_test_team("tax_with_credits@example.com")
+ team.allocate_credit_amount(5, source="Prepaid Credits")
+ frappe.db.set_single_value("Press Settings", "gst_percentage", 0.18)
+
+ invoice = frappe.get_doc(doctype="Invoice", team=team.name)
+ invoice.append("items", {"quantity": 1, "rate": 10, "amount": 10})
+ invoice.insert()
+
+ invoice.finalize_invoice()
+ self.assertEqual(invoice.total, 10)
+ self.assertEqual(invoice.applied_credits, 5)
+ self.assertEqual(invoice.amount_due, 5)
+ self.assertEqual(invoice.amount_due_with_tax, 5)
+
+ @patch.object(Invoice, "create_stripe_invoice", new=Mock())
+ def test_tax_with_credits_with_card(self):
+ """Test invoice with tax when payment mode is card"""
+ team = create_test_team("tax_with_credits@example.com")
+ team.allocate_credit_amount(5, source="Prepaid Credits")
+ frappe.db.set_value("Team", team.name, "payment_mode", "Card")
+ # team.reload()
+ frappe.db.set_single_value("Press Settings", "gst_percentage", 0.18)
+
+ invoice = frappe.get_doc(doctype="Invoice", team=team.name)
+ invoice.append("items", {"quantity": 1, "rate": 10, "amount": 10})
+ invoice.insert()
+
+ invoice.finalize_invoice()
+ self.assertEqual(invoice.total, 10)
+ self.assertEqual(invoice.applied_credits, 5)
+ self.assertEqual(invoice.amount_due, 5)
+ self.assertEqual(invoice.amount_due_with_tax, 5.9)
+
+ def test_tax_for_usd_accounts(self):
+ team = create_test_team("tax_for_usd_accounts@example.com", "United States")
+ frappe.db.set_single_value("Press Settings", "gst_percentage", 0.18)
+
+ invoice = frappe.get_doc(doctype="Invoice", team=team.name)
+ invoice.append("items", {"quantity": 1, "rate": 10, "amount": 10})
+ invoice.insert()
+
+ invoice.finalize_invoice()
+ self.assertEqual(invoice.total, 10)
+ self.assertEqual(invoice.amount_due, 10)
+ self.assertEqual(invoice.amount_due_with_tax, 10)
+
+ def test_npo_discount(self):
+ team = create_test_team("npo_team_discount@gmail.com")
+ team.apply_npo_discount = 1
+ team.save()
+ frappe.db.set_single_value("Press Settings", "npo_discount", 10)
+
+ invoice = frappe.get_doc(doctype="Invoice", team=team.name)
+ invoice.append("items", {"quantity": 1, "rate": 100, "amount": 100})
+ invoice.insert()
+
+ invoice.finalize_invoice()
+ self.assertEqual(invoice.total, 90)
+ self.assertEqual(invoice.total_before_discount, 100)
+ self.assertEqual(invoice.total_discount_amount, 10)
+ self.assertEqual(invoice.amount_due, 90)
diff --git a/press/press/doctype/invoice_credit_allocation/invoice_credit_allocation.py b/press/press/doctype/invoice_credit_allocation/invoice_credit_allocation.py
index f6b284d000c..63a147fca99 100644
--- a/press/press/doctype/invoice_credit_allocation/invoice_credit_allocation.py
+++ b/press/press/doctype/invoice_credit_allocation/invoice_credit_allocation.py
@@ -8,4 +8,21 @@
class InvoiceCreditAllocation(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ amount: DF.Currency
+ currency: DF.Link | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ source: DF.Data | None
+ transaction: DF.Link | None
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/invoice_discount/invoice_discount.py b/press/press/doctype/invoice_discount/invoice_discount.py
index 0ad99bd5e30..2dca6afefcc 100644
--- a/press/press/doctype/invoice_discount/invoice_discount.py
+++ b/press/press/doctype/invoice_discount/invoice_discount.py
@@ -6,4 +6,24 @@
class InvoiceDiscount(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ amount: DF.Currency
+ based_on: DF.Literal["Percent", "Amount"]
+ discount_type: DF.Literal["Flat On Total"]
+ note: DF.Data | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ percent: DF.Percent
+ via_items: DF.Check
+ via_team: DF.Check
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/invoice_item/invoice_item.json b/press/press/doctype/invoice_item/invoice_item.json
index 3c01467889d..621d77e0e0a 100644
--- a/press/press/doctype/invoice_item/invoice_item.json
+++ b/press/press/doctype/invoice_item/invoice_item.json
@@ -13,6 +13,7 @@
"rate",
"amount",
"discount",
+ "discount_percentage",
"site",
"has_marketplace_payout_completed"
],
@@ -50,7 +51,8 @@
"fieldtype": "Link",
"in_list_view": 1,
"label": "Document Type",
- "options": "DocType"
+ "options": "DocType",
+ "search_index": 1
},
{
"fieldname": "document_name",
@@ -70,7 +72,8 @@
"fieldname": "site",
"fieldtype": "Link",
"label": "Site",
- "options": "Site"
+ "options": "Site",
+ "search_index": 1
},
{
"default": "0",
@@ -79,16 +82,22 @@
"label": "Has Marketplace Payout Completed?"
},
{
+ "default": "0",
"fieldname": "discount",
"fieldtype": "Currency",
"label": "Discount",
"options": "currency"
+ },
+ {
+ "fieldname": "discount_percentage",
+ "fieldtype": "Percent",
+ "label": "Discount (%)"
}
],
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
- "modified": "2023-02-03 18:02:30.371635",
+ "modified": "2024-11-06 20:44:24.686991",
"modified_by": "Administrator",
"module": "Press",
"name": "Invoice Item",
diff --git a/press/press/doctype/invoice_item/invoice_item.py b/press/press/doctype/invoice_item/invoice_item.py
index e0e26dde1c5..ed0b44e6c30 100644
--- a/press/press/doctype/invoice_item/invoice_item.py
+++ b/press/press/doctype/invoice_item/invoice_item.py
@@ -8,4 +8,28 @@
class InvoiceItem(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ amount: DF.Currency
+ description: DF.Data | None
+ discount: DF.Currency
+ discount_percentage: DF.Percent
+ document_name: DF.DynamicLink | None
+ document_type: DF.Link | None
+ has_marketplace_payout_completed: DF.Check
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ plan: DF.Data | None
+ quantity: DF.Float
+ rate: DF.Currency
+ site: DF.Link | None
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/invoice_transaction_fee/invoice_transaction_fee.py b/press/press/doctype/invoice_transaction_fee/invoice_transaction_fee.py
index 84c40d79bab..a5a2db0a1f6 100644
--- a/press/press/doctype/invoice_transaction_fee/invoice_transaction_fee.py
+++ b/press/press/doctype/invoice_transaction_fee/invoice_transaction_fee.py
@@ -8,4 +8,20 @@
class InvoiceTransactionFee(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ amount: DF.Currency
+ currency: DF.Link | None
+ description: DF.Data | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/log_counter/__init__.py b/press/press/doctype/log_counter/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/log_counter/log_counter.js b/press/press/doctype/log_counter/log_counter.js
new file mode 100644
index 00000000000..0d833a670f8
--- /dev/null
+++ b/press/press/doctype/log_counter/log_counter.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2024, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Log Counter", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/log_counter/log_counter.json b/press/press/doctype/log_counter/log_counter.json
new file mode 100644
index 00000000000..d173bc5a0bb
--- /dev/null
+++ b/press/press/doctype/log_counter/log_counter.json
@@ -0,0 +1,94 @@
+{
+ "actions": [],
+ "creation": "2024-04-19 11:57:50.263215",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "logtype",
+ "groupby",
+ "column_break_nyqx",
+ "date",
+ "section_break_epfa",
+ "counts",
+ "total"
+ ],
+ "fields": [
+ {
+ "fieldname": "date",
+ "fieldtype": "Date",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Collection Date",
+ "read_only": 1,
+ "reqd": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "column_break_nyqx",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "section_break_epfa",
+ "fieldtype": "Section Break",
+ "label": "Counts"
+ },
+ {
+ "fieldname": "counts",
+ "fieldtype": "JSON",
+ "label": "Counts",
+ "read_only": 1,
+ "reqd": 1
+ },
+ {
+ "fieldname": "total",
+ "fieldtype": "Int",
+ "in_list_view": 1,
+ "label": "Total",
+ "non_negative": 1,
+ "read_only": 1,
+ "reqd": 1
+ },
+ {
+ "fieldname": "groupby",
+ "fieldtype": "Data",
+ "label": "Group By",
+ "read_only": 1,
+ "reqd": 1
+ },
+ {
+ "fieldname": "logtype",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Log Type",
+ "options": "DocType",
+ "read_only": 1,
+ "reqd": 1,
+ "search_index": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2024-04-19 13:17:13.153028",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Log Counter",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/log_counter/log_counter.py b/press/press/doctype/log_counter/log_counter.py
new file mode 100644
index 00000000000..f9ae1f4f3f8
--- /dev/null
+++ b/press/press/doctype/log_counter/log_counter.py
@@ -0,0 +1,140 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+import datetime
+import json
+from typing import Optional, TypedDict
+
+import frappe
+import frappe.utils
+from frappe.model.document import Document
+from frappe.query_builder import DocType
+from frappe.query_builder.functions import Count
+from pypika import Order
+
+# Maps each log DocType to the column its counts are grouped by
+RECORD_FOR: dict[str, str] = {
+ "Error Log": "method",
+}
+
+Counts = TypedDict(
+ "Counts",
+ {
+ "counts": dict[str, int],
+ "date": datetime.date,
+ "total": int,
+ },
+)
+
+
+class LogCounter(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ counts: DF.JSON
+ date: DF.Date
+ groupby: DF.Data
+ logtype: DF.Link
+ total: DF.Int
+ # end: auto-generated types
+
+ def autoname(self):
+ self.name = get_name(self.logtype, self.date)
+
+
+def record_counts():
+ date = frappe.utils.now_datetime().date() - datetime.timedelta(days=1)
+ for doctype, groupby in RECORD_FOR.items():
+ record_for_date(doctype, groupby, date)
+ frappe.db.commit()
+
+
+def record_for_date(
+ doctype: str = "Error Log",
+ groupby: str = "method",
+ date: Optional[datetime.date] = None,
+):
+ counts = get_counts(
+ doctype,
+ groupby,
+ date,
+ )
+ name = get_name(doctype, counts["date"])
+ counts_json = json.dumps(counts["counts"], indent=2)
+
+ # Update counts if name value exists
+ if frappe.db.exists("Log Counter", name):
+ frappe.db.set_value("Log Counter", name, "counts", counts_json)
+ frappe.db.set_value("Log Counter", name, "total", counts["total"])
+ return
+
+ lc = frappe.get_doc(
+ {
+ "doctype": "Log Counter",
+ "logtype": doctype,
+ "groupby": groupby,
+ "counts": counts_json,
+ "total": counts["total"],
+ "date": counts["date"],
+ }
+ )
+ lc.insert()
+
+
+def get_counts(
+ doctype: str = "Error Log",
+ groupby: str = "method",
+ date: Optional[datetime.date] = None,
+) -> Counts:
+ date_to = date if date else frappe.utils.now_datetime().date()
+ date_from = date_to - datetime.timedelta(days=1)
+
+ table = DocType(doctype)
+ column = table[groupby]
+
+ q = frappe.qb.from_(table)
+ q = q.select(column, Count("*", alias="count"))
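+ # pypika's slice syntax on a column compiles to a BETWEEN clause
+ # (creation BETWEEN date_from AND date_to)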
+ q = q.where(table.creation[date_from:date_to])
+ q = q.groupby(column)
+ q = q.orderby("count", order=Order.desc)
+ r = q.run()
+
+ counts = {c[0]: c[1] for c in r}
+ total = sum(c[1] for c in r)
+ return dict(counts=counts, date=date_to, total=total)
+
+
+def get_name(doctype: str, date: datetime.date):
+ dt_stub = doctype.lower().replace(" ", "_")
+ date_iso = date.isoformat().replace("-", "_")
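+ # e.g. ("Error Log", 2024-04-19) -> "error_log-2024_04_19"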
+ return f"{dt_stub}-{date_iso}"
+
+
+def top_k(
+ k: int = 5,
+ log_type: str = "Error Log",
+ since: Optional[datetime.date] = None,
+):
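+ # Returns one row per recorded day since the given date, with each row's
+ # counts trimmed to the k most frequent entries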
+ if not since:
+ since = frappe.utils.now_datetime().date() - datetime.timedelta(days=30)
+
+ res = frappe.get_all(
+ "Log Counter",
+ fields=["total", "date", "counts"],
+ filters={
+ "logtype": log_type,
+ "creation": [">", since],
+ },
+ )
+ for r in res:
+ counts = json.loads(r["counts"])
+ counts = [{"error": k, "count": i} for k, i in counts.items()]
+ counts.sort(key=lambda x: x["count"], reverse=True)
+ r["counts"] = counts[:k]
+
+ return res
diff --git a/press/press/doctype/log_counter/test_log_counter.py b/press/press/doctype/log_counter/test_log_counter.py
new file mode 100644
index 00000000000..8940e8d76c1
--- /dev/null
+++ b/press/press/doctype/log_counter/test_log_counter.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2024, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class TestLogCounter(FrappeTestCase):
+ pass
diff --git a/press/press/doctype/log_server/log_server.js b/press/press/doctype/log_server/log_server.js
index 78695f61b6a..774ad92e8ca 100644
--- a/press/press/doctype/log_server/log_server.js
+++ b/press/press/doctype/log_server/log_server.js
@@ -10,6 +10,7 @@ frappe.ui.form.on('Log Server', {
[__('Update Agent'), 'update_agent', true, frm.doc.is_server_setup],
[__('Prepare Server'), 'prepare_server', true, !frm.doc.is_server_setup],
[__('Setup Server'), 'setup_server', true, !frm.doc.is_server_setup],
+ [__('Archive'), 'archive', true, frm.doc.provider === 'AWS EC2'],
[
__('Fetch Keys'),
'fetch_keys',
@@ -23,6 +24,7 @@ frappe.ui.form.on('Log Server', {
false,
frm.doc.is_server_setup,
],
+ [__('Update TLS Certificate'), 'update_tls_certificate', true],
].forEach(([label, method, confirm, condition]) => {
if (typeof condition === 'undefined' || condition) {
frm.add_custom_button(
diff --git a/press/press/doctype/log_server/log_server.json b/press/press/doctype/log_server/log_server.json
index 8fa7bdccfca..b5fb10d6cc5 100644
--- a/press/press/doctype/log_server/log_server.json
+++ b/press/press/doctype/log_server/log_server.json
@@ -8,7 +8,10 @@
"status",
"hostname",
"domain",
+ "tls_certificate_renewal_failed",
+ "plan",
"column_break_4",
+ "cluster",
"provider",
"virtual_machine",
"is_server_setup",
@@ -21,6 +24,8 @@
"agent_section",
"agent_password",
"ssh_section",
+ "ssh_user",
+ "ssh_port",
"frappe_user_password",
"frappe_public_key",
"column_break_20",
@@ -66,7 +71,7 @@
"fieldname": "provider",
"fieldtype": "Select",
"label": "Provider",
- "options": "Generic\nScaleway\nAWS EC2",
+ "options": "Generic\nScaleway\nAWS EC2\nOCI",
"set_only_once": 1
},
{
@@ -87,7 +92,6 @@
"fieldtype": "Data",
"in_list_view": 1,
"label": "IP",
- "reqd": 1,
"set_only_once": 1
},
{
@@ -184,6 +188,36 @@
"label": "Virtual Machine",
"mandatory_depends_on": "eval:doc.provider === \"AWS EC2\"",
"options": "Virtual Machine"
+ },
+ {
+ "fieldname": "cluster",
+ "fieldtype": "Link",
+ "label": "Cluster",
+ "options": "Cluster"
+ },
+ {
+ "fieldname": "ssh_user",
+ "fieldtype": "Data",
+ "label": "SSH User"
+ },
+ {
+ "default": "22",
+ "fieldname": "ssh_port",
+ "fieldtype": "Int",
+ "label": "SSH Port"
+ },
+ {
+ "default": "0",
+ "fieldname": "tls_certificate_renewal_failed",
+ "fieldtype": "Check",
+ "label": "TLS Certificate Renewal Failed",
+ "read_only": 1
+ },
+ {
+ "fieldname": "plan",
+ "fieldtype": "Link",
+ "label": "Plan",
+ "options": "Server Plan"
}
],
"links": [
@@ -192,7 +226,7 @@
"link_fieldname": "server"
}
],
- "modified": "2022-06-16 22:28:28.777134",
+ "modified": "2025-11-22 15:50:53.703274",
"modified_by": "Administrator",
"module": "Press",
"name": "Log Server",
@@ -211,8 +245,9 @@
"write": 1
}
],
+ "row_format": "Dynamic",
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"track_changes": 1
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/log_server/log_server.py b/press/press/doctype/log_server/log_server.py
index 576157fbe42..a993a5782e0 100644
--- a/press/press/doctype/log_server/log_server.py
+++ b/press/press/doctype/log_server/log_server.py
@@ -1,5 +1,6 @@
# Copyright (c) 2021, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
import frappe
@@ -9,6 +10,37 @@
class LogServer(BaseServer):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ agent_password: DF.Password | None
+ cluster: DF.Link | None
+ domain: DF.Link | None
+ frappe_public_key: DF.Code | None
+ frappe_user_password: DF.Password | None
+ hostname: DF.Data
+ ip: DF.Data | None
+ is_server_setup: DF.Check
+ kibana_password: DF.Password | None
+ monitoring_password: DF.Password | None
+ plan: DF.Link | None
+ private_ip: DF.Data
+ private_mac_address: DF.Data | None
+ private_vlan_id: DF.Data | None
+ provider: DF.Literal["Generic", "Scaleway", "AWS EC2", "OCI"]
+ root_public_key: DF.Code | None
+ ssh_port: DF.Int
+ ssh_user: DF.Data | None
+ status: DF.Literal["Pending", "Installing", "Active", "Broken", "Archived"]
+ tls_certificate_renewal_failed: DF.Check
+ virtual_machine: DF.Link | None
+ # end: auto-generated types
+
def validate(self):
self.validate_agent_password()
self.validate_monitoring_password()
@@ -35,6 +67,8 @@ def _setup_server(self):
ansible = Ansible(
playbook="log.yml",
server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
variables={
"server": self.name,
"workers": 1,
@@ -78,7 +112,12 @@ def install_elasticsearch_exporter(self):
def _install_elasticsearch_exporter(self):
try:
- ansible = Ansible(playbook="elasticsearch_exporter.yml", server=self)
+ ansible = Ansible(
+ playbook="elasticsearch_exporter.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ )
ansible.run()
except Exception:
log_error("Elasticsearch Exporter Install Exception", server=self.as_dict())
diff --git a/press/press/doctype/log_server/test_log_server.py b/press/press/doctype/log_server/test_log_server.py
index 8af330f1e6e..c07d41df061 100644
--- a/press/press/doctype/log_server/test_log_server.py
+++ b/press/press/doctype/log_server/test_log_server.py
@@ -2,8 +2,8 @@
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestLogServer(unittest.TestCase):
+class TestLogServer(FrappeTestCase):
pass
diff --git a/press/press/doctype/logical_replication_backup/__init__.py b/press/press/doctype/logical_replication_backup/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/logical_replication_backup/logical_replication_backup.js b/press/press/doctype/logical_replication_backup/logical_replication_backup.js
new file mode 100644
index 00000000000..c1cf4a261dd
--- /dev/null
+++ b/press/press/doctype/logical_replication_backup/logical_replication_backup.js
@@ -0,0 +1,36 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('Logical Replication Backup', {
+ refresh(frm) {
+ if (frm.is_new()) {
+ return;
+ }
+
+ [
+ [__('Start'), 'execute', frm.doc.status === 'Pending'],
+ [__('Force Continue'), 'force_continue', true],
+ [__('Force Fail'), 'force_fail', frm.doc.status === 'Running'],
+ [__('Next [Caution]'), 'next', frm.doc.status === 'Failure'],
+ ].forEach(([label, method, condition]) => {
+ if (condition) {
+ frm.add_custom_button(
+ label,
+ () => {
+ frappe.confirm(
+ `Are you sure you want to ${label.toLowerCase()}?`,
+ () =>
+ frm
+ .call(method, {
+ freeze: true,
+ freeze_message: __('Please wait...'),
+ })
+ .then(() => frm.refresh()),
+ );
+ },
+ __('Actions'),
+ );
+ }
+ });
+ },
+});
diff --git a/press/press/doctype/logical_replication_backup/logical_replication_backup.json b/press/press/doctype/logical_replication_backup/logical_replication_backup.json
new file mode 100644
index 00000000000..4f385258a73
--- /dev/null
+++ b/press/press/doctype/logical_replication_backup/logical_replication_backup.json
@@ -0,0 +1,252 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-08-14 12:59:28.001429",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "status",
+ "site",
+ "server_snapshot",
+ "column_break_retn",
+ "server",
+ "database_server",
+ "section_break_aftx",
+ "start",
+ "column_break_vsbg",
+ "end",
+ "column_break_ojhl",
+ "duration",
+ "section_break_jnca",
+ "site_replication_config",
+ "column_break_aawe",
+ "bench_replication_config",
+ "section_break_eagz",
+ "servers",
+ "section_break_zzat",
+ "execution_stage",
+ "column_break_olbe",
+ "pre_migrate_stage_status",
+ "column_break_ookj",
+ "post_migrate_stage_status",
+ "column_break_ifmh",
+ "failover_stage_status",
+ "section_break_orav",
+ "pre_migrate_steps",
+ "post_migrate_steps",
+ "failover_steps",
+ "initial_binlog_position_of_new_primary_db"
+ ],
+ "fields": [
+ {
+ "default": "Pending",
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Status",
+ "options": "Pending\nRunning\nSuccess\nFailure",
+ "reqd": 1
+ },
+ {
+ "fieldname": "column_break_retn",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fetch_from": "site.server",
+ "fetch_if_empty": 1,
+ "fieldname": "server",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Server",
+ "options": "Server",
+ "reqd": 1
+ },
+ {
+ "fetch_from": "server.database_server",
+ "fetch_if_empty": 1,
+ "fieldname": "database_server",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Database Server",
+ "options": "Database Server",
+ "reqd": 1
+ },
+ {
+ "fieldname": "server_snapshot",
+ "fieldtype": "Link",
+ "label": "Server Snapshot",
+ "options": "Server Snapshot",
+ "search_index": 1
+ },
+ {
+ "fieldname": "section_break_eagz",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "failover_steps",
+ "fieldtype": "Table",
+ "label": "Failover Steps",
+ "options": "Logical Replication Step"
+ },
+ {
+ "fieldname": "pre_migrate_steps",
+ "fieldtype": "Table",
+ "label": "Pre Migrate Steps",
+ "options": "Logical Replication Step"
+ },
+ {
+ "fieldname": "post_migrate_steps",
+ "fieldtype": "Table",
+ "label": "Post Migrate Steps",
+ "options": "Logical Replication Step"
+ },
+ {
+ "fieldname": "section_break_jnca",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "site_replication_config",
+ "fieldtype": "Small Text",
+ "label": "Site Replication Config",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_aawe",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "bench_replication_config",
+ "fieldtype": "Small Text",
+ "label": "Bench Replication Config",
+ "read_only": 1
+ },
+ {
+ "fieldname": "servers",
+ "fieldtype": "Table",
+ "label": "Servers",
+ "options": "Logical Replication Server"
+ },
+ {
+ "fieldname": "site",
+ "fieldtype": "Link",
+ "label": "Site",
+ "options": "Site",
+ "reqd": 1
+ },
+ {
+ "default": "Pre-Migrate",
+ "fieldname": "execution_stage",
+ "fieldtype": "Select",
+ "label": "Execution Stage",
+ "options": "Pre-Migrate\nPost-Migrate\nFailover",
+ "reqd": 1
+ },
+ {
+ "fieldname": "section_break_aftx",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "start",
+ "fieldtype": "Datetime",
+ "label": "Start",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_vsbg",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "end",
+ "fieldtype": "Datetime",
+ "label": "End",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_ojhl",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "duration",
+ "fieldtype": "Duration",
+ "label": "Duration",
+ "read_only": 1
+ },
+ {
+ "depends_on": "eval: doc.initial_binlog_position_of_new_primary_db",
+ "fieldname": "initial_binlog_position_of_new_primary_db",
+ "fieldtype": "Data",
+ "label": "Initial Binlog Position of New Primary DB",
+ "read_only": 1
+ },
+ {
+ "fieldname": "section_break_zzat",
+ "fieldtype": "Section Break"
+ },
+ {
+ "default": "Pending",
+ "fieldname": "pre_migrate_stage_status",
+ "fieldtype": "Select",
+ "label": "Pre-Migrate Stage Status",
+ "options": "Pending\nRunning\nSuccess\nFailure",
+ "reqd": 1
+ },
+ {
+ "fieldname": "column_break_olbe",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "Pending",
+ "fieldname": "post_migrate_stage_status",
+ "fieldtype": "Select",
+ "label": "Post-Migrate Stage Status",
+ "options": "Pending\nRunning\nSuccess\nFailure",
+ "reqd": 1
+ },
+ {
+ "fieldname": "column_break_ookj",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "Pending",
+ "fieldname": "failover_stage_status",
+ "fieldtype": "Select",
+ "label": "Failover Stage Status",
+ "options": "Pending\nRunning\nSuccess\nFailure",
+ "reqd": 1
+ },
+ {
+ "fieldname": "section_break_orav",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "column_break_ifmh",
+ "fieldtype": "Column Break"
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2025-08-21 16:54:30.994648",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Logical Replication Backup",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/press/doctype/logical_replication_backup/logical_replication_backup.py b/press/press/doctype/logical_replication_backup/logical_replication_backup.py
new file mode 100644
index 00000000000..a8364061b15
--- /dev/null
+++ b/press/press/doctype/logical_replication_backup/logical_replication_backup.py
@@ -0,0 +1,1236 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+import json
+import time
+from enum import Enum
+from typing import TYPE_CHECKING, Literal
+
+import frappe
+from frappe.model.document import Document
+
+from press.press.doctype.ansible_console.ansible_console import AnsibleAdHoc
+
+if TYPE_CHECKING:
+ from press.press.doctype.bench.bench import Bench
+ from press.press.doctype.database_server.database_server import DatabaseServer
+ from press.press.doctype.logical_replication_step.logical_replication_step import LogicalReplicationStep
+ from press.press.doctype.release_group.release_group import ReleaseGroup
+ from press.press.doctype.server.server import BaseServer, Server
+ from press.press.doctype.server_snapshot.server_snapshot import ServerSnapshot
+ from press.press.doctype.site.site import Site
+
+
+StepStatus = Enum("StepStatus", ["Pending", "Running", "Skipped", "Success", "Failure"])
+
+VOLUME_INITIALIZATION_RATE = 300 # in MB/s, this is the rate at which the volume will be initialized
+MINIMUM_REPLICATION_LAG_FOR_TAKING_DOWNTIME = (
+ 1200 # in seconds; replication lag must drop to or below this threshold before taking downtime
+)
+
+"""
+Replication configuration keys
+- read_from_replica: bool
+- allow_reads_during_maintenance: bool
+- replica_host: str
+"""
+REPLICATION_CONFIG_KEYS = [
+ "read_from_replica",
+ "allow_reads_during_maintenance",
+ "replica_host",
+]
+
+
+def check_replication_lag(server: "DatabaseServer", target_lag: int) -> int:
+ # -1 -> Something wrong with replication, consider as failure
+ # 0 -> We haven't yet reached the targeted replication lag
+ # 1 -> We have reached the targeted replication lag
+ try:
+ data = server.get_replication_status()
+ if not data.get("success", False):
+ # Replication status unavailable; the agent or the database may not
+ # be in a state to report it yet, so just keep trying
+ return 0
+ replication_status = data.get("data", {}).get("slave_status", {})
+ if not replication_status:
+ # No replication status at all means the server is not
+ # replicating, i.e. the replication setup has failed
+ return -1
+
+ if (
+ # Redundant guard; an empty status already returned -1 above
+ not replication_status
+ # `Seconds_Behind_Master` should normally be present; if it is
+ # missing, replication is not running, so fail
+ or "Seconds_Behind_Master" not in replication_status
+ # Any replication error marks the process as a failure, since it is
+ # not safe to proceed and not easy to fix automatically
+ or replication_status.get("Last_Errno", 0) != 0
+ or replication_status.get("Last_IO_Errno", 0) != 0
+ or replication_status.get("Last_SQL_Errno", 0) != 0
+ or replication_status.get("Slave_IO_Running") != "Yes"
+ or replication_status.get("Slave_SQL_Running") != "Yes"
+ ):
+ """
+ While several things are being set up (configuration changes and the
+ like), replication can take some time to start.
+
+ So ignore transient network or connection issues here.
+ """
+ if replication_status and (
+ replication_status.get("Last_IO_Errno") == 2003
+ or "error reconnecting to master" in replication_status.get("Last_SQL_Errno", "")
+ ):
+ return 0
+
+ # Replication is not running
+ return -1
+
+ if (
+ replication_status.get("Seconds_Behind_Master", 10000000000) # Default to a large number
+ <= target_lag
+ ):
+ # Replication lag is less than the minimum required
+ return 1
+
+ return 0
+
+ except Exception:
+ return 0
+
+
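A minimal sketch of how a wait-style step consumes the -1/0/1 contract of `check_replication_lag` above, mirroring the mapping the `pre__wait_for_*` steps use further down (the wrapper name is illustrative):

```python
# Hypothetical wrapper over the -1/0/1 contract; StepStatus mirrors the
# functional Enum defined above.
from enum import Enum

StepStatus = Enum("StepStatus", ["Pending", "Running", "Skipped", "Success", "Failure"])


def lag_to_step_status(lag_status: int) -> StepStatus:
    if lag_status == -1:
        return StepStatus.Failure  # replication is broken, fail the step
    if lag_status == 1:
        return StepStatus.Success  # target lag reached
    return StepStatus.Running  # not there yet; the scheduler re-runs the step
```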
+class LogicalReplicationBackup(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.logical_replication_server.logical_replication_server import (
+ LogicalReplicationServer,
+ )
+ from press.press.doctype.logical_replication_step.logical_replication_step import (
+ LogicalReplicationStep,
+ )
+
+ bench_replication_config: DF.SmallText | None
+ database_server: DF.Link
+ duration: DF.Duration | None
+ end: DF.Datetime | None
+ execution_stage: DF.Literal["Pre-Migrate", "Post-Migrate", "Failover"]
+ failover_stage_status: DF.Literal["Pending", "Running", "Success", "Failure"]
+ failover_steps: DF.Table[LogicalReplicationStep]
+ initial_binlog_position_of_new_primary_db: DF.Data | None
+ post_migrate_stage_status: DF.Literal["Pending", "Running", "Success", "Failure"]
+ post_migrate_steps: DF.Table[LogicalReplicationStep]
+ pre_migrate_stage_status: DF.Literal["Pending", "Running", "Success", "Failure"]
+ pre_migrate_steps: DF.Table[LogicalReplicationStep]
+ server: DF.Link
+ server_snapshot: DF.Link | None
+ servers: DF.Table[LogicalReplicationServer]
+ site: DF.Link
+ site_replication_config: DF.SmallText | None
+ start: DF.Datetime | None
+ status: DF.Literal["Pending", "Running", "Success", "Failure"]
+ # end: auto-generated types
+
+ SyncStep = False
+ AsyncStep = True
+ Wait = True
+ NoWait = False
+
+ def get_steps__template(self, context: Literal["pre_migrate", "post_migrate", "failover"]):
+ SyncStep = False
+ AsyncStep = True # some external job
+ Wait = True
+ NoWait = False
+ PreMigrateStep = "pre_migrate"
+ PostMigrateStep = "post_migrate"
+ FailoverStep = "failover"
+
+ methods = [
+ (self.pre__validate_existing_replica_health, SyncStep, NoWait, PreMigrateStep),
+ (self.pre__create_consistent_server_snapshot, SyncStep, NoWait, PreMigrateStep),
+ (self.pre__wait_for_servers_to_be_online, SyncStep, Wait, PreMigrateStep),
+ (self.pre__wait_for_server_snapshot_to_be_ready, AsyncStep, NoWait, PreMigrateStep),
+ (self.pre__lock_server_snapshot, SyncStep, NoWait, PreMigrateStep),
+ (self.pre__provision_hot_standby_database_server, SyncStep, Wait, PreMigrateStep),
+ (self.pre__wait_for_hot_standby_volume_initialization, SyncStep, Wait, PreMigrateStep),
+ (self.pre__wait_for_hot_standby_database_server_to_be_ready, AsyncStep, NoWait, PreMigrateStep),
+ (self.pre__wait_for_minimal_replication_lag, SyncStep, Wait, PreMigrateStep),
+ (self.pre__enable_maintenance_mode_in_site, AsyncStep, NoWait, PreMigrateStep),
+ (self.pre__enable_read_only_mode_in_database_server, AsyncStep, NoWait, PreMigrateStep),
+ (self.pre__wait_for_database_server_to_be_available, SyncStep, Wait, PreMigrateStep),
+ (self.pre__remove_replication_configuration_from_site, SyncStep, NoWait, PreMigrateStep),
+ (self.pre__wait_for_complete_hot_standby_replica_syncing, SyncStep, Wait, PreMigrateStep),
+ (self.pre__wait_for_complete_other_replica_syncing, SyncStep, Wait, PreMigrateStep),
+ (self.pre__stop_hot_standby_database_replication, SyncStep, NoWait, PreMigrateStep),
+ (self.pre__stop_other_replica_database_replication, SyncStep, NoWait, PreMigrateStep),
+ (self.pre__disable_read_only_mode_from_database_server, SyncStep, NoWait, PreMigrateStep),
+ (self.pre__wait_for_database_server_to_be_available, SyncStep, Wait, PreMigrateStep),
+ ####################################################################################
+ (self.post__archive_hot_standby_database_server, SyncStep, NoWait, PostMigrateStep),
+ (self.post__activate_sites, AsyncStep, NoWait, PostMigrateStep),
+ (self.post__enable_replication_on_replica, SyncStep, NoWait, PostMigrateStep),
+ (self.post__wait_for_minimal_replication_lag_of_replica, SyncStep, Wait, PostMigrateStep),
+ (
+ self.post__restore_replication_configuration_of_site_and_bench,
+ AsyncStep,
+ NoWait,
+ PostMigrateStep,
+ ),
+ ####################################################################################
+ (self.failover__plan_and_assign_new_role_to_servers, SyncStep, NoWait, FailoverStep),
+ (
+ self.failover__configure_hot_standby_database_server_as_new_master,
+ SyncStep,
+ NoWait,
+ FailoverStep,
+ ),
+ (self.failover__update_reference_to_new_master_database_server, SyncStep, NoWait, FailoverStep),
+ (self.failover__update_new_master_database_server_plan_and_team, SyncStep, NoWait, FailoverStep),
+ (self.failover__gather_binlog_position_from_new_master, SyncStep, Wait, FailoverStep),
+ (self.failover__update_master_database_ref_in_replica_servers, SyncStep, NoWait, FailoverStep),
+ (self.failover__disable_read_only_mode_on_new_master, SyncStep, NoWait, FailoverStep),
+ (self.failover__wait_for_master_database_server_to_be_available, SyncStep, Wait, FailoverStep),
+ (self.failover__configure_sites_with_new_master_database_server, AsyncStep, NoWait, FailoverStep),
+ (self.failover__activate_sites, AsyncStep, NoWait, FailoverStep),
+ (self.failover__join_other_replicas_to_new_master, SyncStep, NoWait, FailoverStep),
+ (self.failover__start_replication_on_replica, SyncStep, NoWait, FailoverStep),
+ (self.failover__wait_for_minimal_replication_lag_of_replica, SyncStep, Wait, FailoverStep),
+ (
+ self.failover__restore_replication_configuration_of_site_and_bench,
+ AsyncStep,
+ NoWait,
+ FailoverStep,
+ ),
+ (self.failover__archive_old_primary_database_server, SyncStep, NoWait, FailoverStep),
+ (self.failover__create_subscription_for_new_master, SyncStep, NoWait, FailoverStep),
+ ]
+
+ steps = []
+ for (
+ method,
+ is_async,
+ wait_for_completion,
+ step_context,
+ ) in methods:
+ if step_context != context:
+ continue
+ steps.append(
+ {
+ "step": method.__doc__,
+ "method": method.__name__,
+ "is_async": is_async,
+ "wait_for_completion": wait_for_completion,
+ }
+ )
+ return steps
+
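Each `(method, is_async, wait_for_completion, stage)` tuple above is flattened into a child-table row; for example, the first pre-migrate entry produces a dict of this shape (values follow directly from the tuple convention):

```python
# "step" comes from the method's docstring, "method" from __name__;
# is_async / wait_for_completion carry the SyncStep / NoWait flags.
example_step = {
    "step": "Validate Existing Replica Health",
    "method": "pre__validate_existing_replica_health",
    "is_async": False,  # SyncStep
    "wait_for_completion": False,  # NoWait
}
```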
+ @property
+ def pre_migrate_steps__template(self):
+ return self.get_steps__template("pre_migrate")
+
+ @property
+ def post_migrate_steps__template(self):
+ return self.get_steps__template("post_migrate")
+
+ @property
+ def failover_steps__template(self):
+ return self.get_steps__template("failover")
+
+ @property
+ def stage_status(self):
+ if self.execution_stage == "Pre-Migrate":
+ return self.pre_migrate_stage_status
+ if self.execution_stage == "Post-Migrate":
+ return self.post_migrate_stage_status
+ if self.execution_stage == "Failover":
+ return self.failover_stage_status
+ frappe.throw("Invalid execution stage for getting stage status")
+ return None
+
+ @stage_status.setter
+ def stage_status(self, value: Literal["Pending", "Running", "Success", "Failure"]):
+ if self.execution_stage == "Pre-Migrate":
+ self.pre_migrate_stage_status = value
+ elif self.execution_stage == "Post-Migrate":
+ self.post_migrate_stage_status = value
+ elif self.execution_stage == "Failover":
+ self.failover_stage_status = value
+ else:
+ frappe.throw("Invalid execution stage for setting stage status.")
+
+ @property
+ def site_doc(self) -> "Site":
+ return frappe.get_doc("Site", self.site)
+
+ @property
+ def release_group_doc(self) -> "ReleaseGroup":
+ return frappe.get_doc("Release Group", frappe.db.get_value("Site", self.site, "group"))
+
+ @property
+ def server_snapshot_doc(self) -> "ServerSnapshot":
+ return frappe.get_doc("Server Snapshot", self.server_snapshot)
+
+ @property
+ def app_server_doc(self) -> "Server":
+ return frappe.get_doc("Server", self.server)
+
+ @property
+ def database_server_doc(self) -> "DatabaseServer":
+ return frappe.get_doc("Database Server", self.database_server)
+
+ @property
+ def replica_database_servers(self) -> list[str]:
+ return [s.database_server for s in self.servers if s.current_role == "Replica"]
+
+ @property
+ def new_replica_database_servers(self) -> list[str]:
+ return [s.database_server for s in self.servers if s.new_role == "Replica"]
+
+ @property
+ def replica_database_server_docs(self) -> list["DatabaseServer"]:
+ replica_servers = []
+ for s in self.servers:
+ if s.current_role == "Replica":
+ replica_servers.append(frappe.get_doc("Database Server", s.database_server))
+ return replica_servers
+
+ @property
+ def new_replica_database_server_docs(self) -> list["DatabaseServer"]:
+ new_replica_servers = []
+ for s in self.servers:
+ if s.new_role == "Replica":
+ new_replica_servers.append(frappe.get_doc("Database Server", s.database_server))
+ return new_replica_servers
+
+ @property
+ def hot_standby_database_server(self) -> "str | None":
+ server = None
+ for s in self.servers:
+ if s.current_role == "Hot Standby":
+ server = s.database_server
+
+ return server
+
+ @property
+ def hot_standby_database_server_doc(self) -> "DatabaseServer | None":
+ hot_standby_server = self.hot_standby_database_server
+ if not hot_standby_server:
+ return None
+ return frappe.get_doc("Database Server", hot_standby_server)
+
+ @property
+ def site_replication_config_dict(self) -> dict:
+ try:
+ return json.loads(self.site_replication_config or "{}")
+ except json.JSONDecodeError:
+ frappe.throw("Invalid site replication config JSON format.")
+
+ @property
+ def bench_replication_config_dict(self) -> dict:
+ try:
+ return json.loads(self.bench_replication_config or "{}")
+ except json.JSONDecodeError:
+ frappe.throw("Invalid bench replication config JSON format.")
+
+ def after_insert(self):
+ self.populate_server_infos()
+ self.add_steps()
+ self.store_db_replication_config_of_site(save=False)
+ self.store_replication_config_of_bench(save=False)
+ self.save()
+
+ def on_update(self):
+ stage_status_updated = (
+ self.has_value_changed("pre_migrate_stage_status")
+ or self.has_value_changed("post_migrate_stage_status")
+ or self.has_value_changed("failover_stage_status")
+ )
+ if self.has_value_changed("execution_stage") or stage_status_updated:
+ new_status = self.status
+ if self.execution_stage == "Pre-Migrate":
+ new_status = {
+ "Pending": "Pending",
+ "Running": "Running",
+ "Success": "Running", # Because some other stage need to run for overall status
+ "Failure": "Failure",
+ }[self.pre_migrate_stage_status]
+ elif self.execution_stage == "Post-Migrate":
+ new_status = {
+ "Pending": "Pending",
+ "Running": "Running",
+ "Success": "Success",
+ "Failure": "Failure",
+ }[self.post_migrate_stage_status]
+ elif self.execution_stage == "Failover":
+ new_status = {
+ "Pending": "Pending",
+ "Running": "Running",
+ "Success": "Success",
+ "Failure": "Failure",
+ }[self.failover_stage_status]
+
+ if self.status != new_status:
+ self.status = new_status
+ self.save()
+
+ if stage_status_updated:
+ self.callback_to_linked_site_update()
+
+ #########################################################
+ # Pre Migrate Steps #
+ #########################################################
+ def pre__validate_existing_replica_health(self):
+ """Validate Existing Replica Health"""
+ return StepStatus.Success
+
+ def pre__create_consistent_server_snapshot(self):
+ """Create Consistent Snapshot Of Servers"""
+ try:
+ self.server_snapshot = self.app_server_doc._create_snapshot(consistent=True)
+ self.save()
+ return StepStatus.Success
+ except Exception as e:
+ frappe.throw(f"Failed to create consistent server snapshot: {e}")
+ return StepStatus.Failure
+
+ def pre__wait_for_servers_to_be_online(self):
+ """Wait For Servers To Be Online"""
+
+ servers = [["Server", self.server], ["Database Server", self.database_server]]
+ for doctype, name in servers:
+ server: "BaseServer" = frappe.get_doc(doctype, name)
+ if server.status != "Active":
+ time.sleep(1)
+ return StepStatus.Running
+
+ server.ping_ansible()
+
+ plays = frappe.get_all(
+ "Ansible Play",
+ {"server": name, "play": "Ping Server"},
+ ["status"],
+ order_by="creation desc",
+ limit=1,
+ )
+
+ if not plays or plays[0].status in ["Pending", "Running", "Failure"]:
+ return StepStatus.Running
+
+ return StepStatus.Success
+
+ def pre__wait_for_server_snapshot_to_be_ready(self):
+ """Wait For Snapshot To Be Ready"""
+ status = frappe.get_value(
+ "Server Snapshot",
+ self.server_snapshot,
+ "status",
+ )
+ if status in ["Pending", "Processing"]:
+ return StepStatus.Running
+ if status == "Completed":
+ return StepStatus.Success
+ return StepStatus.Failure
+
+ def pre__lock_server_snapshot(self):
+ """Lock Server Snapshot"""
+ self.server_snapshot_doc.lock()
+ return StepStatus.Success
+
+ def pre__provision_hot_standby_database_server(self):
+ """Provision Hot Standby Database Server"""
+ hot_standby_database_server: "DatabaseServer" = self.server_snapshot_doc.create_server(
+ server_type="Database Server",
+ provision_db_replica=True,
+ create_subscription=False, # Don't charge hot standby database server
+ master_db_server=self.database_server,
+ title=self.database_server_doc.title + " (Hot Standby)",
+ team=self.database_server_doc.team,
+ plan=self.database_server_doc.plan,
+ press_job_arguments={
+ "logical_replication_backup": self.name,
+ },
+ )
+ self.append(
+ "servers",
+ {"current_role": "Hot Standby", "database_server": hot_standby_database_server, "new_role": ""},
+ )
+ self.save()
+ return StepStatus.Success
+
+ def pre__wait_for_hot_standby_volume_initialization(self):
+ """Wait For Hot Standby Volume Initialization"""
+ # Could be optimized to avoid spawning a background job just to wait
+ db_server_snapshot_size = frappe.get_value(
+ "Virtual Disk Snapshot",
+ frappe.get_value("Server Snapshot", self.server_snapshot, "app_server_snapshot"),
+ "size",
+ )
+ required_initialization_time = db_server_snapshot_size * 1024 / VOLUME_INITIALIZATION_RATE
+ server_creation_time = frappe.get_value(
+ "Database Server",
+ self.hot_standby_database_server,
+ "creation",
+ )
+ if (frappe.utils.now_datetime() - server_creation_time).total_seconds() > required_initialization_time:
+ return StepStatus.Success
+ return StepStatus.Running
+
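Assuming the snapshot `size` is stored in GB, the wait above is straightforward arithmetic; for instance, a 100 GB snapshot at the assumed 300 MB/s initialization rate needs just under six minutes:

```python
# Worked example of the computation above; the 100 GB size is illustrative.
VOLUME_INITIALIZATION_RATE = 300  # MB/s
db_server_snapshot_size = 100  # GB

required_initialization_time = db_server_snapshot_size * 1024 / VOLUME_INITIALIZATION_RATE
print(required_initialization_time)  # ~341.3 seconds
```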
+ def pre__wait_for_hot_standby_database_server_to_be_ready(self):
+ """Wait For Hot Standby Database Server To Be Ready"""
+ server = self.hot_standby_database_server_doc
+ if server.status != "Active":
+ return StepStatus.Running
+
+ # Check status of Create Server Press Job
+ press_job_status = frappe.db.get_value(
+ "Press Job",
+ {"server_type": "Database Server", "server": server.name, "job_type": "Create Server"},
+ "status",
+ )
+ if not press_job_status:
+ # Press Job might not have been created yet
+ return StepStatus.Running
+
+ if press_job_status in ["Pending", "Running"]:
+ return StepStatus.Running
+
+ if press_job_status == "Failure":
+ return StepStatus.Failure
+
+ # Check if replication setup done
+ if not server.is_replication_setup:
+ return StepStatus.Running
+
+ return StepStatus.Success
+
+ def pre__wait_for_minimal_replication_lag(self):
+ """Wait For Minimal Replication Lag"""
+ lag_status = check_replication_lag(
+ self.hot_standby_database_server_doc, MINIMUM_REPLICATION_LAG_FOR_TAKING_DOWNTIME
+ )
+ if lag_status == -1:
+ return StepStatus.Failure
+ return StepStatus.Success if lag_status == 1 else StepStatus.Running
+
+ def pre__enable_maintenance_mode_in_site(self):
+ """Enable Maintenance Mode For All Sites"""
+ status = self.deactivate_site()
+ if status == "Success":
+ return StepStatus.Success
+ if status == "Failure":
+ return StepStatus.Failure
+ return StepStatus.Running
+
+ def pre__enable_read_only_mode_in_database_server(self):
+ """Enable Read Only Mode In Database Server"""
+ self.database_server_doc.enable_read_only_mode(update_variables_synchronously=True)
+ return StepStatus.Success
+
+ def pre__wait_for_database_server_to_be_available(self):
+ """Wait For Database Server To Be Online"""
+ if self.database_server_doc.ping_mariadb():
+ return StepStatus.Success
+
+ return StepStatus.Running
+
+ def pre__remove_replication_configuration_from_site(self):
+ """Remove Replication Configuration From Site"""
+ self.remove_replication_config_from_site_and_bench()
+ return StepStatus.Success
+
+ def pre__wait_for_complete_hot_standby_replica_syncing(self):
+ """Wait For Complete Hot Standby Replica Syncing"""
+
+ lag_status = check_replication_lag(self.hot_standby_database_server_doc, 0)
+ if lag_status == -1:
+ return StepStatus.Failure
+ return StepStatus.Success if lag_status == 1 else StepStatus.Running
+
+ def pre__wait_for_complete_other_replica_syncing(self):
+ """Wait For Complete Other Replica Syncing"""
+
+ for replica in self.replica_database_server_docs:
+ lag_status = check_replication_lag(replica, 0)
+ if lag_status == -1:
+ return StepStatus.Failure
+ if lag_status == 0:
+ return StepStatus.Running
+
+ return StepStatus.Success
+
+ def pre__stop_hot_standby_database_replication(self):
+ """Stop Hot Standby Database Replication"""
+ self.hot_standby_database_server_doc.stop_replication()
+ return StepStatus.Success
+
+ def pre__stop_other_replica_database_replication(self):
+ """Stop Other Replica Database Replication"""
+ for replica in self.replica_database_server_docs:
+ replica.stop_replication()
+ return StepStatus.Success
+
+ def pre__disable_read_only_mode_from_database_server(self):
+ """Disable Read Only Mode From Database Server"""
+ self.database_server_doc.disable_read_only_mode(update_variables_synchronously=True)
+ return StepStatus.Success
+
+ #########################################################
+ # Post Migrate Steps #
+ #########################################################
+ def post__archive_hot_standby_database_server(self):
+ """Archive Hot Standby Database Server"""
+
+ # Don't block the flow for archival failure
+ # TODO: Add some retry mechanism to archive the server later
+ try:
+ self.hot_standby_database_server_doc.archive()
+ row = None
+ for s in self.servers:
+ if s.current_role == "Hot Standby":
+ row = s
+ break
+ if row:
+ row.archived = 1
+ self.save()
+ except Exception:
+ self.add_comment(
+ "Comment",
+ "Error archiving hot standby database server - " + self.hot_standby_database_server,
+ )
+
+ return StepStatus.Success
+
+ def post__activate_sites(self):
+ """Activate Sites"""
+ status = self.activate_site()
+ if status == "Success":
+ return StepStatus.Success
+ if status == "Failure":
+ return StepStatus.Failure
+ return StepStatus.Running
+
+ def post__enable_replication_on_replica(self):
+ """Enable Replication On Replica"""
+ for replica in self.replica_database_server_docs:
+ replica.start_replication()
+ return StepStatus.Success
+
+ # TODO: rename this function
+ def post__wait_for_minimal_replication_lag_of_replica(self):
+ """Wait For Minimal Replication Lag Of Replica"""
+
+ """
+ TODO:
+ A failure here should not trigger a site recovery;
+ just discard the replica.
+ """
+
+ for replica in self.replica_database_server_docs:
+ lag_status = check_replication_lag(replica, 300)
+ if lag_status == -1:
+ return StepStatus.Failure
+ if lag_status == 0:
+ return StepStatus.Running
+
+ return StepStatus.Success
+
+ def post__restore_replication_configuration_of_site_and_bench(self):
+ """Restore Replication Configuration Of Site And Bench"""
+ self.add_replication_config_to_site_and_bench()
+ return StepStatus.Success
+
+ #########################################################
+ # Failover Steps #
+ #########################################################
+ def failover__plan_and_assign_new_role_to_servers(self):
+ """Plan And Assign New Role To Servers"""
+ hot_standby_server = next((s for s in self.servers if s.current_role == "Hot Standby"), None)
+ if hot_standby_server:
+ hot_standby_server.new_role = "Master"
+
+ old_primary_server = next((s for s in self.servers if s.current_role == "Master"), None)
+ if old_primary_server:
+ old_primary_server.new_role = "Retired"
+
+ replica_servers = [s for s in self.servers if s.current_role == "Replica"]
+ for s in replica_servers:
+ s.new_role = "Replica"
+
+ self.save()
+ return StepStatus.Success
+
+ def failover__configure_hot_standby_database_server_as_new_master(self):
+ """Configure Hot Standby Database Server As New Master"""
+ self.hot_standby_database_server_doc.reset_replication()
+ return StepStatus.Success
+
+ def failover__update_reference_to_new_master_database_server(self):
+ """Update Reference To New Master Database Server"""
+ server = self.app_server_doc
+ server.database_server = self.hot_standby_database_server
+ server.save()
+ return StepStatus.Success
+
+ def failover__update_new_master_database_server_plan_and_team(self):
+ """Update New Master Database Server Plan And Team"""
+ # Update the plan and team of the new master database server
+ server = self.hot_standby_database_server_doc
+ old_db = self.database_server_doc
+ server.plan = old_db.plan # handles ARM and x86_64 plans automatically
+ server.team = old_db.team
+ server.title = old_db.title
+ server.save()
+ return StepStatus.Success
+
+ def failover__gather_binlog_position_from_new_master(self):
+ """Gather Binlog Position From New Master Database Server"""
+ self.initial_binlog_position_of_new_primary_db = (
+ self.hot_standby_database_server_doc.get_replication_status()
+ .get("data", {})
+ .get("gtid_current_pos", "")
+ )
+ if not self.initial_binlog_position_of_new_primary_db:
+ frappe.throw("Failed to gather initial binlog position from new master database server")
+ self.save()
+ return StepStatus.Success
+
+ def failover__update_master_database_ref_in_replica_servers(self):
+ """Update Master Database Info In Replica Servers"""
+ for replica in self.replica_database_server_docs:
+ frappe.db.set_value("Database Server", replica.name, "primary", self.hot_standby_database_server)
+ return StepStatus.Success
+
+ def failover__disable_read_only_mode_on_new_master(self):
+ """Disable Read Only Mode From New Master Database Server"""
+ self.hot_standby_database_server_doc.disable_read_only_mode(update_variables_synchronously=True)
+ return StepStatus.Success
+
+ def failover__wait_for_master_database_server_to_be_available(self):
+ """Wait For Master Database Server To Be Available"""
+ if self.hot_standby_database_server_doc.ping_mariadb():
+ return StepStatus.Success
+
+ return StepStatus.Running
+
+ def failover__configure_sites_with_new_master_database_server(self):
+ """Configure Sites With New Master Database Server"""
+
+ # Check if an Update Database Host job already exists
+ if frappe.db.count(
+ "Agent Job",
+ {
+ "job_type": "Update Database Host",
+ "reference_doctype": self.doctype,
+ "reference_name": self.name,
+ },
+ ):
+ status = frappe.db.get_value(
+ "Agent Job",
+ {
+ "job_type": "Update Database Host",
+ "reference_doctype": self.doctype,
+ "reference_name": self.name,
+ },
+ "status",
+ order_by="modified desc",
+ )
+ if status in ("Pending", "Running"):
+ return StepStatus.Running
+ if status == "Success":
+ return StepStatus.Success
+ if status == "Failure":
+ return StepStatus.Failure
+
+ agent = self.app_server_doc.agent
+ """
+ Changing the database server on the Server doctype should automatically
+ update the database host in all benches associated with that server.
+
+ We check here in this doctype as well, to reduce the chance of being
+ affected by external changes.
+ """
+
+ hot_standby_database_server = self.hot_standby_database_server
+
+ benches = frappe.get_all("Bench", {"server": self.server, "status": ("!=", "Archived")})
+ for bench in benches:
+ bench: "Bench" = frappe.get_doc("Bench", bench)
+ if bench.database_server != hot_standby_database_server:
+ # We will do a bulk update, so avoid automatically triggering the update bench config job
+ bench.flags.avoid_triggerring_update_bench_config_job = True
+ bench.database_server = hot_standby_database_server
+ bench.save()
+
+ agent.update_database_host_in_all_benches(
+ self.hot_standby_database_server_doc.private_ip, self.doctype, self.name
+ )
+ return StepStatus.Running
+
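The step above follows the same idempotent agent-job pattern as `deactivate_site`/`activate_site` further down: poll for an existing job first, enqueue only when none exists, and report a non-terminal status until the job settles. A condensed sketch of that pattern, assuming frappe's query API (the helper name is illustrative):

```python
import frappe


# Hypothetical helper condensing the poll-or-enqueue pattern used above.
def poll_or_enqueue_agent_job(doc, job_type, enqueue):
    """Return the latest Agent Job status, enqueuing the job on the first call."""
    filters = {
        "job_type": job_type,
        "reference_doctype": doc.doctype,
        "reference_name": doc.name,
    }
    if frappe.db.count("Agent Job", filters):
        # A job already exists: report its latest status, never enqueue twice
        return frappe.db.get_value("Agent Job", filters, "status", order_by="modified desc")
    enqueue()  # first run: fire the agent job once
    return "Pending"
```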
+ def failover__activate_sites(self):
+ """Activate Sites"""
+ return self.post__activate_sites()
+
+ def failover__join_other_replicas_to_new_master(self):
+ """Join Other Replicas To New Master"""
+ for replica in self.new_replica_database_server_docs:
+ replica.configure_replication(gtid_slave_pos=self.initial_binlog_position_of_new_primary_db)
+ return StepStatus.Success
+
+ def failover__start_replication_on_replica(self):
+ """Start Replication On Replica"""
+ for replica in self.new_replica_database_server_docs:
+ replica.start_replication()
+ return StepStatus.Success
+
+ def failover__wait_for_minimal_replication_lag_of_replica(self):
+ """Wait For Minimal Replication Lag Of Replica"""
+
+ for replica in self.new_replica_database_server_docs:
+ lag_status = check_replication_lag(replica, 300)
+ if lag_status == -1:
+ return StepStatus.Failure
+ if lag_status == 0:
+ return StepStatus.Running
+
+ return StepStatus.Success
+
+ def failover__restore_replication_configuration_of_site_and_bench(self):
+ """Restore Replication Configuration Of Site And Bench"""
+ return self.post__restore_replication_configuration_of_site_and_bench()
+
+ def failover__archive_old_primary_database_server(self):
+ """Archive Old Primary Database Server"""
+ # Just unset team / plan / disable subscription
+ try:
+ self.database_server_doc.archive()
+ row = None
+ for s in self.servers:
+ if s.current_role == "Master":
+ row = s
+ break
+ if row:
+ row.archived = 1
+ self.save()
+ except Exception:
+ self.add_comment(
+ "Comment",
+ "Error archiving primary database server - " + self.database_server,
+ )
+ return StepStatus.Success
+
+ def failover__create_subscription_for_new_master(self):
+ """Create Subscription For New Master Database Server"""
+ return StepStatus.Success
+
+ #########################################################
+ # Common Steps / Private Methods #
+ #########################################################
+ def deactivate_site(self):
+ # Check if any deactivate site job already exists
+ if frappe.db.count(
+ "Agent Job",
+ {
+ "site": self.site,
+ "job_type": "Deactivate Site",
+ "reference_doctype": self.doctype,
+ "reference_name": self.name,
+ },
+ ):
+ return frappe.db.get_value(
+ "Agent Job",
+ {
+ "site": self.site,
+ "job_type": "Deactivate Site",
+ "reference_doctype": self.doctype,
+ "reference_name": self.name,
+ },
+ "status",
+ order_by="modified desc",
+ )
+
+ # Deactivate the site
+ self.app_server_doc.agent.deactivate_site(
+ self.site_doc, reference_doctype=self.doctype, reference_name=self.name
+ )
+ return "Pending"
+
+ def activate_site(self):
+ if frappe.db.count(
+ "Agent Job",
+ {
+ "site": self.site,
+ "job_type": "Activate Site",
+ "reference_doctype": self.doctype,
+ "reference_name": self.name,
+ },
+ ):
+ return frappe.db.get_value(
+ "Agent Job",
+ {
+ "site": self.site,
+ "job_type": "Activate Site",
+ "reference_doctype": self.doctype,
+ "reference_name": self.name,
+ },
+ "status",
+ order_by="modified desc",
+ )
+
+ self.app_server_doc.agent.activate_site(
+ self.site_doc, reference_doctype=self.doctype, reference_name=self.name
+ )
+ return "Pending"
+
+ def store_db_replication_config_of_site(self, save: bool = True):
+ config = {}
+ site = self.site_doc
+ for key in REPLICATION_CONFIG_KEYS:
+ value = site.get_config_value_for_key(key)
+ if value is not None:
+ config[key] = value
+
+ self.site_replication_config = json.dumps(config, indent=2)
+ if save:
+ self.save()
+
+ def store_replication_config_of_bench(self, save: bool = True):
+ config = {}
+ common_site_config: dict = json.loads(self.release_group_doc.common_site_config)
+ for key in REPLICATION_CONFIG_KEYS:
+ value = common_site_config.get(key)
+ if value is not None:
+ config[key] = value
+
+ self.bench_replication_config = json.dumps(config, indent=2)
+ if save:
+ self.save()
+
+ def remove_replication_config_from_site_and_bench(self):
+ site = self.site_doc
+ release_group = self.release_group_doc
+
+ site.delete_multiple_config(self.site_replication_config_dict.keys())
+
+ for key in self.bench_replication_config_dict:
+ release_group.delete_config(key)
+
+ def add_replication_config_to_site_and_bench(self):
+ self.site_doc.update_config(self.site_replication_config_dict)
+ self.release_group_doc.update_config(self.bench_replication_config_dict)
+
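For reference, with the three `REPLICATION_CONFIG_KEYS` defined at the top of the module, the stored `site_replication_config` that this round-trip removes and restores might look like this (values illustrative):

```python
import json

# Illustrative payload persisted by store_db_replication_config_of_site()
site_replication_config = json.dumps(
    {
        "read_from_replica": True,
        "allow_reads_during_maintenance": False,
        "replica_host": "replica.example.com",  # hypothetical host
    },
    indent=2,
)
```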
+ #########################################################
+ # Internal Methods #
+ #########################################################
+ def populate_server_infos(self):
+ self.append(
+ "servers",
+ {"current_role": "Master", "database_server": self.database_server, "new_role": ""},
+ )
+ # Find the replica servers
+ replica_servers = frappe.get_all(
+ "Database Server",
+ {"is_primary": False, "primary": self.database_server, "status": "Active"},
+ pluck="name",
+ )
+ for replica in replica_servers:
+ self.append(
+ "servers",
+ {"current_role": "Replica", "database_server": replica, "new_role": ""},
+ )
+
+ # Do validation that the database server doesn't have any broken replication
+ if frappe.db.count(
+ "Database Server",
+ {
+ "primary": self.database_server,
+ "is_primary": 0,
+ "status": ("in", ["Pending", "Installing", "Broken"]),
+ },
+ ):
+ frappe.throw(
+ f"Database Server {self.database_server} has few inactive replicas. Please fix it before proceeding."
+ )
+
+ def add_steps(self):
+ for step in self.pre_migrate_steps__template:
+ step.update({"status": "Pending"})
+ self.append("pre_migrate_steps", step)
+
+ for step in self.post_migrate_steps__template:
+ step.update({"status": "Pending"})
+ self.append("post_migrate_steps", step)
+
+ for step in self.failover_steps__template:
+ step.update({"status": "Pending"})
+ self.append("failover_steps", step)
+
+ def callback_to_linked_site_update(self):
+ from press.press.doctype.site_update.site_update import (
+ process_callback_from_logical_replication_backup,
+ )
+
+ process_callback_from_logical_replication_backup(self)
+
+ @frappe.whitelist()
+ def execute(self):
+ if self.stage_status == "Running":
+ frappe.msgprint("Replication is already in Running state. It will be executed soon.")
+ return
+ # Just set to Running, scheduler will pick it up
+ self.stage_status = "Running"
+ if not self.start:
+ self.start = frappe.utils.now_datetime()
+ self.save()
+ self.next()
+
+ def fail(self, save: bool = True) -> None:
+ self.stage_status = "Failure"
+ for step in self.current_execution_steps:
+ if step.stage_status == "Pending":
+ step.stage_status = "Skipped"
+ self.end = frappe.utils.now_datetime()
+ self.duration = frappe.utils.cint((self.end - self.start).total_seconds())
+ if save:
+ self.save(ignore_version=True)
+
+ def finish(self) -> None:
+ # if stage_status is already Success or Failure, then don't update the stage_status and durations
+ if self.stage_status not in ("Success", "Failure"):
+ self.stage_status = "Success" if self.is_restoration_steps_successful() else "Failure"
+ self.end = frappe.utils.now_datetime()
+ self.duration = frappe.utils.cint((self.end - self.start).total_seconds())
+
+ self.save()
+
+ @frappe.whitelist()
+ def next(self) -> None:
+ if self.stage_status != "Running" and self.stage_status not in ("Success", "Failure"):
+ self.stage_status = "Running"
+ self.save(ignore_version=True)
+
+ next_step_to_run = None
+
+ # Check if current_step is running
+ current_running_step = self.current_running_step
+ if current_running_step:
+ next_step_to_run = current_running_step
+ elif self.next_step:
+ next_step_to_run = self.next_step
+
+ if not next_step_to_run:
+ # We've executed everything
+ self.finish()
+ return
+
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "execute_step",
+ step_name=next_step_to_run.name,
+ enqueue_after_commit=True,
+ deduplicate=next_step_to_run.wait_for_completion
+ is False, # Don't deduplicate if wait_for_completion is True
+ job_id=f"logical_replication||{self.name}||{next_step_to_run.name}",
+ timeout=600,
+ )
+
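The deterministic `job_id` above is what makes deduplication work: two `next()` calls for the same pending step compute the same id, so the second enqueue is dropped when `deduplicate` is set. A minimal sketch (document and step names are illustrative):

```python
# Sketch of the job-id scheme used by next().
def job_id(backup_name: str, step_name: str) -> str:
    return f"logical_replication||{backup_name}||{step_name}"


assert job_id("LRB-0001", "step-01") == job_id("LRB-0001", "step-01")
```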
+ @frappe.whitelist(allow_guest=True)
+ def retry(self):
+ # Reset the states
+ self.stage_status = "Pending"
+ if not self.start:
+ self.start = frappe.utils.now_datetime()
+ self.end = None
+ self.duration = None
+ for step in self.current_execution_steps:
+ step.status = "Pending"
+ self.save(ignore_version=True)
+
+ @frappe.whitelist()
+ def force_continue(self) -> None:
+ # Mark all failed and skipped steps as pending
+ for step in self.current_execution_steps:
+ if step.status in ("Failure", "Skipped"):
+ step.status = "Pending"
+
+ self.stage_status = "Running"
+ self.save()
+
+ self.next()
+
+ @frappe.whitelist()
+ def force_fail(self) -> None:
+ # Mark all pending steps as failure
+ for step in self.current_execution_steps:
+ if step.status == "Pending":
+ step.status = "Failure"
+ self.stage_status = "Failure"
+ self.save()
+
+ @property
+ def current_execution_steps(self) -> list["LogicalReplicationStep"]:
+ if self.execution_stage == "Pre-Migrate":
+ return self.pre_migrate_steps
+ if self.execution_stage == "Post-Migrate":
+ return self.post_migrate_steps
+ if self.execution_stage == "Failover":
+ return self.failover_steps
+
+ frappe.throw(f"Invalid execution stage: {self.execution_stage}")
+ return None
+
+ @property
+ def current_running_step(self) -> "LogicalReplicationStep | None":
+ for step in self.current_execution_steps:
+ if step.status == "Running":
+ return step
+ return None
+
+ @property
+ def next_step(self) -> "LogicalReplicationStep | None":
+ for step in self.current_execution_steps:
+ if step.status == "Pending":
+ return step
+ return None
+
+ def is_restoration_steps_successful(self) -> bool:
+ return all(step.status == "Success" for step in self.current_execution_steps)
+
+ @frappe.whitelist()
+ def execute_step(self, step_name):
+ step = self.get_step(step_name)
+
+ if not step.start:
+ step.start = frappe.utils.now_datetime()
+ try:
+ result = getattr(self, step.method)()
+ step.status = result.name
+ """
+ If the step is async and the function has returned Running,
+ save the document and return.
+
+ Some external process will resume the job later
+ """
+ if step.is_async and result == StepStatus.Running:
+ self.save(ignore_version=True)
+ return
+
+ """
+ If the step is sync and marked to wait for completion,
+ wait for the function to complete.
+ """
+ if step.wait_for_completion and result == StepStatus.Running:
+ step.attempts = step.attempts + 1 if step.attempts else 1
+ self.save(ignore_version=True)
+ time.sleep(1)
+
+ except Exception:
+ step.status = "Failure"
+ step.traceback = frappe.get_traceback(with_context=True)
+
+ step.end = frappe.utils.now_datetime()
+ step.duration = (step.end - step.start).total_seconds()
+
+ if step.status == "Failure":
+ self.fail(save=True)
+ else:
+ self.save(ignore_version=True)
+ self.next()
+
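Note that `step.status = result.name` works because the functional `Enum` used for `StepStatus` exposes member names that exactly match the Select options on Logical Replication Step:

```python
from enum import Enum

StepStatus = Enum("StepStatus", ["Pending", "Running", "Skipped", "Success", "Failure"])

# .name yields the string stored on the child-table row
assert StepStatus.Success.name == "Success"
assert StepStatus.Running.name == "Running"
```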
+ def get_step(self, step_name) -> "LogicalReplicationStep | None":
+ for step in self.current_execution_steps:
+ if step.name == step_name:
+ return step
+ return None
+
+ def get_step_by_method(self, method_name) -> "LogicalReplicationStep | None":
+ for step in self.current_execution_steps:
+ if step.method == method_name:
+ return step
+ return None
+
+ def ansible_run(self, command):
+ inventory = f"{self.virtual_machine.public_ip_address},"
+ result = AnsibleAdHoc(sources=inventory).run(command, self.name)[0]
+ self.add_command(command, result)
+ return result
+
+ def add_command(self, command, result):
+ pretty_result = json.dumps(result, indent=2, sort_keys=True, default=str)
+ comment = f"{command}{pretty_result} "
+ self.add_comment(text=comment)
+
+
+def process_logical_replication_backup_deactivate_site_job_update(job):
+ if job.reference_doctype != "Logical Replication Backup":
+ return
+ if job.status not in ["Success", "Failure", "Delivery Failure"]:
+ return
+ doc: LogicalReplicationBackup = frappe.get_doc("Logical Replication Backup", job.reference_name)
+ doc.next()
+
+
+def process_logical_replication_backup_activate_site_job_update(job):
+ if job.reference_doctype != "Logical Replication Backup":
+ return
+ if job.status not in ["Success", "Failure", "Delivery Failure"]:
+ return
+ doc: LogicalReplicationBackup = frappe.get_doc("Logical Replication Backup", job.reference_name)
+ doc.next()
+
+
+def process_logical_replication_backup_update_database_host_job_update(job):
+ if job.reference_doctype != "Logical Replication Backup":
+ return
+ if job.status not in ["Success", "Failure", "Delivery Failure"]:
+ return
+ doc: LogicalReplicationBackup = frappe.get_doc("Logical Replication Backup", job.reference_name)
+ doc.next()
+
+
+def get_logical_replication_backup_restoration_steps(
+ name: str, stage: Literal["Pre-Migrate", "Post-Migrate", "Failover"]
+) -> list[dict]:
+ """
+ {
+ "title": "Step Name",
+ "status": "Success",
+ "output": "Output",
+ "stage": "Restore Backup"
+ }
+ """
+ parent_field = {
+ "Pre-Migrate": "pre_migrate_steps",
+ "Post-Migrate": "post_migrate_steps",
+ "Failover": "failover_steps",
+ }[stage]
+ steps = frappe.get_all(
+ "Logical Replication Step",
+ {
+ "parent": name,
+ "parenttype": "Logical Replication Backup",
+ "parentfield": parent_field,
+ },
+ ["name", "step", "status", "traceback"],
+ order_by="idx",
+ )
+
+ return [
+ {
+ "title": step["step"],
+ "status": step["status"],
+ "output": "" if not step.get("traceback") else step["traceback"],
+ "stage": stage.replace("-", " "),
+ "name": step["name"],
+ }
+ for step in steps
+ ]
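A hypothetical call for the pre-migrate stage returns rows shaped like the docstring above ("LRB-0001" is an illustrative document name):

```python
steps = get_logical_replication_backup_restoration_steps("LRB-0001", "Pre-Migrate")
# e.g. [{"title": "Validate Existing Replica Health", "status": "Success",
#        "output": "", "stage": "Pre Migrate", "name": "..."}]
```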
diff --git a/press/press/doctype/logical_replication_backup/test_logical_replication_backup.py b/press/press/doctype/logical_replication_backup/test_logical_replication_backup.py
new file mode 100644
index 00000000000..eb780ab297f
--- /dev/null
+++ b/press/press/doctype/logical_replication_backup/test_logical_replication_backup.py
@@ -0,0 +1,20 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests import IntegrationTestCase
+
+# On IntegrationTestCase, the doctype test records and all
+# link-field test record dependencies are recursively loaded
+# Use these module variables to add/remove to/from that list
+EXTRA_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
+IGNORE_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
+
+
+class IntegrationTestLogicalReplicationBackup(IntegrationTestCase):
+ """
+ Integration tests for LogicalReplicationBackup.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/logical_replication_server/__init__.py b/press/press/doctype/logical_replication_server/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/logical_replication_server/logical_replication_server.json b/press/press/doctype/logical_replication_server/logical_replication_server.json
new file mode 100644
index 00000000000..44dda5d3464
--- /dev/null
+++ b/press/press/doctype/logical_replication_server/logical_replication_server.json
@@ -0,0 +1,64 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-08-14 17:35:36.329972",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "database_server",
+ "current_role",
+ "new_role",
+ "archived"
+ ],
+ "fields": [
+ {
+ "fieldname": "database_server",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Database Server",
+ "options": "Database Server",
+ "read_only": 1,
+ "reqd": 1
+ },
+ {
+ "fieldname": "current_role",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Current Role",
+ "options": "Master\nReplica\nHot Standby",
+ "read_only": 1,
+ "reqd": 1
+ },
+ {
+ "fieldname": "new_role",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "New Role",
+ "options": "\nMaster\nReplica\nHot Standby\nRetired",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "archived",
+ "fieldtype": "Check",
+ "in_list_view": 1,
+ "label": "Archived",
+ "read_only": 1
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2025-08-21 02:43:26.472015",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Logical Replication Server",
+ "owner": "Administrator",
+ "permissions": [],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/press/doctype/logical_replication_server/logical_replication_server.py b/press/press/doctype/logical_replication_server/logical_replication_server.py
new file mode 100644
index 00000000000..37a01e36ba6
--- /dev/null
+++ b/press/press/doctype/logical_replication_server/logical_replication_server.py
@@ -0,0 +1,26 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class LogicalReplicationServer(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ archived: DF.Check
+ current_role: DF.Literal["Master", "Replica", "Hot Standby"]
+ database_server: DF.Link
+ new_role: DF.Literal["", "Master", "Replica", "Hot Standby", "Retired"]
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/logical_replication_step/__init__.py b/press/press/doctype/logical_replication_step/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/logical_replication_step/logical_replication_step.json b/press/press/doctype/logical_replication_step/logical_replication_step.json
new file mode 100644
index 00000000000..dedb68eca95
--- /dev/null
+++ b/press/press/doctype/logical_replication_step/logical_replication_step.json
@@ -0,0 +1,115 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-08-14 15:54:52.129009",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "step",
+ "status",
+ "method",
+ "column_break_dwne",
+ "start",
+ "end",
+ "duration",
+ "column_break_wkvt",
+ "is_async",
+ "wait_for_completion",
+ "attempts",
+ "section_break_fgrq",
+ "traceback"
+ ],
+ "fields": [
+ {
+ "fieldname": "step",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Step",
+ "reqd": 1
+ },
+ {
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Status",
+ "options": "Pending\nScheduled\nRunning\nSkipped\nSuccess\nFailure",
+ "reqd": 1
+ },
+ {
+ "fieldname": "method",
+ "fieldtype": "Data",
+ "label": "Method",
+ "reqd": 1
+ },
+ {
+ "fieldname": "column_break_dwne",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "start",
+ "fieldtype": "Datetime",
+ "label": "Start",
+ "read_only": 1
+ },
+ {
+ "fieldname": "end",
+ "fieldtype": "Datetime",
+ "label": "End",
+ "read_only": 1
+ },
+ {
+ "fieldname": "duration",
+ "fieldtype": "Duration",
+ "label": "Duration",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_wkvt",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "is_async",
+ "fieldtype": "Check",
+ "label": "Is Async",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "wait_for_completion",
+ "fieldtype": "Check",
+ "label": "Wait For Completion",
+ "read_only": 1
+ },
+ {
+ "fieldname": "attempts",
+ "fieldtype": "Int",
+ "label": "Attempts",
+ "read_only": 1
+ },
+ {
+ "fieldname": "section_break_fgrq",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "traceback",
+ "fieldtype": "Code",
+ "label": "Traceback"
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2025-08-21 10:56:47.022439",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Logical Replication Step",
+ "owner": "Administrator",
+ "permissions": [],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/press/doctype/logical_replication_step/logical_replication_step.py b/press/press/doctype/logical_replication_step/logical_replication_step.py
new file mode 100644
index 00000000000..ee1d7ed4826
--- /dev/null
+++ b/press/press/doctype/logical_replication_step/logical_replication_step.py
@@ -0,0 +1,34 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+import frappe
+from frappe.model.document import Document
+
+
+class LogicalReplicationStep(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ attempts: DF.Int
+ duration: DF.Duration | None
+ end: DF.Datetime | None
+ is_async: DF.Check
+ method: DF.Data
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ start: DF.Datetime | None
+ status: DF.Literal["Pending", "Scheduled", "Running", "Skipped", "Success", "Failure"]
+ step: DF.Data
+ traceback: DF.Code | None
+ wait_for_completion: DF.Check
+ # end: auto-generated types
+
+ def validate(self):
+ if self.is_async and self.wait_for_completion:
+ frappe.throw("Cannot wait for completion on async kind of step")
diff --git a/press/press/doctype/mail_log/mail_log.json b/press/press/doctype/mail_log/mail_log.json
index 9b3a46132c4..6cb7dd33e62 100644
--- a/press/press/doctype/mail_log/mail_log.json
+++ b/press/press/doctype/mail_log/mail_log.json
@@ -8,14 +8,17 @@
"engine": "InnoDB",
"field_order": [
"unique_token",
- "site",
+ "status",
"message_id",
+ "column_break_drnq",
+ "site",
"subscription_key",
- "date",
+ "column_break_erbe",
"sender",
"recipient",
+ "date",
+ "section_break_uslz",
"message",
- "status",
"log"
],
"fields": [
@@ -74,7 +77,7 @@
},
{
"fieldname": "message_id",
- "fieldtype": "Data",
+ "fieldtype": "Small Text",
"label": "Message Id",
"read_only": 1
},
@@ -86,14 +89,27 @@
},
{
"fieldname": "message",
- "fieldtype": "Data",
+ "fieldtype": "Code",
"label": "Message",
"read_only": 1
+ },
+ {
+ "fieldname": "column_break_drnq",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "column_break_erbe",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "section_break_uslz",
+ "fieldtype": "Section Break"
}
],
+ "grid_page_length": 50,
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2022-01-16 13:07:36.587086",
+ "modified": "2025-10-06 16:21:14.518847",
"modified_by": "Administrator",
"module": "Press",
"name": "Mail Log",
@@ -113,7 +129,9 @@
"write": 1
}
],
+ "row_format": "Dynamic",
"sort_field": "modified",
"sort_order": "DESC",
+ "states": [],
"track_changes": 1
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/mail_log/mail_log.py b/press/press/doctype/mail_log/mail_log.py
index e766d383b00..49637dface2 100644
--- a/press/press/doctype/mail_log/mail_log.py
+++ b/press/press/doctype/mail_log/mail_log.py
@@ -1,9 +1,35 @@
# Copyright (c) 2021, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
-# import frappe
+import frappe
from frappe.model.document import Document
class MailLog(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ date: DF.Date | None
+ log: DF.Code | None
+ message: DF.Code | None
+ message_id: DF.SmallText | None
+ recipient: DF.Data | None
+ sender: DF.Data | None
+ site: DF.Data | None
+ status: DF.Data | None
+ subscription_key: DF.Data | None
+ unique_token: DF.Data | None
+ # end: auto-generated types
+
pass
+
+
+def on_doctype_update():
+ frappe.db.add_index("Mail Log", ["site", "status"])
+ frappe.db.add_index("Mail Log", ["site", "creation"])
diff --git a/press/press/doctype/mail_log/test_mail_log.py b/press/press/doctype/mail_log/test_mail_log.py
index d0deba9ea44..ce6578f5ecb 100644
--- a/press/press/doctype/mail_log/test_mail_log.py
+++ b/press/press/doctype/mail_log/test_mail_log.py
@@ -2,8 +2,8 @@
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestMailLog(unittest.TestCase):
+class TestMailLog(FrappeTestCase):
pass
diff --git a/press/press/doctype/mail_setup/mail_setup.py b/press/press/doctype/mail_setup/mail_setup.py
index 1e3ccfb1904..5fd2f324ec4 100644
--- a/press/press/doctype/mail_setup/mail_setup.py
+++ b/press/press/doctype/mail_setup/mail_setup.py
@@ -6,4 +6,16 @@
class MailSetup(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ is_complete: DF.Check
+ site: DF.Link | None
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/mail_setup/test_mail_setup.py b/press/press/doctype/mail_setup/test_mail_setup.py
index a2e95e70396..e96d8fb64e9 100644
--- a/press/press/doctype/mail_setup/test_mail_setup.py
+++ b/press/press/doctype/mail_setup/test_mail_setup.py
@@ -2,8 +2,8 @@
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestMailSetup(unittest.TestCase):
+class TestMailSetup(FrappeTestCase):
pass
diff --git a/press/press/doctype/malware_scan/malware_scan.js b/press/press/doctype/malware_scan/malware_scan.js
index 504a0eda9c7..8fca73bd8f6 100644
--- a/press/press/doctype/malware_scan/malware_scan.js
+++ b/press/press/doctype/malware_scan/malware_scan.js
@@ -3,9 +3,9 @@
frappe.ui.form.on('Malware Scan', {
refresh: function (frm) {
- if (['Failure', 'Success'].includes(frm.doc.status)) {
+ if (['Installed', 'Failure', 'Success'].includes(frm.doc.status)) {
frm.add_custom_button(__('Start'), () => {
- frappe.confirm('Are you sure you want to retry the scan?', () =>
+ frappe.confirm('Are you sure you want to start the scan?', () =>
frm.call('start'),
);
});
diff --git a/press/press/doctype/malware_scan/malware_scan.json b/press/press/doctype/malware_scan/malware_scan.json
index 5added4e5a9..7c585bb8eff 100644
--- a/press/press/doctype/malware_scan/malware_scan.json
+++ b/press/press/doctype/malware_scan/malware_scan.json
@@ -45,13 +45,13 @@
"fieldname": "status",
"fieldtype": "Select",
"label": "Status",
- "options": "Pending\nRunning\nClean\nInfected\nFailure",
+ "options": "Pending\nInstalled\nNot Installed\nRunning\nClean\nInfected\nFailure",
"reqd": 1
}
],
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2022-10-12 23:23:45.104024",
+ "modified": "2023-11-22 17:24:35.104095",
"modified_by": "Administrator",
"module": "Press",
"name": "Malware Scan",
diff --git a/press/press/doctype/malware_scan/malware_scan.py b/press/press/doctype/malware_scan/malware_scan.py
index 20e4baa445e..d90cb9c60f8 100644
--- a/press/press/doctype/malware_scan/malware_scan.py
+++ b/press/press/doctype/malware_scan/malware_scan.py
@@ -9,8 +9,53 @@
class MalwareScan(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ play: DF.Link | None
+ server: DF.DynamicLink
+ server_type: DF.Literal["Server", "Database Server", "Proxy Server"]
+ status: DF.Literal["Pending", "Installed", "Not Installed", "Running", "Clean", "Infected", "Failure"]
+ # end: auto-generated types
+
def after_insert(self):
- self.start()
+ self.check_clamav()
+
+ def check_clamav(self):
+ self.status = "Pending"
+ self.save()
+ frappe.db.commit()
+ frappe.enqueue_doc(self.doctype, self.name, "_check_clamav", queue="default")
+
+ def _check_clamav(self):
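+ # Check for the clamav package on the server via the pkg_exists playbook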
+ try:
+ server = frappe.get_doc(self.server_type, self.server)
+ ansible = Ansible(
+ playbook="pkg_exists.yml",
+ server=server,
+ user=server._ssh_user(),
+ port=server._ssh_port(),
+ variables={"pkg": "clamav"},
+ )
+ self.reload()
+ self.play = ansible.play
+ self.status = "Running"
+ self.save()
+ frappe.db.commit()
+ play = ansible.run()
+ if play.status == "Success":
+ self.status = "Installed"
+ else:
+ self.status = "Not Installed"
+ except Exception:
+ log_error("ClamAV Install Exception", scan=self.as_dict())
+ self.status = "Failure"
+ self.save()
@frappe.whitelist()
def start(self):
@@ -21,9 +66,12 @@ def start(self):
def _start(self):
try:
+ server = frappe.get_doc(self.server_type, self.server)
ansible = Ansible(
playbook="malware_scan.yml",
- server=frappe.get_doc(self.server_type, self.server),
+ server=server,
+ user=server._ssh_user(),
+ port=server._ssh_port(),
)
self.reload()
self.play = ansible.play
@@ -73,9 +121,4 @@ def fail(self):
self.send_alert(message)
def send_alert(self, message):
- # chat_id = frappe.db.get_value(
- # "Press Settings", "Press Settings", "telegram_alert_chat_id"
- # )
- # telegram = Telegram(chat_id)
- # telegram.send(message)
pass
diff --git a/press/press/doctype/managed_database_service/__init__.py b/press/press/doctype/managed_database_service/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/managed_database_service/managed_database_service.js b/press/press/doctype/managed_database_service/managed_database_service.js
new file mode 100644
index 00000000000..8028b9b58d2
--- /dev/null
+++ b/press/press/doctype/managed_database_service/managed_database_service.js
@@ -0,0 +1,16 @@
+// Copyright (c) 2024, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('Managed Database Service', {
+ refresh(frm) {
+ let command = `mysql -h ${frm.doc.name} -p -u ${frm.doc.database_root_user} -P ${frm.doc.port}`;
+ frm.add_custom_button('Console Access', () => {
+ frappe.msgprint(command);
+ });
+ frm.add_custom_button('Show Root Password', () => {
+ frm.call('show_root_password').then((r) => {
+ frappe.msgprint(`${r.message}`);
+ });
+ });
+ },
+});
diff --git a/press/press/doctype/managed_database_service/managed_database_service.json b/press/press/doctype/managed_database_service/managed_database_service.json
new file mode 100644
index 00000000000..11669e6cbd9
--- /dev/null
+++ b/press/press/doctype/managed_database_service/managed_database_service.json
@@ -0,0 +1,103 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "autoname": "field:database_host",
+ "creation": "2024-05-17 16:35:50.107617",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "database_section",
+ "service_provider",
+ "database_host",
+ "database_root_user",
+ "column_break_dboe",
+ "root_user_password",
+ "port",
+ "team_section",
+ "team"
+ ],
+ "fields": [
+ {
+ "fieldname": "service_provider",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Service Provider",
+ "options": "AWS RDS",
+ "reqd": 1
+ },
+ {
+ "fieldname": "database_host",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Database Host",
+ "reqd": 1,
+ "unique": 1
+ },
+ {
+ "fieldname": "column_break_dboe",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "3306",
+ "fieldname": "port",
+ "fieldtype": "Data",
+ "label": "Port"
+ },
+ {
+ "fieldname": "database_section",
+ "fieldtype": "Section Break",
+ "label": "Database "
+ },
+ {
+ "fieldname": "team_section",
+ "fieldtype": "Section Break",
+ "label": "Team"
+ },
+ {
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "label": "Team",
+ "options": "Team",
+ "reqd": 1
+ },
+ {
+ "fieldname": "database_root_user",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Database Root User",
+ "reqd": 1
+ },
+ {
+ "fieldname": "root_user_password",
+ "fieldtype": "Password",
+ "in_list_view": 1,
+ "label": "Root User Password",
+ "reqd": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2024-05-29 19:11:14.644480",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Managed Database Service",
+ "naming_rule": "By fieldname",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/managed_database_service/managed_database_service.py b/press/press/doctype/managed_database_service/managed_database_service.py
new file mode 100644
index 00000000000..5ba90bda790
--- /dev/null
+++ b/press/press/doctype/managed_database_service/managed_database_service.py
@@ -0,0 +1,31 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+from __future__ import annotations
+
+import frappe
+from frappe.model.document import Document
+
+
+class ManagedDatabaseService(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ database_host: DF.Data
+ database_root_user: DF.Data
+ port: DF.Data | None
+ root_user_password: DF.Password
+ service_provider: DF.Literal["AWS RDS"]
+ team: DF.Link
+ # end: auto-generated types
+
+ pass
+
+ @frappe.whitelist()
+ def show_root_password(self):
+ frappe.only_for("System Manager")
+ return self.get_password("root_user_password")
diff --git a/press/press/doctype/managed_database_service/test_managed_database_service.py b/press/press/doctype/managed_database_service/test_managed_database_service.py
new file mode 100644
index 00000000000..69ff0289d0c
--- /dev/null
+++ b/press/press/doctype/managed_database_service/test_managed_database_service.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2024, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class TestManagedDatabaseService(FrappeTestCase):
+ pass
diff --git a/press/press/doctype/mariadb_binlog/__init__.py b/press/press/doctype/mariadb_binlog/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/mariadb_binlog/mariadb_binlog.js b/press/press/doctype/mariadb_binlog/mariadb_binlog.js
new file mode 100644
index 00000000000..ff237695e4e
--- /dev/null
+++ b/press/press/doctype/mariadb_binlog/mariadb_binlog.js
@@ -0,0 +1,19 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('MariaDB Binlog', {
+ refresh(frm) {
+ [[__('Download in Server'), 'download_binlog', frm.doc.uploaded]].forEach(
+ ([label, method, condition]) => {
+ if (typeof condition === 'undefined' || condition) {
+ frm.add_custom_button(label, () => {
+ frappe.confirm(
+ `Are you sure you want to ${label.toLowerCase()}?`,
+ () => frm.call(method).then((r) => frm.refresh()),
+ );
+ });
+ }
+ },
+ );
+ },
+});
diff --git a/press/press/doctype/mariadb_binlog/mariadb_binlog.json b/press/press/doctype/mariadb_binlog/mariadb_binlog.json
new file mode 100644
index 00000000000..4c361cab293
--- /dev/null
+++ b/press/press/doctype/mariadb_binlog/mariadb_binlog.json
@@ -0,0 +1,139 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-05-08 15:23:10.701619",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "file_name",
+ "database_server",
+ "column_break_pvji",
+ "current",
+ "indexed",
+ "purged_from_disk",
+ "column_break_wwgs",
+ "uploaded",
+ "remote_file",
+ "section_break_zpmy",
+ "size_mb",
+ "column_break_botn",
+ "file_modification_time"
+ ],
+ "fields": [
+ {
+ "default": "0",
+ "fieldname": "indexed",
+ "fieldtype": "Check",
+ "label": "Indexed",
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "database_server",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Database Server",
+ "options": "Database Server",
+ "reqd": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "column_break_pvji",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "section_break_zpmy",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "file_modification_time",
+ "fieldtype": "Datetime",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Modification Time",
+ "reqd": 1,
+ "search_index": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "purged_from_disk",
+ "fieldtype": "Check",
+ "label": "Purged from Disk",
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "size_mb",
+ "fieldtype": "Float",
+ "label": "Size (MB)",
+ "reqd": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "current",
+ "fieldtype": "Check",
+ "label": "Current",
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "file_name",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "File Name",
+ "reqd": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "column_break_botn",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "column_break_wwgs",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "uploaded",
+ "fieldtype": "Check",
+ "label": "Uploaded",
+ "read_only": 1
+ },
+ {
+ "depends_on": "eval: doc.uploaded",
+ "fieldname": "remote_file",
+ "fieldtype": "Link",
+ "label": "Remote File",
+ "options": "Remote File",
+ "read_only": 1
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2025-12-08 12:36:55.422452",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "MariaDB Binlog",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/press/doctype/mariadb_binlog/mariadb_binlog.py b/press/press/doctype/mariadb_binlog/mariadb_binlog.py
new file mode 100644
index 00000000000..394a2d35dae
--- /dev/null
+++ b/press/press/doctype/mariadb_binlog/mariadb_binlog.py
@@ -0,0 +1,139 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+from __future__ import annotations
+
+import json
+from typing import TYPE_CHECKING
+
+import frappe
+from frappe.desk.doctype.tag.tag import add_tag
+from frappe.model.document import Document
+
+from press.press.doctype.ansible_console.ansible_console import AnsibleAdHoc
+
+if TYPE_CHECKING:
+ from press.press.doctype.agent_job.agent_job import AgentJob
+ from press.press.doctype.remote_file.remote_file import RemoteFile
+
+
+class MariaDBBinlog(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ current: DF.Check
+ database_server: DF.Link
+ file_modification_time: DF.Datetime
+ file_name: DF.Data
+ indexed: DF.Check
+ purged_from_disk: DF.Check
+ remote_file: DF.Link | None
+ size_mb: DF.Float
+ uploaded: DF.Check
+ # end: auto-generated types
+
+ def on_trash(self):
+ self.delete_remote_file()
+
+ def delete_remote_file(self):
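+ # Unlink the remote file from this record before deleting the Remote File doc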
+ if self.remote_file:
+ remote_file = self.remote_file
+ self.uploaded = 0
+ self.remote_file = None
+ self.save()
+ frappe.delete_doc("Remote File", remote_file)
+
+ @frappe.whitelist()
+ def download_binlog(self):
+ frappe.enqueue_doc(
+ "MariaDB Binlog",
+ self.name,
+ "_download_binlog",
+ queue="default",
+ timeout=300,
+ now=True,
+ )
+ frappe.msgprint(
+ "Binlog download started. You will be notified when the download is complete.",
+ )
+
+ def _download_binlog(self):
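+ # Download the gzipped binlog from remote storage onto the database server via ad-hoc Ansible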
+ if not self.uploaded:
+ return
+ remote_file: RemoteFile = frappe.get_doc("Remote File", self.remote_file)
+ download_link = remote_file.get_download_link()
+ if not download_link:
+ return
+
+ command = f"curl -sSL '{download_link}' | gunzip -c > /var/lib/mysql/{self.file_name}.bak"
+ virtual_machine_ip = frappe.db.get_value(
+ "Virtual Machine",
+ frappe.get_value("Database Server", self.database_server, "virtual_machine"),
+ "public_ip_address",
+ )
+ result = AnsibleAdHoc(sources=f"{virtual_machine_ip},").run(command, self.name, raw_params=True)[0]
+ if not result.get("success"):
+ pretty_result = json.dumps(result, indent=2, sort_keys=True, default=str)
+ comment = f"{command}{pretty_result} "
+ self.add_comment(text=comment)
+ else:
+ self.add_comment(text=f"Binlog downloaded successfully to /var/lib/mysql/{self.file_name}.bak")
+
+
+def process_upload_binlogs_to_s3_job_update(job: AgentJob):
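+ # On successful upload jobs, register a Remote File for each offsite binlog and mark the matching records as uploaded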
+ if job.status != "Success" or job.server_type != "Database Server" or not job.data:
+ return
+
+ data: dict = json.loads(job.data)
+ offsite_files: dict = data.get("offsite_files", {})
+ if not offsite_files:
+ return
+
+ bucket = json.loads(job.request_data)["offsite"]["bucket"]
+
+ binlog_file_remote_files = {}
+ # Create remote file records for each file
+ for binlog_file_name, data in offsite_files.items():
+ remote_file = frappe.get_doc(
+ {
+ "doctype": "Remote File",
+ "file_name": f"{binlog_file_name}.gz",
+ "file_path": data["path"],
+ "file_size": data["size"],
+ "file_type": "application/x-gzip",
+ "bucket": bucket,
+ }
+ )
+ remote_file.save()
+ add_tag("MariaDB Binlog", remote_file.doctype, remote_file.name)
+ binlog_file_remote_files[binlog_file_name] = remote_file.name
+
+ # Update the remote_file field in the binlog files
+ for binlog_file_name, remote_file_name in binlog_file_remote_files.items():
+ frappe.db.set_value(
+ "MariaDB Binlog",
+ {"file_name": binlog_file_name, "database_server": job.server, "current": 0},
+ {
+ "uploaded": 1,
+ "remote_file": remote_file_name,
+ },
+ )
+
+
+def cleanup_old_records():
+ """
+ Cleanup junk records
+ """
+ frappe.db.delete(
+ "MariaDB Binlog",
+ {
+ "purged_from_disk": 1,
+ "uploaded": 0,
+ "indexed": 0,
+ },
+ )
diff --git a/press/press/doctype/mariadb_binlog/test_mariadb_binlog.py b/press/press/doctype/mariadb_binlog/test_mariadb_binlog.py
new file mode 100644
index 00000000000..ae9f7c662ea
--- /dev/null
+++ b/press/press/doctype/mariadb_binlog/test_mariadb_binlog.py
@@ -0,0 +1,14 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class IntegrationTestMariaDBBinlog(FrappeTestCase):
+ """
+ Integration tests for MariaDBBinlog.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/mariadb_stalk/__init__.py b/press/press/doctype/mariadb_stalk/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/mariadb_stalk/mariadb_stalk.js b/press/press/doctype/mariadb_stalk/mariadb_stalk.js
new file mode 100644
index 00000000000..5248750973d
--- /dev/null
+++ b/press/press/doctype/mariadb_stalk/mariadb_stalk.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2023, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("MariaDB Stalk", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/mariadb_stalk/mariadb_stalk.json b/press/press/doctype/mariadb_stalk/mariadb_stalk.json
new file mode 100644
index 00000000000..ff2822bc7f6
--- /dev/null
+++ b/press/press/doctype/mariadb_stalk/mariadb_stalk.json
@@ -0,0 +1,75 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2023-12-21 16:47:05.329972",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "server",
+ "column_break_apst",
+ "timestamp",
+ "section_break_bqjv",
+ "diagnostics"
+ ],
+ "fields": [
+ {
+ "fieldname": "server",
+ "fieldtype": "Link",
+ "in_filter": 1,
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Server",
+ "options": "Database Server",
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "column_break_apst",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "timestamp",
+ "fieldtype": "Datetime",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Timestamp",
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "section_break_bqjv",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "diagnostics",
+ "fieldtype": "Table",
+ "label": "Diagnostics",
+ "options": "MariaDB Stalk Diagnostic",
+ "read_only": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2023-12-21 18:55:52.236854",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "MariaDB Stalk",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/mariadb_stalk/mariadb_stalk.py b/press/press/doctype/mariadb_stalk/mariadb_stalk.py
new file mode 100644
index 00000000000..26a8ca69777
--- /dev/null
+++ b/press/press/doctype/mariadb_stalk/mariadb_stalk.py
@@ -0,0 +1,102 @@
+# Copyright (c) 2023, Frappe and contributors
+# For license information, please see license.txt
+
+import gzip
+from datetime import datetime
+
+import frappe
+from frappe.model.document import Document
+from frappe.query_builder import Interval
+from frappe.query_builder.functions import Now
+from frappe.utils import add_to_date, convert_utc_to_system_timezone, now_datetime
+
+from press.utils import log_error
+
+
+class MariaDBStalk(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.mariadb_stalk_diagnostic.mariadb_stalk_diagnostic import (
+ MariaDBStalkDiagnostic,
+ )
+
+ diagnostics: DF.Table[MariaDBStalkDiagnostic]
+ server: DF.Link | None
+ timestamp: DF.Datetime | None
+ # end: auto-generated types
+
+ @staticmethod
+ def clear_old_logs(days=30):
+ table = frappe.qb.DocType("MariaDB Stalk")
+ stalks = frappe.db.get_values(
+ table, filters=table.creation < (Now() - Interval(days=days))
+ )
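+ # Archive each stalk as a gzipped JSON File before deleting it permanently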
+ for stalk in stalks:
+ try:
+ stalk = frappe.get_doc("MariaDB Stalk", stalk)
+ stalk.create_json_gz_file()
+ stalk.delete(delete_permanently=True)
+ frappe.db.commit()
+ except Exception:
+ log_error("MariaDB Stalk Delete Error")
+ frappe.db.rollback()
+
+ def create_json_gz_file(self):
+ filename = f"mariadb-stalk-{self.server}-{self.timestamp}.json.gz"
+ encoded = frappe.safe_encode(self.as_json())
+ compressed = gzip.compress(encoded)
+ if frappe.db.exists("File", {"file_name": filename}):
+ return
+ file = frappe.get_doc(
+ {
+ "doctype": "File",
+ "file_name": filename,
+ "content": compressed,
+ "is_private": True,
+ }
+ )
+ file.insert()
+
+
+def fetch_stalks():
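+ # Enqueue one fetch job per active database server that has stalk collection set up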
+ for server in frappe.get_all(
+ "Database Server", {"status": "Active", "is_stalk_setup": True}, pluck="name"
+ ):
+ frappe.enqueue(
+ "press.press.doctype.mariadb_stalk.mariadb_stalk.fetch_server_stalks",
+ server=server,
+ job_id=f"fetch_mariadb_stalk:{server}",
+ )
+
+
+def fetch_server_stalks(server):
+ server = frappe.get_cached_doc("Database Server", server)
+ for stalk in server.get_stalks():
+ timestamp = convert_utc_to_system_timezone(
+ datetime.fromisoformat(stalk["timestamp"])
+ ).replace(tzinfo=None)
+ # To avoid fetching incomplete stalks, wait for 5 minutes
+ if now_datetime() <= add_to_date(timestamp, minutes=5):
+ continue
+ # Don't fetch old stalks
+ if now_datetime() > add_to_date(timestamp, days=15):
+ continue
+ if frappe.db.exists("MariaDB Stalk", {"server": server.name, "timestamp": timestamp}):
+ continue
+ try:
+ doc = frappe.new_doc("MariaDB Stalk")
+ doc.server = server.name
+ doc.timestamp = timestamp
+ for diagnostic in server.get_stalk(stalk["name"]):
+ doc.append("diagnostics", diagnostic)
+ doc.insert()
+ frappe.db.commit()
+ except Exception:
+ log_error("MariaDB Stalk Error", server=server, stalk=stalk)
+ frappe.db.rollback()
diff --git a/press/press/doctype/mariadb_stalk/test_mariadb_stalk.py b/press/press/doctype/mariadb_stalk/test_mariadb_stalk.py
new file mode 100644
index 00000000000..79b286c0863
--- /dev/null
+++ b/press/press/doctype/mariadb_stalk/test_mariadb_stalk.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2023, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class TestMariaDBStalk(FrappeTestCase):
+ pass
diff --git a/press/press/doctype/mariadb_stalk_diagnostic/__init__.py b/press/press/doctype/mariadb_stalk_diagnostic/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/mariadb_stalk_diagnostic/mariadb_stalk_diagnostic.json b/press/press/doctype/mariadb_stalk_diagnostic/mariadb_stalk_diagnostic.json
new file mode 100644
index 00000000000..b44e81d7038
--- /dev/null
+++ b/press/press/doctype/mariadb_stalk_diagnostic/mariadb_stalk_diagnostic.json
@@ -0,0 +1,41 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2023-12-21 16:57:50.050145",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "type",
+ "output"
+ ],
+ "fields": [
+ {
+ "fieldname": "type",
+ "fieldtype": "Data",
+ "in_filter": 1,
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Type",
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "output",
+ "fieldtype": "Code",
+ "label": "Output",
+ "read_only": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2023-12-21 18:57:47.641855",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "MariaDB Stalk Diagnostic",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/mariadb_stalk_diagnostic/mariadb_stalk_diagnostic.py b/press/press/doctype/mariadb_stalk_diagnostic/mariadb_stalk_diagnostic.py
new file mode 100644
index 00000000000..463aa1ac220
--- /dev/null
+++ b/press/press/doctype/mariadb_stalk_diagnostic/mariadb_stalk_diagnostic.py
@@ -0,0 +1,24 @@
+# Copyright (c) 2023, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class MariaDBStalkDiagnostic(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ output: DF.Code | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ type: DF.Data | None
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/mariadb_variable/mariadb_variable.js b/press/press/doctype/mariadb_variable/mariadb_variable.js
index 1c09e24b921..c0a9e46479e 100644
--- a/press/press/doctype/mariadb_variable/mariadb_variable.js
+++ b/press/press/doctype/mariadb_variable/mariadb_variable.js
@@ -8,5 +8,19 @@ frappe.ui.form.on('MariaDB Variable', {
`${root}${frm.doc.doc_section}-system-variables/#${frm.doc.name}`,
__('Check MariaDB Documentation'),
);
+ frm.add_custom_button(__('Set on all servers'), () => {
+ frappe.confirm(
+ `Are you sure you want to set this variable on all servers?
+ If the variable is not dynamic, MariaDB will be restarted.`,
+ () =>
+ frm.call('set_on_all_servers').then((r) => {
+ if (r.message) {
+ frappe.msgprint(r.message);
+ } else {
+ frm.refresh();
+ }
+ }),
+ );
+ });
},
});
diff --git a/press/press/doctype/mariadb_variable/mariadb_variable.json b/press/press/doctype/mariadb_variable/mariadb_variable.json
index 0e945e506bc..7d167ab2006 100644
--- a/press/press/doctype/mariadb_variable/mariadb_variable.json
+++ b/press/press/doctype/mariadb_variable/mariadb_variable.json
@@ -1,5 +1,6 @@
{
"actions": [],
+ "allow_import": 1,
"allow_rename": 1,
"autoname": "prompt",
"creation": "2023-04-29 00:00:12.372588",
@@ -11,8 +12,13 @@
"dynamic",
"datatype",
"doc_section",
+ "skippable",
+ "column_break_yrfg",
"default_value",
- "skippable"
+ "set_on_new_servers",
+ "section_break_mhww",
+ "configurable_by_user",
+ "title"
],
"fields": [
{
@@ -27,7 +33,7 @@
"fieldname": "datatype",
"fieldtype": "Select",
"label": "Datatype",
- "options": "Int\nBool\nFloat\nStr",
+ "options": "Int\nFloat\nStr",
"reqd": 1
},
{
@@ -48,11 +54,38 @@
"label": "Doc Section",
"options": "server\nreplication-and-binary-log\ninnodb",
"reqd": 1
+ },
+ {
+ "fieldname": "column_break_yrfg",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "set_on_new_servers",
+ "fieldtype": "Check",
+ "label": "Set on new servers"
+ },
+ {
+ "default": "0",
+ "fieldname": "configurable_by_user",
+ "fieldtype": "Check",
+ "label": "Configurable By User"
+ },
+ {
+ "fieldname": "section_break_mhww",
+ "fieldtype": "Section Break"
+ },
+ {
+ "depends_on": "eval: doc.configurable_by_user",
+ "fieldname": "title",
+ "fieldtype": "Data",
+ "label": "Title",
+ "mandatory_depends_on": "eval: doc.configurable_by_user"
}
],
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2023-06-15 14:40:46.598554",
+ "modified": "2025-03-25 22:21:06.165678",
"modified_by": "Administrator",
"module": "Press",
"name": "MariaDB Variable",
diff --git a/press/press/doctype/mariadb_variable/mariadb_variable.py b/press/press/doctype/mariadb_variable/mariadb_variable.py
index 91aa7a01915..806c37c7cea 100644
--- a/press/press/doctype/mariadb_variable/mariadb_variable.py
+++ b/press/press/doctype/mariadb_variable/mariadb_variable.py
@@ -1,9 +1,55 @@
# Copyright (c) 2023, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
-# import frappe
+from typing import TYPE_CHECKING
+
+import frappe
from frappe.model.document import Document
+if TYPE_CHECKING:
+ from press.press.doctype.database_server.database_server import DatabaseServer
+
class MariaDBVariable(Document):
- pass
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ configurable_by_user: DF.Check
+ datatype: DF.Literal["Int", "Float", "Str"]
+ default_value: DF.Data | None
+ doc_section: DF.Literal["server", "replication-and-binary-log", "innodb"]
+ dynamic: DF.Check
+ set_on_new_servers: DF.Check
+ skippable: DF.Check
+ title: DF.Data | None
+ # end: auto-generated types
+
+ def get_default_value(self):
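+ # Cast the stored string default to the declared datatype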
+ if not (value := self.default_value):
+ frappe.throw("Default Value is required")
+ match self.datatype:
+ case "Int":
+ return int(value)
+ case "Float":
+ return float(value)
+ return value
+
+ @frappe.whitelist()
+ def set_on_all_servers(self):
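+ # Apply this variable's default value to every active, non-self-hosted database server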
+ value = self.get_default_value()
+ servers = frappe.get_all(
+ "Database Server", {"status": "Active", "is_self_hosted": False}, pluck="name"
+ )
+ for server_name in servers:
+ server: DatabaseServer = frappe.get_doc("Database Server", server_name)
+ server.add_or_update_mariadb_variable(self.name, f"value_{self.datatype.lower()}", value)
+
+ def set_on_server(self, server: DatabaseServer):
+ value = self.get_default_value()
+ server.add_or_update_mariadb_variable(self.name, f"value_{self.datatype.lower()}", value)
diff --git a/press/press/doctype/mariadb_variable/test_mariadb_variable.py b/press/press/doctype/mariadb_variable/test_mariadb_variable.py
index c1f8e180bcf..4f92cfa659c 100644
--- a/press/press/doctype/mariadb_variable/test_mariadb_variable.py
+++ b/press/press/doctype/mariadb_variable/test_mariadb_variable.py
@@ -1,9 +1,67 @@
# Copyright (c) 2023, Frappe and Contributors
# See license.txt
-# import frappe
+import frappe
from frappe.tests.utils import FrappeTestCase
+from press.press.doctype.database_server.test_database_server import (
+ create_test_database_server,
+)
+
class TestMariaDBVariable(FrappeTestCase):
- pass
+ def tearDown(self):
+ frappe.db.rollback()
+
+ def test_set_on_all_servers_sets_on_all_servers(self):
+ db_1 = create_test_database_server()
+ db_2 = create_test_database_server()
+ db_1.add_or_update_mariadb_variable("tmp_disk_table_size", "value_int", 1024)
+ db_1.add_or_update_mariadb_variable("innodb_old_blocks_time", "value_str", "1000")
+
+ variable = frappe.get_doc("MariaDB Variable", "tmp_disk_table_size") # in fixture
+ variable.default_value = "5120"
+ variable.save()
+
+ variable.set_on_all_servers()
+ db_1.reload()
+ db_2.reload()
+ self.assertEqual(db_1.mariadb_system_variables[0].value, 5120 * 1024 * 1024)
+ self.assertEqual(db_2.mariadb_system_variables[0].value, 5120 * 1024 * 1024)
+
+ variable = frappe.get_doc("MariaDB Variable", "innodb_old_blocks_time")
+ variable.default_value = "5000"
+ variable.save()
+
+ variable.set_on_all_servers()
+ db_1.reload()
+ db_2.reload()
+ self.assertEqual(db_1.mariadb_system_variables[1].value, "5000")
+ self.assertEqual(db_2.mariadb_system_variables[1].value, "5000")
+
+ def test_set_on_server_sets_on_one_server(self):
+ db_1 = create_test_database_server()
+ db_2 = create_test_database_server()
+ db_2.add_or_update_mariadb_variable("tmp_disk_table_size", "value_int", 1024)
+ db_1.add_or_update_mariadb_variable("tmp_disk_table_size", "value_int", 1024)
+ db_1.add_or_update_mariadb_variable("innodb_old_blocks_time", "value_str", "1000")
+
+ variable = frappe.get_doc("MariaDB Variable", "tmp_disk_table_size")
+ variable.default_value = "5120"
+ variable.save()
+
+ variable.set_on_server(db_1)
+ db_1.reload()
+ db_2.reload()
+ self.assertEqual(db_1.mariadb_system_variables[0].value, 5120 * 1024 * 1024)
+ self.assertEqual(db_2.mariadb_system_variables[0].value, 1024 * 1024 * 1024)
+
+ variable = frappe.get_doc("MariaDB Variable", "innodb_old_blocks_time")
+ variable.default_value = "5000"
+ variable.save()
+
+ variable.set_on_server(db_2)
+ db_1.reload()
+ db_2.reload()
+ self.assertEqual(db_1.mariadb_system_variables[1].value, "1000")
+ self.assertEqual(db_2.mariadb_system_variables[1].value, "5000")
diff --git a/press/press/doctype/marketplace_app/events.py b/press/press/doctype/marketplace_app/events.py
new file mode 100644
index 00000000000..a87a06cd910
--- /dev/null
+++ b/press/press/doctype/marketplace_app/events.py
@@ -0,0 +1,45 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+import frappe
+
+from press.press.doctype.communication_info.communication_info import get_communication_info
+
+
+def auto_review_for_missing_steps():
+ for app in frappe.get_all(
+ "Marketplace App",
+ {
+ "status": ("in", ["Draft", "Attention Required", "In Review"]),
+ "stop_auto_review": False,
+ },
+ pluck="name",
+ ):
+ app_doc = frappe.get_doc("Marketplace App", app)
+ release = bool(frappe.db.exists("App Release Approval Request", {"app": app}))
+ logo = bool(app_doc.image)
+ desc = "Please add a short" not in app_doc.description
+ links = bool(
+ app_doc.website
+ and app_doc.support
+ and app_doc.documentation
+ and app_doc.privacy_policy
+ and app_doc.terms_of_service
+ )
+
+ recipients = get_communication_info("Email", "Marketplace", "Team", app_doc.team)
+ if recipients and not (logo and desc and links and release):
+ frappe.sendmail(
+ subject=f"Marketplace App Review: {app_doc.title}",
+ recipients=recipients,
+ template="marketplace_auto_review",
+ reference_doctype="Marketplace App",
+ reference_name=app,
+ args={
+ "logo": logo,
+ "links": links,
+ "desc": desc,
+ "release": release,
+ "review_page_link": f"{frappe.local.site}/dashboard/marketplace/apps/{app}/review",
+ },
+ )
diff --git a/press/press/doctype/marketplace_app/marketplace_app.js b/press/press/doctype/marketplace_app/marketplace_app.js
index 097df6b556e..b6dbc6a3a11 100644
--- a/press/press/doctype/marketplace_app/marketplace_app.js
+++ b/press/press/doctype/marketplace_app/marketplace_app.js
@@ -4,7 +4,7 @@
frappe.ui.form.on('Marketplace App', {
refresh: function (frm) {
frm.add_web_link(
- `/dashboard/marketplace/apps/${frm.doc.name}/releases`,
+ `/dashboard/apps/${frm.doc.name}/`,
__('Open in dashboard'),
);
},
diff --git a/press/press/doctype/marketplace_app/marketplace_app.json b/press/press/doctype/marketplace_app/marketplace_app.json
index 845b79ac7eb..ac625d59f31 100644
--- a/press/press/doctype/marketplace_app/marketplace_app.json
+++ b/press/press/doctype/marketplace_app/marketplace_app.json
@@ -13,6 +13,7 @@
"column_break_3",
"team",
"route",
+ "published_on",
"section_break_7",
"frappe_approved",
"subscription_type",
@@ -55,9 +56,18 @@
"run_after_uninstall_script",
"after_uninstall_script",
"review_tab",
+ "status",
+ "column_break_bcjk",
"stop_auto_review",
"review_stage",
- "status"
+ "dashboard_tab",
+ "onboarding_related_section",
+ "show_for_site_creation",
+ "localisation_apps",
+ "section_break_tlpw",
+ "average_rating",
+ "others_section",
+ "collect_feedback"
],
"fields": [
{
@@ -86,13 +96,14 @@
"in_list_view": 1,
"in_standard_filter": 1,
"label": "Status",
- "options": "Draft\nPublished\nIn Review\nAttention Required\nRejected"
+ "options": "Draft\nPublished\nIn Review\nAttention Required\nRejected\nDisabled"
},
{
"fieldname": "section_break_5",
"fieldtype": "Section Break"
},
{
+ "default": "Please add a short description about your app here...",
"fieldname": "description",
"fieldtype": "Small Text",
"in_list_view": 1,
@@ -101,7 +112,7 @@
},
{
"fieldname": "long_description",
- "fieldtype": "Markdown Editor",
+ "fieldtype": "Text Editor",
"label": "Long Description"
},
{
@@ -347,8 +358,64 @@
"fieldname": "site_config_section",
"fieldtype": "Section Break",
"label": "Site Config"
+ },
+ {
+ "fieldname": "onboarding_related_section",
+ "fieldtype": "Section Break",
+ "label": "Onboarding/Site Creation Related"
+ },
+ {
+ "default": "0",
+ "fieldname": "show_for_site_creation",
+ "fieldtype": "Check",
+ "label": "Show for site creation"
+ },
+ {
+ "fieldname": "dashboard_tab",
+ "fieldtype": "Tab Break",
+ "label": "Dashboard"
+ },
+ {
+ "fieldname": "section_break_tlpw",
+ "fieldtype": "Section Break",
+ "label": "Marketplace Page"
+ },
+ {
+ "default": "0",
+ "fieldname": "average_rating",
+ "fieldtype": "Float",
+ "hidden": 1,
+ "label": "Average Rating",
+ "precision": "2"
+ },
+ {
+ "fieldname": "localisation_apps",
+ "fieldtype": "Table",
+ "label": "Localisation Apps",
+ "options": "Marketplace Localisation App"
+ },
+ {
+ "fieldname": "others_section",
+ "fieldtype": "Section Break",
+ "label": "Others"
+ },
+ {
+ "default": "0",
+ "fieldname": "collect_feedback",
+ "fieldtype": "Check",
+ "label": "Collect Feedback"
+ },
+ {
+ "fieldname": "column_break_bcjk",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "published_on",
+ "fieldtype": "Date",
+ "label": "Published On"
}
],
+ "grid_page_length": 50,
"has_web_view": 1,
"image_field": "image",
"index_web_pages_for_search": 1,
@@ -370,7 +437,7 @@
"link_fieldname": "app"
}
],
- "modified": "2023-06-01 13:32:04.972967",
+ "modified": "2025-09-30 11:48:31.952469",
"modified_by": "Administrator",
"module": "Press",
"name": "Marketplace App",
@@ -387,8 +454,22 @@
"role": "System Manager",
"share": 1,
"write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Marketplace Manager",
+ "select": 1,
+ "share": 1,
+ "write": 1
}
],
+ "row_format": "Dynamic",
"sort_field": "modified",
"sort_order": "DESC",
"states": [
@@ -403,4 +484,4 @@
],
"title_field": "title",
"track_changes": 1
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/marketplace_app/marketplace_app.py b/press/press/doctype/marketplace_app/marketplace_app.py
index f67ace186e8..39d1972b4c6 100644
--- a/press/press/doctype/marketplace_app/marketplace_app.py
+++ b/press/press/doctype/marketplace_app/marketplace_app.py
@@ -1,51 +1,197 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2021, Frappe and contributors
# For license information, please see license.txt
-import frappe
-import requests
+from __future__ import annotations
-from typing import Dict, List
from base64 import b64decode
-from press.utils import get_last_doc
-from press.api.github import get_access_token
+from typing import TYPE_CHECKING, ClassVar
+
+import frappe
+import requests
from frappe.query_builder.functions import Cast_
+from frappe.utils.caching import redis_cache
+from frappe.utils.safe_exec import safe_exec
from frappe.website.utils import cleanup_page_name
from frappe.website.website_generator import WebsiteGenerator
+
+from press.api.client import dashboard_whitelist
+from press.api.github import get_access_token
from press.marketplace.doctype.marketplace_app_plan.marketplace_app_plan import (
get_app_plan_features,
)
+from press.press.doctype.app.app import new_app as new_app_doc
+from press.press.doctype.app_release_approval_request.app_release_approval_request import (
+ AppReleaseApprovalRequest,
+)
from press.press.doctype.marketplace_app.utils import get_rating_percentage_distribution
-from frappe.utils.safe_exec import safe_exec
-from frappe.utils import get_datetime
+from press.utils import get_current_team, get_last_doc
+
+if TYPE_CHECKING:
+ from press.press.doctype.site.site import Site
class MarketplaceApp(WebsiteGenerator):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.marketplace_app_categories.marketplace_app_categories import (
+ MarketplaceAppCategories,
+ )
+ from press.press.doctype.marketplace_app_screenshot.marketplace_app_screenshot import (
+ MarketplaceAppScreenshot,
+ )
+ from press.press.doctype.marketplace_app_version.marketplace_app_version import MarketplaceAppVersion
+ from press.press.doctype.marketplace_localisation_app.marketplace_localisation_app import (
+ MarketplaceLocalisationApp,
+ )
+
+ after_install_script: DF.Code | None
+ after_uninstall_script: DF.Code | None
+ app: DF.Link
+ average_rating: DF.Float
+ categories: DF.Table[MarketplaceAppCategories]
+ collect_feedback: DF.Check
+ custom_verify_template: DF.Check
+ description: DF.SmallText
+ documentation: DF.Data | None
+ frappe_approved: DF.Check
+ image: DF.AttachImage | None
+ localisation_apps: DF.Table[MarketplaceLocalisationApp]
+ long_description: DF.TextEditor | None
+ message: DF.TextEditor | None
+ outgoing_email: DF.Data | None
+ outgoing_sender_name: DF.Data | None
+ poll_method: DF.Data | None
+ privacy_policy: DF.Data | None
+ published: DF.Check
+ published_on: DF.Date | None
+ review_stage: DF.Literal[
+ "Not Started",
+ "Description Missing",
+ "Logo Missing",
+ "App Release Not Reviewed",
+ "Ready for Review",
+ "Ready to Publish",
+ "Rejected",
+ ]
+ route: DF.Data | None
+ run_after_install_script: DF.Check
+ run_after_uninstall_script: DF.Check
+ screenshots: DF.Table[MarketplaceAppScreenshot]
+ show_for_site_creation: DF.Check
+ signature: DF.TextEditor | None
+ site_config: DF.JSON | None
+ sources: DF.Table[MarketplaceAppVersion]
+ status: DF.Literal["Draft", "Published", "In Review", "Attention Required", "Rejected", "Disabled"]
+ stop_auto_review: DF.Check
+ subject: DF.Data | None
+ subscription_type: DF.Literal["Free", "Paid", "Freemium"]
+ subscription_update_hook: DF.Data | None
+ support: DF.Data | None
+ team: DF.Link | None
+ terms_of_service: DF.Data | None
+ title: DF.Data
+ website: DF.Data | None
+ # end: auto-generated types
+
+ dashboard_fields: ClassVar = [
+ "image",
+ "title",
+ "status",
+ "description",
+ "review_stage",
+ ]
+
def autoname(self):
self.name = self.app
+ @dashboard_whitelist()
+ def delete(self):
+ if self.status != "Draft":
+ frappe.throw("You can only delete an app in Draft status")
+
+ if get_current_team() != self.team:
+ frappe.throw("You are not authorized to delete this app")
+
+ super().delete()
+
+ def on_trash(self):
+ frappe.db.delete("Marketplace App Plan", {"app": self.name})
+ frappe.db.delete("App Release Approval Request", {"marketplace_app": self.name})
+
+ @dashboard_whitelist()
+ def create_approval_request(self, app_release: str):
+ """Create a new Approval Request for given `app_release`"""
+ AppReleaseApprovalRequest.create(self.app, app_release)
+
+ @dashboard_whitelist()
+ def cancel_approval_request(self, app_release: str):
+ approval_requests = frappe.get_all(
+ "App Release Approval Request",
+ filters={"app_release": app_release},
+ pluck="name",
+ order_by="creation desc",
+ )
+
+ if len(approval_requests) == 0:
+ frappe.throw("No approval request exists for the given app release")
+
+ frappe.get_doc("App Release Approval Request", approval_requests[0]).cancel()
+
def before_insert(self):
if not frappe.flags.in_test:
- self.long_description = self.fetch_readme()
+ self.check_if_duplicate()
+ self.create_app_and_source_if_needed()
+ self.long_description = frappe.utils.md_to_html(self.fetch_readme())
self.set_route()
def set_route(self):
self.route = "marketplace/apps/" + cleanup_page_name(self.app)
+ def check_if_duplicate(self):
+ if frappe.db.exists("Marketplace App", self.name):
+ frappe.throw(f"App {self.name} already exists. Please contact support.")
+
+ def create_app_and_source_if_needed(self):
+ if frappe.db.exists("App", self.app or self.name):
+ app_doc = frappe.get_doc("App", self.app or self.name)
+ else:
+ app_doc = new_app_doc(self.name, self.title)
+
+ if not self.sources:
+ source = app_doc.add_source(
+ self.version,
+ self.repository_url,
+ self.branch,
+ self.team,
+ self.github_installation_id,
+ public=True,
+ )
+ self.app = source.app
+ self.append("sources", {"version": self.version, "source": source.name})
+
def validate(self):
self.published = self.status == "Published"
self.validate_sources()
self.validate_number_of_screenshots()
+ self.validate_summary()
+
+ def validate_summary(self):
+ if len(self.description) > 140:
+ frappe.throw("Marketplace App summary cannot be more than 140 characters.")
def validate_sources(self):
for source in self.sources:
app_source = frappe.get_doc("App Source", source.source)
if app_source.app != self.app:
- frappe.throw(
- f"App Source {frappe.bold(source.source)} does not belong to this app!"
- )
+ frappe.throw(f"App Source {frappe.bold(source.source)} does not belong to this app!")
app_source_versions = [v.version for v in app_source.versions]
if source.version not in app_source_versions:
@@ -55,13 +201,20 @@ def validate_sources(self):
)
def validate_number_of_screenshots(self):
- max_allowed_screenshots = frappe.db.get_single_value(
- "Press Settings", "max_allowed_screenshots"
- )
+ max_allowed_screenshots = frappe.db.get_single_value("Press Settings", "max_allowed_screenshots")
if len(self.screenshots) > max_allowed_screenshots:
- frappe.throw(
- f"You cannot add more than {max_allowed_screenshots} screenshots for an app."
- )
+ frappe.throw(f"You cannot add more than {max_allowed_screenshots} screenshots for an app.")
+
+ def on_update(self):
+ self.set_published_on_date()
+
+ def set_published_on_date(self):
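+ # Stamp published_on the first time the app transitions to Published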
+ if self.published_on:
+ return
+
+ doc_before_save = self.get_doc_before_save()
+ if self.status == "Published" and doc_before_save.status != "Published":
+ self.published_on = frappe.utils.nowdate()
def change_branch(self, source, version, to_branch):
existing_source = frappe.db.exists(
@@ -69,9 +222,7 @@ def change_branch(self, source, version, to_branch):
{
"name": ("!=", self.name),
"app": self.app,
- "repository_url": frappe.db.get_value(
- "App Source", {"name": source}, "repository_url"
- ),
+ "repository_url": frappe.db.get_value("App Source", {"name": source}, "repository_url"),
"branch": to_branch,
"team": self.team,
},
@@ -95,6 +246,7 @@ def change_branch(self, source, version, to_branch):
source_doc.branch = to_branch
source_doc.save()
+ @dashboard_whitelist()
def add_version(self, version, branch):
existing_source = frappe.db.exists(
"App Source",
@@ -109,6 +261,7 @@ def add_version(self, version, branch):
source_doc = frappe.get_doc("App Source", existing_source)
try:
source_doc.append("versions", {"version": version})
+ source_doc.public = 1
source_doc.save()
except Exception:
pass
@@ -132,6 +285,7 @@ def add_version(self, version, branch):
self.append("sources", {"version": version, "source": source_doc.name})
self.save()
+ @dashboard_whitelist()
def remove_version(self, version):
if self.status == "Published" and len(self.sources) == 1:
frappe.throw("Failed to remove. Need at least 1 version for a published app")
@@ -192,9 +346,7 @@ def get_context(self, context):
context.app = self
supported_versions = []
- public_rgs = frappe.get_all(
- "Release Group", filters={"public": True}, fields=["version", "name"]
- )
+ public_rgs = frappe.get_all("Release Group", filters={"public": True}, fields=["version", "name"])
unique_public_rgs = {}
for rg in public_rgs:
@@ -206,14 +358,21 @@ def get_context(self, context):
continue
frappe_source_name = frappe.get_doc(
- "Release Group App", {"app": "frappe", "parent": unique_public_rgs[source.version]}
+ "Release Group App",
+ {"app": "frappe", "parent": unique_public_rgs[source.version]},
).source
frappe_source = frappe.db.get_value(
- "App Source", frappe_source_name, ["repository_url", "branch"], as_dict=True
+ "App Source",
+ frappe_source_name,
+ ["repository_url", "branch"],
+ as_dict=True,
)
app_source = frappe.db.get_value(
- "App Source", source.source, ["repository_url", "branch", "public"], as_dict=True
+ "App Source",
+ source.source,
+ ["repository_url", "branch", "public"],
+ as_dict=True,
)
supported_versions.append(
@@ -259,7 +418,7 @@ def get_context(self, context):
context.user_reviews = user_reviews
context.ratings_summary = ratings_summary
- def get_user_reviews(self) -> List:
+ def get_user_reviews(self) -> list:
app_user_review = frappe.qb.DocType("App User Review")
user = frappe.qb.DocType("User")
@@ -280,7 +439,7 @@ def get_user_reviews(self) -> List:
)
return query.run(as_dict=True)
- def get_user_ratings_summary(self, reviews: List) -> Dict:
+ def get_user_ratings_summary(self, reviews: list) -> dict:
total_num_reviews = len(reviews)
avg_rating = 0.0
@@ -314,9 +473,7 @@ def get_deploy_information(self):
release_group = frappe.get_doc("Release Group", rg_name)
sources_on_rg = [a.source for a in release_group.apps]
- latest_active_bench = get_last_doc(
- "Bench", filters={"status": "Active", "group": rg_name}
- )
+ latest_active_bench = get_last_doc("Bench", filters={"status": "Active", "group": rg_name})
if latest_active_bench:
sources_on_bench = [a.source for a in latest_active_bench.apps]
@@ -369,13 +526,13 @@ def total_active_benches(self):
def get_payout_amount(self, status: str = "", total_for: str = "net_amount"):
"""Return the payout amount for this app"""
- filters = {"recipient": self.team}
+ filters = {"team": self.team}
if status:
filters["status"] = status
payout_orders = frappe.get_all("Payout Order", filters=filters, pluck="name")
payout = frappe.get_all(
"Payout Order Item",
- filters={"parent": ("in", payout_orders)},
+ filters={"parent": ("in", payout_orders), "document_name": self.name},
fields=[
f"SUM(CASE WHEN currency = 'USD' THEN {total_for} ELSE 0 END) AS usd_amount",
f"SUM(CASE WHEN currency = 'INR' THEN {total_for} ELSE 0 END) AS inr_amount",
@@ -383,68 +540,92 @@ def get_payout_amount(self, status: str = "", total_for: str = "net_amount"):
)
return payout[0] if payout else {"usd_amount": 0, "inr_amount": 0}
+ @dashboard_whitelist()
+ def site_installs(self):
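+ # List active sites with this app installed, along with each site's plan and team user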
+ site = frappe.qb.DocType("Site")
+ site_app = frappe.qb.DocType("Site App")
+ site_plan = frappe.qb.DocType("Site Plan")
+ team = frappe.qb.DocType("Team")
+
+ query = (
+ frappe.qb.from_(site)
+ .left_join(team)
+ .on(team.name == site.team)
+ .left_outer_join(site_app)
+ .on(site.name == site_app.parent)
+ .left_outer_join(site_plan)
+ .on(site_app.plan == site_plan.name)
+ .select(site.name, site.plan, team.user)
+ .where((site.status == "Active") & (site_app.app == self.app) & (site_plan.price_usd >= 0))
+ )
+ return query.run(as_dict=True)
+
+ @dashboard_whitelist()
+ def listing_details(self):
+ return {
+ "support": self.support,
+ "website": self.website,
+ "documentation": self.documentation,
+ "privacy_policy": self.privacy_policy,
+ "terms_of_service": self.terms_of_service,
+ "description": self.description,
+ "long_description": self.long_description,
+ "screenshots": [screenshot.image for screenshot in self.screenshots],
+ }
+
+ @dashboard_whitelist()
+ def mark_app_ready_for_review(self):
+ # TODO: Start security check and auto deploy process here
+ self.review_stage = "Ready for Review"
+ self.save()
+
+ @dashboard_whitelist()
+ def update_listing(self, *args):
+ data = frappe._dict(args[0])
+ self.title = data.get("title") or self.title
+ self.description = data.get("description")
+ self.long_description = data.get("long_description")
+ self.support = data.get("support")
+ self.website = data.get("website")
+ self.documentation = data.get("documentation")
+ self.privacy_policy = data.get("privacy_policy")
+ self.terms_of_service = data.get("terms_of_service")
+ self.save()
+
def get_analytics(self):
+ today = frappe.utils.today()
+ last_week = frappe.utils.add_days(today, -7)
+
return {
"total_installs": self.total_installs(),
- "num_installs_active_sites": self.total_active_sites(),
- "num_installs_active_benches": self.total_active_benches(),
+ "installs_active_sites": self.total_active_sites(),
+ "installs_active_benches": self.total_active_benches(),
+ "installs_last_week": frappe.db.count(
+ "Site Activity",
+ {
+ "action": "Install App",
+ "reason": self.app,
+ "creation": (">=", last_week),
+ },
+ ),
"total_payout": self.get_payout_amount(),
"paid_payout": self.get_payout_amount(status="Paid"),
"pending_payout": self.get_payout_amount(status="Draft"),
"commission": self.get_payout_amount(total_for="commission"),
}
- def get_plans(self, frappe_version: str = None) -> List:
+ def get_plans(self, frappe_version: str | None = None) -> list:
return get_plans_for_app(self.name, frappe_version)
def can_charge_for_subscription(self, subscription):
- marketplace_app_plan, plan, site, team, status = frappe.get_value(
- "Marketplace App Subscription",
- subscription.marketplace_app_subscription,
- ["marketplace_app_plan", "plan", "site", "team", "status"],
- )
-
- return (
- status == "Active"
- and team
- and team != "Administrator"
- and self.should_create_usage_record(marketplace_app_plan, plan, site)
- )
-
- def should_create_usage_record(self, marketplace_app_plan, plan, site):
- """Check if the user can create a usage record for this app"""
- is_free = frappe.db.get_value("Marketplace App Plan", marketplace_app_plan, "is_free")
-
- if is_free:
- return False
-
- # For annual prepaid plans
- plan_interval = frappe.db.get_value("Plan", plan, "interval")
-
- if plan_interval == "Annually":
- return False
-
- # For non-active sites
- site_status, trial_site, free = frappe.db.get_value(
- "Site", site, ["status", "trial_end_date", "free"]
- )
- if site_status not in ("Active", "Inactive"):
+ if subscription.team == self.team:
return False
-
- if free:
- return False
-
- if trial_site and frappe.utils.getdate() < get_datetime(trial_site).date():
- return False
-
- return True
+ return subscription.enabled == 1 and subscription.team and subscription.team != "Administrator"
def get_plans_for_app(
app_name, frappe_version=None, include_free=True, include_disabled=False
): # Unused for now, might use later
- from press.press.doctype.team.team import is_us_eu
-
plans = []
filters = {"app": app_name}
@@ -454,38 +635,22 @@ def get_plans_for_app(
if not include_disabled:
filters["enabled"] = True
- filters["us_eu"] = (
- frappe.db.get_value("Saas Settings", app_name, "multiplier_pricing") and is_us_eu()
- )
-
marketplace_app_plans = frappe.get_all(
"Marketplace App Plan",
filters=filters,
fields=[
"name",
- "plan",
- "discount_percent",
- "gst",
- "marked_most_popular",
- "is_free",
+ "title",
"enabled",
- "block_monthly",
+ "price_inr",
+ "price_usd",
],
)
for app_plan in marketplace_app_plans:
plan_data = {}
plan_data.update(app_plan)
-
- plan_discount_percent = app_plan.discount_percent
- plan_data["discounted"] = plan_discount_percent > 0
- plan_prices = frappe.db.get_value(
- "Plan", app_plan.plan, ["plan_title", "price_usd", "price_inr"], as_dict=True
- )
-
- plan_data.update(plan_prices)
plan_data["features"] = get_app_plan_features(app_plan.name)
-
plans.append(plan_data)
plans.sort(key=lambda x: x["price_usd"])
@@ -494,32 +659,48 @@ def get_plans_for_app(
return plans
-def marketplace_app_hook(app=None, site="", op="install"):
+def marketplace_app_hook(app=None, site: Site | None = None, op="install"):
if app is None:
- site_apps = frappe.get_all("Site App", filters={"parent": site}, pluck="app")
- for app in site_apps:
- run_script(app, site, op)
+ if site is None:
+ return
+ site_apps = frappe.get_all("Site App", filters={"parent": site.name}, pluck="app")
+ for app_name in site_apps:
+ run_script(app_name, site, op)
else:
run_script(app, site, op)
def get_script_name(app, op):
- if op == "install" and frappe.db.get_value(
- "Marketplace App", app, "run_after_install_script"
- ):
+ if op == "install" and frappe.db.get_value("Marketplace App", app, "run_after_install_script"):
return "after_install_script"
- elif op == "uninstall" and frappe.db.get_value(
- "Marketplace App", app, "run_after_uninstall_script"
- ):
+ if op == "uninstall" and frappe.db.get_value("Marketplace App", app, "run_after_uninstall_script"):
return "after_uninstall_script"
- else:
- return ""
+ return ""
-def run_script(app, site, op):
+def run_script(app, site: Site, op):
script = get_script_name(app, op)
if script:
script = frappe.db.get_value("Marketplace App", app, script)
- local = {"doc": frappe.get_doc("Site", site)}
+ local = {"doc": site}
safe_exec(script, _locals=local)
+
+
+@redis_cache(ttl=60 * 60 * 24)
+def get_total_installs_by_app():
+ try:
+ total_installs = frappe.db.get_all(
+ "Site App",
+ fields=["app", "count(*) as count"],
+ group_by="app",
+ order_by=None,
+ )
+ except: # noqa E722
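+ # Some Frappe versions reject raw SQL aggregates in fields; retry with the dict-based syntax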
+ total_installs = frappe.db.get_all(
+ "Site App",
+ fields=["app", {"COUNT": "*", "as": "count"}],
+ group_by="app",
+ order_by=None,
+ )
+ return {installs["app"]: installs["count"] for installs in total_installs}
diff --git a/press/press/doctype/marketplace_app/patches/change_field_from_first_site_creation_to_site_creation.py b/press/press/doctype/marketplace_app/patches/change_field_from_first_site_creation_to_site_creation.py
new file mode 100644
index 00000000000..9098f37c522
--- /dev/null
+++ b/press/press/doctype/marketplace_app/patches/change_field_from_first_site_creation_to_site_creation.py
@@ -0,0 +1,13 @@
+# Copyright (c) 2024, Frappe Technologies Pvt. Ltd. and Contributors
+# For license information, please see license.txt
+
+import frappe
+
+
+def execute():
+ try:
+ frappe.db.sql(
+ "UPDATE `tabMarketplace App` SET show_for_site_creation = show_for_first_site_creation"
+ )
+ except frappe.db.OperationalError:
+ pass
diff --git a/press/press/doctype/marketplace_app/patches/convert_images_to_webp.py b/press/press/doctype/marketplace_app/patches/convert_images_to_webp.py
new file mode 100644
index 00000000000..9e6758bb9b0
--- /dev/null
+++ b/press/press/doctype/marketplace_app/patches/convert_images_to_webp.py
@@ -0,0 +1,61 @@
+# Copyright (c) 2024, Frappe Technologies Pvt. Ltd. and Contributors
+# For license information, please see license.txt
+
+from io import BytesIO
+
+import frappe
+import requests
+from PIL import Image
+from tqdm import tqdm
+
+
+def execute():
+ IMAGE_FORMATS_TO_CONVERT = ["png", "jpeg", "jpg"]
+
+ def convert_to_webp(screenshot):
+ if screenshot.startswith("files") or screenshot.startswith("/files"):
+ image_content = frappe.get_doc("File", {"file_url": screenshot}).get_content()
+ image = Image.open(BytesIO(image_content))
+ else:
+ # load from url
+ url = screenshot
+ response = requests.get(url, stream=True)
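+ # stream=True leaves the body unread so PIL can consume the file-like
+ # response.raw directly instead of buffering the whole image first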
+ image = Image.open(response.raw)
+
+ image = image.convert("RGB")
+ filename = f"{screenshot.split('/')[-1].split('.')[0]}.webp"
+
+ # convert to bytes
+ image_bytes = BytesIO()
+ image.save(image_bytes, "webp")
+ image_bytes = image_bytes.getvalue()
+ _file = frappe.get_doc(
+ {
+ "doctype": "File",
+ "attached_to_field": "image",
+ "folder": "Home/Attachments",
+ "file_name": filename,
+ "is_private": 0,
+ "content": image_bytes,
+ }
+ )
+ _file.save(ignore_permissions=True)
+ return _file.file_url
+
+ marketplace_app_names = frappe.get_all("Marketplace App", pluck="name")
+
+ for app_name in tqdm(marketplace_app_names):
+ app = frappe.get_doc("Marketplace App", app_name)
+
+ if app.image and app.image.split(".")[-1] in IMAGE_FORMATS_TO_CONVERT:
+ app.image = convert_to_webp(app.image)
+
+ screenshots = app.screenshots
+
+ for screenshot in screenshots:
+ if screenshot.image.split(".")[-1] not in IMAGE_FORMATS_TO_CONVERT:
+ continue
+
+ screenshot.image = convert_to_webp(screenshot.image)
+
+ app.save()
diff --git a/press/press/doctype/marketplace_app/templates/marketplace_app.html b/press/press/doctype/marketplace_app/templates/marketplace_app.html
index 81f27eed932..1fead7300b5 100644
--- a/press/press/doctype/marketplace_app/templates/marketplace_app.html
+++ b/press/press/doctype/marketplace_app/templates/marketplace_app.html
@@ -1,21 +1,59 @@
{% extends "templates/marketplace/base.html" %}
-{%- from "templates/marketplace/macros.html" import button, link, breadcrumbs, badge_blue, badge_green,
-five_star_rating -%}
+{%- from "templates/marketplace/macros.html" import button, link, breadcrumbs, badge_gray, badge_green,
+five_star_rating, approved_badge -%}
{%- block title -%}
-{{ app.title }} - Frappe Cloud Marketplace
+{{ app.title }} - Frappe Cloud Marketplace
{%- endblock -%}
-{%- block body -%}
+{%- block content -%}
-
-
- {{ breadcrumbs([ { 'label': 'Apps', 'url': '/marketplace' }, { 'label':
- app.title, 'url': '' } ]) }}
-
+
+
+
+ {{ breadcrumbs([ { 'label': 'Apps', 'url': '/marketplace' }, { 'label':
+ app.title, 'url': '' } ]) }}
+
+
+
+ {{ app_image(app, 'hidden md:block') }}
+
{{ app_image(app) }}
+
+
+ {{ app.title }}
+ {%- if app.frappe_approved -%}
+
+ {{ approved_badge() }}
+
+ {%- endif -%}
+
+
{{ app.description }}
+
+
+ {%- for category in
+ app.categories -%}
+
+ {{ badge_gray(category.category) }}
+
+ {%- endfor -%}
+
+
+
-
- {{ sidebar() }} {{ main() }}
+
+ {{ button('Install Now', '/dashboard/install-app/' + app.name, 'primary') }}
+
+
+ {{ no_of_installs | number_k_format }} {{ 'install' if no_of_installs
+ == 1 else 'installs' }}
+
+
+
+
+
+
+
+ {{ sidebar() }} {{ main() }}
@@ -23,22 +61,11 @@
{% macro sidebar() %}
-
- {{ app_image(app, 'hidden md:block') }}
-
-{%- if app.frappe_approved -%}
-
- {%- endif -%}
-
+
{%- if publisher_profile -%}
-
Publisher
-
Built by
+
{{ publisher_profile.display_name }}
@@ -46,23 +73,15 @@
Publisher
{%- endif -%}
-
Categories
-
-
- {%- if app.subscription_type == 'Freemium'-%} {{ badge_green('Freemium')
- }} {%- elif app.subscription_type == 'Paid'-%} {{ badge_green('Paid') }}
- {%- else -%} {{ badge_blue('Free') }} {%- endif -%} {%- for category in
- app.categories -%}
-
- {{ badge_blue(category.category) }}
-
- {%- endfor -%}
-
+
Pricing
+ {%- if app.subscription_type == 'Freemium'-%} {{ badge_green('Freemium')
+ }} {%- elif app.subscription_type == 'Paid'-%} {{ badge_green('Paid') }}
+ {%- else -%} {{ badge_gray('Free') }} {%- endif -%}
-
Learn more & Support
-
+ Resources
+
{%- for label, icon, href in [ ('Visit Website', 'external-link',
app.website), ('Support', 'life-buoy', app.support), ('Documentation',
'file-text', app.documentation), ('Privacy Policy', 'lock',
@@ -71,7 +90,7 @@ Learn more & Support
- {{ label }}
+ {{ label }}
{%- endif -%} {%- endfor -%}
@@ -79,8 +98,8 @@ Learn more & Support
-
Supported Versions
-
+ Supported versions
+
{%- for supported_version in supported_versions -%}
{{
@@ -97,50 +116,35 @@ Supported Versions
{% macro main() %}
-
-
-
{{ app_image(app) }}
-
- {{ app.title }}
-
-
-
{{ app.description }}
-
- {{ button('Install Now', '/dashboard/install-app/' + app.name, 'primary') }}
-
-
- {{ no_of_installs | number_k_format }} {{ 'install' if no_of_installs
- == 1 else 'installs' }}
-
-
-
-
- {%- if plans -%}
-
Pricing
- {{ app_plans_list(plans) }}
- {%- endif -%}
-
+
+
{%- if app.screenshots -%}
-
+
{%- for image in app.screenshots -%}
{%- endfor -%}
{%- endif -%}
-
+
{{ frappe.utils.md_to_html(app.long_description) }}
-
+
User Reviews
- {{ button('Write a review', link='/dashboard/user-review/' + app.name + '?title=' + app.title) }}
+ {{ button('Write a review', link='/dashboard/user-review/' + app.name) }}
{%- if (user_reviews | length) > 0 -%}
@@ -188,8 +192,8 @@
{{ review.user_name }} • {{ frappe.utils.pretty_date(review.creation) }}
- • {{ link('Reply', url='/dashboard/developer-reply/' + app.name + '/' + review.name + '?title=' + app.title) }}
-
+ • {{ link('Reply', url='/dashboard/developer-reply/' + app.name + '/' + review.name) }}
+
{% for reply in review.developer_reply %}
@@ -224,7 +228,7 @@
{% macro app_image(app, class='') %}
-
+
{%- if app.image -%}
{%- else -%}
diff --git a/press/press/doctype/marketplace_app/test_marketplace_app.py b/press/press/doctype/marketplace_app/test_marketplace_app.py
index 503f33c009d..508a3b97955 100644
--- a/press/press/doctype/marketplace_app/test_marketplace_app.py
+++ b/press/press/doctype/marketplace_app/test_marketplace_app.py
@@ -1,20 +1,18 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
+from __future__ import annotations
+
import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-from typing import Optional
from press.press.doctype.marketplace_app.utils import (
- number_k_format,
get_rating_percentage_distribution,
+ number_k_format,
)
-def create_test_marketplace_app(
- app: str, team: Optional[str] = None, sources: Optional[list[dict]] = None
-):
+def create_test_marketplace_app(app: str, team: str | None = None, sources: list[dict] | None = None):
marketplace_app = frappe.get_doc(
{
"doctype": "Marketplace App",
@@ -28,7 +26,7 @@ def create_test_marketplace_app(
return marketplace_app
-class TestMarketplaceApp(unittest.TestCase):
+class TestMarketplaceApp(FrappeTestCase):
def test_number_format_util(self):
test_cases_map = {
0: "0",
diff --git a/press/press/doctype/marketplace_app/utils.py b/press/press/doctype/marketplace_app/utils.py
index 375eef4fc39..2bd12bac7b5 100644
--- a/press/press/doctype/marketplace_app/utils.py
+++ b/press/press/doctype/marketplace_app/utils.py
@@ -1,8 +1,8 @@
# Copyright (c) 2021, Frappe Technologies Pvt. Ltd. and Contributors
-import frappe
-
from typing import Dict, List
+import frappe
+
def number_k_format(number: int):
"""Returns a '101.6k' like string representation"""
diff --git a/press/press/doctype/marketplace_app_categories/marketplace_app_categories.py b/press/press/doctype/marketplace_app_categories/marketplace_app_categories.py
index bcdbd3a01d3..da47043ea14 100644
--- a/press/press/doctype/marketplace_app_categories/marketplace_app_categories.py
+++ b/press/press/doctype/marketplace_app_categories/marketplace_app_categories.py
@@ -6,4 +6,18 @@
class MarketplaceAppCategories(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ category: DF.Link
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/marketplace_app_categories/test_marketplace_app_categories.py b/press/press/doctype/marketplace_app_categories/test_marketplace_app_categories.py
index 002466c7578..05d00dd5153 100644
--- a/press/press/doctype/marketplace_app_categories/test_marketplace_app_categories.py
+++ b/press/press/doctype/marketplace_app_categories/test_marketplace_app_categories.py
@@ -2,8 +2,8 @@
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestMarketplaceAppCategories(unittest.TestCase):
+class TestMarketplaceAppCategories(FrappeTestCase):
pass
diff --git a/press/press/doctype/marketplace_app_category/marketplace_app_category.json b/press/press/doctype/marketplace_app_category/marketplace_app_category.json
index bec522a23b3..2609f844a04 100644
--- a/press/press/doctype/marketplace_app_category/marketplace_app_category.json
+++ b/press/press/doctype/marketplace_app_category/marketplace_app_category.json
@@ -24,7 +24,7 @@
],
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2023-03-14 16:39:22.547107",
+ "modified": "2025-08-20 20:08:20.571148",
"modified_by": "Administrator",
"module": "Press",
"name": "Marketplace App Category",
@@ -42,11 +42,24 @@
"role": "System Manager",
"share": 1,
"write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Marketplace Manager",
+ "share": 1,
+ "write": 1
}
],
"quick_entry": 1,
+ "row_format": "Dynamic",
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"track_changes": 1
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/marketplace_app_category/marketplace_app_category.py b/press/press/doctype/marketplace_app_category/marketplace_app_category.py
index 33ed98c104b..a52316333d0 100644
--- a/press/press/doctype/marketplace_app_category/marketplace_app_category.py
+++ b/press/press/doctype/marketplace_app_category/marketplace_app_category.py
@@ -8,5 +8,17 @@
class MarketplaceAppCategory(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ description: DF.SmallText | None
+ slug: DF.Data | None
+ # end: auto-generated types
+
def before_insert(self):
self.slug = cleanup_page_name(self.name)
diff --git a/press/press/doctype/marketplace_app_category/test_marketplace_app_category.py b/press/press/doctype/marketplace_app_category/test_marketplace_app_category.py
index be7c0795e43..d9019c2294f 100644
--- a/press/press/doctype/marketplace_app_category/test_marketplace_app_category.py
+++ b/press/press/doctype/marketplace_app_category/test_marketplace_app_category.py
@@ -1,11 +1,10 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestMarketplaceAppCategory(unittest.TestCase):
+class TestMarketplaceAppCategory(FrappeTestCase):
pass
diff --git a/press/press/doctype/marketplace_app_screenshot/marketplace_app_screenshot.py b/press/press/doctype/marketplace_app_screenshot/marketplace_app_screenshot.py
index 539f2512768..b4784be28c4 100644
--- a/press/press/doctype/marketplace_app_screenshot/marketplace_app_screenshot.py
+++ b/press/press/doctype/marketplace_app_screenshot/marketplace_app_screenshot.py
@@ -6,4 +6,19 @@
class MarketplaceAppScreenshot(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ caption: DF.Data | None
+ image: DF.Attach | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/marketplace_app_version/marketplace_app_version.py b/press/press/doctype/marketplace_app_version/marketplace_app_version.py
index 266edab335b..9cabeeaccd6 100644
--- a/press/press/doctype/marketplace_app_version/marketplace_app_version.py
+++ b/press/press/doctype/marketplace_app_version/marketplace_app_version.py
@@ -6,4 +6,19 @@
class MarketplaceAppVersion(Document):
- pass
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ source: DF.Link
+ version: DF.Link
+ # end: auto-generated types
+
+ dashboard_fields = ["name", "version", "source"]
diff --git a/press/press/doctype/marketplace_localisation_app/__init__.py b/press/press/doctype/marketplace_localisation_app/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/marketplace_localisation_app/marketplace_localisation_app.json b/press/press/doctype/marketplace_localisation_app/marketplace_localisation_app.json
new file mode 100644
index 00000000000..ad8d11af507
--- /dev/null
+++ b/press/press/doctype/marketplace_localisation_app/marketplace_localisation_app.json
@@ -0,0 +1,39 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2024-09-11 22:58:01.233719",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "marketplace_app",
+ "country"
+ ],
+ "fields": [
+ {
+ "fieldname": "marketplace_app",
+ "fieldtype": "Link",
+ "label": "Marketplace App",
+ "options": "Marketplace App"
+ },
+ {
+ "fieldname": "country",
+ "fieldtype": "Link",
+ "label": "Country",
+ "options": "Country"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2025-08-20 20:08:02.368682",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Marketplace Localisation App",
+ "owner": "Administrator",
+ "permissions": [],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/press/doctype/marketplace_localisation_app/marketplace_localisation_app.py b/press/press/doctype/marketplace_localisation_app/marketplace_localisation_app.py
new file mode 100644
index 00000000000..3e1cd9fc38d
--- /dev/null
+++ b/press/press/doctype/marketplace_localisation_app/marketplace_localisation_app.py
@@ -0,0 +1,24 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class MarketplaceLocalisationApp(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ country: DF.Link | None
+ marketplace_app: DF.Link | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/module_setup_guide/module_setup_guide.py b/press/press/doctype/module_setup_guide/module_setup_guide.py
index ccdf24c19f6..17c76e237ad 100644
--- a/press/press/doctype/module_setup_guide/module_setup_guide.py
+++ b/press/press/doctype/module_setup_guide/module_setup_guide.py
@@ -8,4 +8,30 @@
class ModuleSetupGuide(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ enabled: DF.Check
+ industry: DF.Literal[
+ "",
+ "Manufacturing",
+ "Distribution",
+ "Retail",
+ "Services",
+ "Education",
+ "Healthcare",
+ "Non Profit",
+ "Other",
+ ]
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ setup_guide: DF.Attach | None
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/module_setup_guide/test_module_setup_guide.py b/press/press/doctype/module_setup_guide/test_module_setup_guide.py
index ff9b241ca8d..14bc5720aba 100644
--- a/press/press/doctype/module_setup_guide/test_module_setup_guide.py
+++ b/press/press/doctype/module_setup_guide/test_module_setup_guide.py
@@ -1,11 +1,10 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestModuleSetupGuide(unittest.TestCase):
+class TestModuleSetupGuide(FrappeTestCase):
pass
diff --git a/press/press/doctype/monitor_server/monitor_server.js b/press/press/doctype/monitor_server/monitor_server.js
index a403c7efb4b..011288075d4 100644
--- a/press/press/doctype/monitor_server/monitor_server.js
+++ b/press/press/doctype/monitor_server/monitor_server.js
@@ -10,6 +10,7 @@ frappe.ui.form.on('Monitor Server', {
[__('Update Agent'), 'update_agent', true, frm.doc.is_server_setup],
[__('Prepare Server'), 'prepare_server', true, !frm.doc.is_server_setup],
[__('Setup Server'), 'setup_server', true, !frm.doc.is_server_setup],
+ [__('Archive'), 'archive', true, frm.doc.provider === 'AWS EC2'],
[
__('Reconfigure Monitor Server'),
'reconfigure_monitor_server',
@@ -29,6 +30,7 @@ frappe.ui.form.on('Monitor Server', {
false,
frm.doc.is_server_setup,
],
+ [__('Update TLS Certificate'), 'update_tls_certificate', true],
].forEach(([label, method, confirm, condition]) => {
if (typeof condition === 'undefined' || condition) {
frm.add_custom_button(
diff --git a/press/press/doctype/monitor_server/monitor_server.json b/press/press/doctype/monitor_server/monitor_server.json
index 054b098d4ed..4e5c0945860 100644
--- a/press/press/doctype/monitor_server/monitor_server.json
+++ b/press/press/doctype/monitor_server/monitor_server.json
@@ -8,7 +8,10 @@
"status",
"hostname",
"domain",
+ "tls_certificate_renewal_failed",
+ "plan",
"column_break_4",
+ "cluster",
"provider",
"virtual_machine",
"is_server_setup",
@@ -21,14 +24,24 @@
"agent_section",
"agent_password",
"grafana_section",
+ "grafana_username",
+ "prometheus_username",
+ "column_break_ilpd",
"grafana_password",
+ "node_exporter_dashboard_path",
"ssh_section",
+ "ssh_user",
+ "ssh_port",
"frappe_user_password",
"frappe_public_key",
"column_break_20",
"root_public_key",
"monitoring_section",
- "monitoring_password"
+ "monitoring_password",
+ "webhook_token",
+ "column_break_nzet",
+ "prometheus_data_directory",
+ "only_monitor_uptime_metrics"
],
"fields": [
{
@@ -66,7 +79,7 @@
"fieldname": "provider",
"fieldtype": "Select",
"label": "Provider",
- "options": "Generic\nScaleway\nAWS EC2",
+ "options": "Generic\nScaleway\nAWS EC2\nOCI",
"set_only_once": 1
},
{
@@ -87,7 +100,6 @@
"fieldtype": "Data",
"in_list_view": 1,
"label": "IP",
- "reqd": 1,
"set_only_once": 1
},
{
@@ -186,15 +198,88 @@
"label": "Virtual Machine",
"mandatory_depends_on": "eval:doc.provider === \"AWS EC2\"",
"options": "Virtual Machine"
+ },
+ {
+ "fieldname": "cluster",
+ "fieldtype": "Link",
+ "label": "Cluster",
+ "options": "Cluster"
+ },
+ {
+ "fieldname": "column_break_nzet",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "/home/frappe/prometheus/data",
+ "fieldname": "prometheus_data_directory",
+ "fieldtype": "Data",
+ "label": "Prometheus Data Directory"
+ },
+ {
+ "fieldname": "grafana_username",
+ "fieldtype": "Data",
+ "label": "Grafana Username"
+ },
+ {
+ "fieldname": "column_break_ilpd",
+ "fieldtype": "Column Break"
+ },
+ {
+ "description": "Begin with / but don't end with /",
+ "fieldname": "node_exporter_dashboard_path",
+ "fieldtype": "Data",
+ "label": "Node Exporter Dashboard Path"
+ },
+ {
+ "description": "for /prometheus",
+ "fieldname": "prometheus_username",
+ "fieldtype": "Data",
+ "label": "Prometheus Username"
+ },
+ {
+ "fieldname": "ssh_user",
+ "fieldtype": "Data",
+ "label": "SSH User"
+ },
+ {
+ "default": "22",
+ "fieldname": "ssh_port",
+ "fieldtype": "Int",
+ "label": "SSH Port"
+ },
+ {
+ "fieldname": "webhook_token",
+ "fieldtype": "Data",
+ "label": "Webhook Token"
+ },
+ {
+ "default": "0",
+ "fieldname": "tls_certificate_renewal_failed",
+ "fieldtype": "Check",
+ "label": "TLS Certificate Renewal Failed",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "only_monitor_uptime_metrics",
+ "fieldtype": "Check",
+ "label": "Only Monitor Uptime Metrics"
+ },
+ {
+ "fieldname": "plan",
+ "fieldtype": "Link",
+ "label": "Plan",
+ "options": "Server Plan"
}
],
+ "grid_page_length": 50,
"links": [
{
"link_doctype": "Ansible Play",
"link_fieldname": "server"
}
],
- "modified": "2022-06-16 22:28:16.543575",
+ "modified": "2025-11-22 15:49:17.754965",
"modified_by": "Administrator",
"module": "Press",
"name": "Monitor Server",
@@ -213,8 +298,9 @@
"write": 1
}
],
+ "row_format": "Dynamic",
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"track_changes": 1
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/monitor_server/monitor_server.py b/press/press/doctype/monitor_server/monitor_server.py
index cbd38b27155..6a87498cdcf 100644
--- a/press/press/doctype/monitor_server/monitor_server.py
+++ b/press/press/doctype/monitor_server/monitor_server.py
@@ -1,22 +1,80 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2021, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
-
-import frappe
+import contextlib
import json
+from typing import TypedDict
+import frappe
+import requests
+from frappe.utils.caching import redis_cache
+from frappe.utils.data import cint
+from requests.auth import HTTPBasicAuth
from press.press.doctype.server.server import BaseServer
from press.runner import Ansible
from press.utils import log_error
+class SitesDownAlertLabels(TypedDict):
+ alertname: str
+ bench: str
+ cluster: str
+ group: str
+ instance: str
+ job: str
+ server: str
+ severity: str
+
+
+class SitesDownAlert(TypedDict):
+ labels: SitesDownAlertLabels
+
+
class MonitorServer(BaseServer):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ agent_password: DF.Password | None
+ cluster: DF.Link | None
+ domain: DF.Link | None
+ frappe_public_key: DF.Code | None
+ frappe_user_password: DF.Password | None
+ grafana_password: DF.Password | None
+ grafana_username: DF.Data | None
+ hostname: DF.Data
+ ip: DF.Data | None
+ is_server_setup: DF.Check
+ monitoring_password: DF.Password | None
+ node_exporter_dashboard_path: DF.Data | None
+ only_monitor_uptime_metrics: DF.Check
+ plan: DF.Link | None
+ private_ip: DF.Data
+ private_mac_address: DF.Data | None
+ private_vlan_id: DF.Data | None
+ prometheus_data_directory: DF.Data | None
+ prometheus_username: DF.Data | None
+ provider: DF.Literal["Generic", "Scaleway", "AWS EC2", "OCI"]
+ root_public_key: DF.Code | None
+ ssh_port: DF.Int
+ ssh_user: DF.Data | None
+ status: DF.Literal["Pending", "Installing", "Active", "Broken", "Archived"]
+ tls_certificate_renewal_failed: DF.Check
+ virtual_machine: DF.Link | None
+ webhook_token: DF.Data | None
+ # end: auto-generated types
+
def validate(self):
self.validate_agent_password()
self.validate_grafana_password()
self.validate_monitoring_password()
+ self.validate_webhook_token()
def validate_monitoring_password(self):
if not self.monitoring_password:
@@ -26,6 +84,10 @@ def validate_grafana_password(self):
if not self.grafana_password:
self.grafana_password = frappe.generate_hash(length=32)
+ def validate_webhook_token(self):
+ if not self.webhook_token:
+ self.webhook_token = frappe.generate_hash(length=32)
+
def _setup_server(self):
agent_password = self.get_password("agent_password")
agent_repository_url = self.get_agent_repository_url()
@@ -72,6 +134,8 @@ def _setup_server(self):
ansible = Ansible(
playbook="monitor.yml",
server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
variables={
"server": self.name,
"workers": 1,
@@ -81,8 +145,11 @@ def _setup_server(self):
"monitor": True,
"monitoring_password": monitoring_password,
"press_monitoring_password": press_monitoring_password,
- "press_server": frappe.local.site,
+ "press_app_server": frappe.local.site,
+ "press_db_server": f"db.{frappe.local.site}",
+ "press_db_replica_server": f"db2.{frappe.local.site}" if frappe.conf.replica_host else "",
"press_url": press_url,
+ "prometheus_data_directory": self.prometheus_data_directory,
"monitor_token": monitor_token,
"registries_json": json.dumps(registries),
"log_servers_json": json.dumps(log_servers),
@@ -108,9 +175,7 @@ def _setup_server(self):
@frappe.whitelist()
def reconfigure_monitor_server(self):
- frappe.enqueue_doc(
- self.doctype, self.name, "_reconfigure_monitor_server", queue="long", timeout=1200
- )
+ frappe.enqueue_doc(self.doctype, self.name, "_reconfigure_monitor_server", queue="long", timeout=1200)
def _reconfigure_monitor_server(self):
settings = frappe.get_single("Press Settings")
@@ -151,11 +216,15 @@ def _reconfigure_monitor_server(self):
ansible = Ansible(
playbook="reconfigure_monitoring.yml",
server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
variables={
"server": self.name,
"monitoring_password": monitoring_password,
"press_monitoring_password": press_monitoring_password,
- "press_server": frappe.local.site,
+ "press_app_server": frappe.local.site,
+ "press_db_server": f"db.{frappe.local.site}",
+ "press_db_replica_server": f"db2.{frappe.local.site}" if frappe.conf.replica_host else "",
"registries_json": json.dumps(registries),
"log_servers_json": json.dumps(log_servers),
"clusters_json": json.dumps(clusters),
@@ -169,3 +238,89 @@ def _reconfigure_monitor_server(self):
@frappe.whitelist()
def show_grafana_password(self):
return self.get_password("grafana_password")
+
+ @property
+ def alerts(self):
+ ret = requests.get(
+ f"https://{self.name}/prometheus/api/v1/rules",
+ auth=HTTPBasicAuth(self.prometheus_username, self.get_password("grafana_password")),
+ params={"type": "alert"},
+ )
+
+ ret.raise_for_status()
+ data = ret.json()
+ if data["status"] != "success":
+ frappe.throw("Error fetching alert rules from Prometheus")
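+ # assumed response shape (Prometheus /api/v1/rules):
+ # {"status": "success", "data": {"groups": [{"rules": [...]}, ...]}}
+ # all alert rules are taken from the first rule group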
+ return data["data"]["groups"][0]["rules"]
+
+ @property
+ def sites_down_alerts(self) -> list[SitesDownAlert]:
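+ # return the alerts of the first firing "Sites Down" rule (assumed to be unique)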
+ for alert in self.alerts:
+ if not (alert["name"] == "Sites Down" and alert["state"] == "firing"):
+ continue
+ return alert["alerts"]
+ return []
+
+ @property
+ def sites_down(self):
+ sites = []
+ for alert in self.sites_down_alerts:
+ sites.append(alert["labels"]["instance"])
+ return sites
+
+ def get_sites_down_for_server(self, server: str) -> list[str]:
+ sites = []
+ for alert in self.sites_down_alerts:
+ if alert["labels"]["server"] == server:
+ sites.append(alert["labels"]["instance"])
+ return sites
+
+ @property
+ def benches_down(self):
+ benches = []
+ for alert in self.sites_down_alerts:
+ benches.append(alert["labels"]["bench"])
+ return set(benches)
+
+ def get_benches_down_for_server(self, server: str) -> set[str]:
+ benches = []
+ for alert in self.sites_down_alerts:
+ if alert["labels"]["server"] == server:
+ benches.append(alert["labels"]["bench"])
+ return set(benches)
+
+
+@redis_cache(ttl=3600)
+def get_monitor_server_ips():
+ servers = frappe.get_all(
+ "Monitor Server", filters={"status": ["!=", "Archived"]}, fields=["ip", "private_ip"]
+ )
+ ips = []
+ for server in servers:
+ if server.ip:
+ ips.append(server.ip)
+ if server.private_ip:
+ ips.append(server.private_ip)
+ return ips
+
+
+def check_monitoring_servers_rate_limit_key():
+ from press.api.monitoring import MONITORING_ENDPOINT_RATE_LIMIT_WINDOW_SECONDS
+ from press.telegram_utils import Telegram
+
+ ips = get_monitor_server_ips()
+
+ for ip in ips:
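+ # key format mirrors frappe's rate limiter: <db_name>|rl:<dotted.method>:<ip>:<window_seconds>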
+ key = f"{frappe.conf.db_name}|rl:press.api.monitoring.targets:{ip}:{MONITORING_ENDPOINT_RATE_LIMIT_WINDOW_SECONDS}"
+ val = frappe.cache.get(key)
+ if not val:
+ continue
+ current_val = cint(val.decode("utf-8"))
+ if current_val > 100:
+ frappe.cache.delete(key)
+ with contextlib.suppress(Exception):
+ msg = f"Rate limit key for monitoring server {ip} had value {current_val} which is too high. Deleted the key.\n@adityahase @balamurali27 @tanmoysrt"
+ Telegram("Errors").send(msg)
diff --git a/press/press/doctype/monitor_server/test_monitor_server.py b/press/press/doctype/monitor_server/test_monitor_server.py
index 2479d07afb9..016a5ec5b57 100644
--- a/press/press/doctype/monitor_server/test_monitor_server.py
+++ b/press/press/doctype/monitor_server/test_monitor_server.py
@@ -1,11 +1,10 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2021, Frappe and Contributors
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestMonitorServer(unittest.TestCase):
+class TestMonitorServer(FrappeTestCase):
pass
diff --git a/press/press/doctype/mpesa_payment_record/__init__.py b/press/press/doctype/mpesa_payment_record/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/mpesa_payment_record/mpesa_payment_record.js b/press/press/doctype/mpesa_payment_record/mpesa_payment_record.js
new file mode 100644
index 00000000000..24ed94b9d1b
--- /dev/null
+++ b/press/press/doctype/mpesa_payment_record/mpesa_payment_record.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Mpesa Payment Record", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/mpesa_payment_record/mpesa_payment_record.json b/press/press/doctype/mpesa_payment_record/mpesa_payment_record.json
new file mode 100644
index 00000000000..651d0effa4f
--- /dev/null
+++ b/press/press/doctype/mpesa_payment_record/mpesa_payment_record.json
@@ -0,0 +1,218 @@
+{
+ "actions": [],
+ "allow_copy": 1,
+ "allow_import": 1,
+ "autoname": "MP.-.YY.-.MM.-.####",
+ "creation": "2025-01-18 10:49:30.186896",
+ "default_view": "List",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "team",
+ "merchant_request_id",
+ "transaction_id",
+ "transaction_type",
+ "transaction_time",
+ "grand_total",
+ "amount",
+ "amount_usd",
+ "mpesa_receipt_number",
+ "exchange_rate",
+ "column_break_14",
+ "phone_number",
+ "payment_partner",
+ "invoice_number",
+ "posting_date",
+ "posting_time",
+ "default_currency",
+ "amended_from",
+ "balance_transaction",
+ "local_invoice"
+ ],
+ "fields": [
+ {
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "label": "Team",
+ "options": "Team"
+ },
+ {
+ "fieldname": "merchant_request_id",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Request ID",
+ "no_copy": 1,
+ "read_only": 1
+ },
+ {
+ "fieldname": "transaction_type",
+ "fieldtype": "Select",
+ "label": "Transaction Type",
+ "options": "\nMpesa Express\nMpesa C2B"
+ },
+ {
+ "fieldname": "grand_total",
+ "fieldtype": "Currency",
+ "label": "Grand Total (Ksh)"
+ },
+ {
+ "fieldname": "amount_usd",
+ "fieldtype": "Float",
+ "label": "Amount (USD)"
+ },
+ {
+ "fieldname": "exchange_rate",
+ "fieldtype": "Float",
+ "label": "Exchange Rate",
+ "precision": "9"
+ },
+ {
+ "fieldname": "column_break_14",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "payment_partner",
+ "fieldtype": "Link",
+ "label": "Payment Partner",
+ "options": "Team"
+ },
+ {
+ "fieldname": "invoice_number",
+ "fieldtype": "Data",
+ "label": "Invoice Number",
+ "no_copy": 1,
+ "read_only": 1
+ },
+ {
+ "default": "Today",
+ "fieldname": "posting_date",
+ "fieldtype": "Date",
+ "in_list_view": 1,
+ "in_preview": 1,
+ "in_standard_filter": 1,
+ "label": "Posting Date",
+ "no_copy": 1,
+ "read_only": 1
+ },
+ {
+ "default": "Now",
+ "fieldname": "posting_time",
+ "fieldtype": "Time",
+ "label": "Posting Time",
+ "no_copy": 1,
+ "read_only": 1
+ },
+ {
+ "default": "KES",
+ "fetch_from": "company.default_currency",
+ "fieldname": "default_currency",
+ "fieldtype": "Data",
+ "label": "Default Currency",
+ "read_only": 1
+ },
+ {
+ "fieldname": "amended_from",
+ "fieldtype": "Link",
+ "label": "Amended From",
+ "no_copy": 1,
+ "options": "Mpesa Payment Record",
+ "print_hide": 1,
+ "read_only": 1
+ },
+ {
+ "fieldname": "balance_transaction",
+ "fieldtype": "Link",
+ "label": "Balance Transaction",
+ "options": "Balance Transaction"
+ },
+ {
+ "fieldname": "local_invoice",
+ "fieldtype": "Small Text",
+ "label": "Local Invoice"
+ },
+ {
+ "fieldname": "transaction_id",
+ "fieldtype": "Data",
+ "label": "Transaction ID",
+ "no_copy": 1,
+ "read_only": 1
+ },
+ {
+ "fieldname": "transaction_time",
+ "fieldtype": "Datetime",
+ "label": "Transaction Time",
+ "no_copy": 1,
+ "read_only": 1
+ },
+ {
+ "fieldname": "amount",
+ "fieldtype": "Float",
+ "in_list_view": 1,
+ "in_preview": 1,
+ "in_standard_filter": 1,
+ "label": "Amount (Ksh)",
+ "no_copy": 1,
+ "read_only": 1
+ },
+ {
+ "fieldname": "mpesa_receipt_number",
+ "fieldtype": "Data",
+ "label": "Mpesa Receipt Number",
+ "no_copy": 1,
+ "read_only": 1
+ },
+ {
+ "fieldname": "phone_number",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "in_preview": 1,
+ "in_standard_filter": 1,
+ "label": "Phone Number",
+ "no_copy": 1,
+ "read_only": 1
+ }
+ ],
+ "in_create": 1,
+ "index_web_pages_for_search": 1,
+ "is_submittable": 1,
+ "links": [],
+ "modified": "2025-02-02 17:52:49.719724",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Mpesa Payment Record",
+ "naming_rule": "Expression (old style)",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "show_preview_popup": 1,
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": [],
+ "title_field": "transaction_id",
+ "track_changes": 1
+}
\ No newline at end of file
diff --git a/press/press/doctype/mpesa_payment_record/mpesa_payment_record.py b/press/press/doctype/mpesa_payment_record/mpesa_payment_record.py
new file mode 100644
index 00000000000..f84b0e184bc
--- /dev/null
+++ b/press/press/doctype/mpesa_payment_record/mpesa_payment_record.py
@@ -0,0 +1,63 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+from __future__ import annotations
+
+import frappe
+from frappe.model.document import Document
+
+
+class MpesaPaymentRecord(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ amended_from: DF.Link | None
+ amount: DF.Float
+ amount_usd: DF.Float
+ balance_transaction: DF.Link | None
+ default_currency: DF.Data | None
+ exchange_rate: DF.Float
+ grand_total: DF.Currency
+ invoice_number: DF.Data | None
+ local_invoice: DF.SmallText | None
+ merchant_request_id: DF.Data | None
+ mpesa_receipt_number: DF.Data | None
+ payment_partner: DF.Link | None
+ phone_number: DF.Data | None
+ posting_date: DF.Date | None
+ posting_time: DF.Time | None
+ team: DF.Link | None
+ transaction_id: DF.Data | None
+ transaction_time: DF.Datetime | None
+ transaction_type: DF.Literal["", "Mpesa Express", "Mpesa C2B"]
+ # end: auto-generated types
+
+ dashboard_fields = (
+ "name",
+ "posting_date",
+ "amount",
+ "default_currency",
+ "local_invoice",
+ "amount_usd",
+ "payment_partner",
+ "exchange_rate",
+ "grand_total",
+ )
+
+ def before_insert(self):
+ self.validate_duplicate()
+
+ def validate_duplicate(self):
+ if frappe.db.exists(
+ "Mpesa Payment Record",
+ {"transaction_id": self.transaction_id, "docstatus": 1},
+ ):
+ frappe.throw(f"Mpesa Payment Record for transaction {self.transaction_id} already exists")
+
+
+def on_doctype_update():
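+ # frappe calls on_doctype_update after syncing this doctype's schema;
+ # add_unique creates a UNIQUE SQL index on transaction_id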
+ frappe.db.add_unique("Mpesa Payment Record", ["transaction_id"], constraint_name="unique_payment_record")
diff --git a/press/press/doctype/mpesa_payment_record/patches/add_unique_constraint.py b/press/press/doctype/mpesa_payment_record/patches/add_unique_constraint.py
new file mode 100644
index 00000000000..594d617ddcb
--- /dev/null
+++ b/press/press/doctype/mpesa_payment_record/patches/add_unique_constraint.py
@@ -0,0 +1,6 @@
+import frappe
+
+
+def execute():
+ frappe.reload_doc("press", "doctype", "mpesa_payment_record")
+ frappe.get_doc("DocType", "Mpesa Payment Record").run_module_method("on_doctype_update")
diff --git a/press/press/doctype/mpesa_payment_record/test_mpesa_payment_record.py b/press/press/doctype/mpesa_payment_record/test_mpesa_payment_record.py
new file mode 100644
index 00000000000..96cbf76d190
--- /dev/null
+++ b/press/press/doctype/mpesa_payment_record/test_mpesa_payment_record.py
@@ -0,0 +1,14 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class IntegrationTestMpesaPaymentRecord(FrappeTestCase):
+ """
+ Integration tests for MpesaPaymentRecord.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/mpesa_request_log/__init__.py b/press/press/doctype/mpesa_request_log/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/mpesa_request_log/mpesa_request_log.js b/press/press/doctype/mpesa_request_log/mpesa_request_log.js
new file mode 100644
index 00000000000..bff5d2f427e
--- /dev/null
+++ b/press/press/doctype/mpesa_request_log/mpesa_request_log.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Mpesa Request Log", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/mpesa_request_log/mpesa_request_log.json b/press/press/doctype/mpesa_request_log/mpesa_request_log.json
new file mode 100644
index 00000000000..546b558f668
--- /dev/null
+++ b/press/press/doctype/mpesa_request_log/mpesa_request_log.json
@@ -0,0 +1,138 @@
+{
+ "actions": [],
+ "creation": "2025-01-18 10:47:18.786442",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "request_id",
+ "integration_request_service",
+ "is_remote_request",
+ "column_break_5",
+ "request_description",
+ "status",
+ "section_break_8",
+ "url",
+ "request_headers",
+ "data",
+ "response_section",
+ "output",
+ "error"
+ ],
+ "fields": [
+ {
+ "fieldname": "request_id",
+ "fieldtype": "Data",
+ "label": "Request ID",
+ "read_only": 1
+ },
+ {
+ "fieldname": "integration_request_service",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Service",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "is_remote_request",
+ "fieldtype": "Check",
+ "label": "Is Remote Request?",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_5",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "request_description",
+ "fieldtype": "Data",
+ "label": "Request Description",
+ "read_only": 1
+ },
+ {
+ "default": "Queued",
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Status",
+ "options": "\nQueued\nAuthorized\nCompleted\nCancelled\nFailed",
+ "read_only": 1
+ },
+ {
+ "fieldname": "section_break_8",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "url",
+ "fieldtype": "Small Text",
+ "label": "URL",
+ "read_only": 1
+ },
+ {
+ "fieldname": "request_headers",
+ "fieldtype": "Code",
+ "label": "Request Headers",
+ "read_only": 1
+ },
+ {
+ "fieldname": "data",
+ "fieldtype": "Code",
+ "label": "Request Data",
+ "read_only": 1
+ },
+ {
+ "fieldname": "response_section",
+ "fieldtype": "Section Break",
+ "label": "Response"
+ },
+ {
+ "fieldname": "output",
+ "fieldtype": "Code",
+ "label": "Output",
+ "read_only": 1
+ },
+ {
+ "fieldname": "error",
+ "fieldtype": "Code",
+ "label": "Error",
+ "read_only": 1
+ }
+ ],
+ "in_create": 1,
+ "links": [],
+ "modified": "2025-02-02 18:48:36.387604",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Mpesa Request Log",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "select": 1,
+ "share": 1
+ },
+ {
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1
+ }
+ ],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": [],
+ "title_field": "request_id",
+ "track_changes": 1
+}
\ No newline at end of file
diff --git a/press/press/doctype/mpesa_request_log/mpesa_request_log.py b/press/press/doctype/mpesa_request_log/mpesa_request_log.py
new file mode 100644
index 00000000000..ad9601559cb
--- /dev/null
+++ b/press/press/doctype/mpesa_request_log/mpesa_request_log.py
@@ -0,0 +1,45 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+from __future__ import annotations
+
+import frappe
+from frappe.model.document import Document
+
+
+class MpesaRequestLog(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ data: DF.Code | None
+ error: DF.Code | None
+ integration_request_service: DF.Data | None
+ is_remote_request: DF.Check
+ output: DF.Code | None
+ request_description: DF.Data | None
+ request_headers: DF.Code | None
+ request_id: DF.Data | None
+ status: DF.Literal["", "Queued", "Authorized", "Completed", "Cancelled", "Failed"]
+ url: DF.SmallText | None
+ # end: auto-generated types
+
+ def before_insert(self):
+ self.validate_duplicate_request_id()
+
+ def validate_duplicate_request_id(self):
+ request_logs = frappe.get_all(
+ "Mpesa Request Log",
+ {
+ "name": ("!=", self.name),
+ "request_id": self.request_id,
+ "status": "Completed",
+ "integration_request_service": self.integration_request_service,
+ },
+ pluck="name",
+ )
+ if request_logs:
+ frappe.throw(f"Request log already processed with this request id: {self.request_id}")
diff --git a/press/press/doctype/mpesa_request_log/test_mpesa_request_log.py b/press/press/doctype/mpesa_request_log/test_mpesa_request_log.py
new file mode 100644
index 00000000000..5eda41b3c79
--- /dev/null
+++ b/press/press/doctype/mpesa_request_log/test_mpesa_request_log.py
@@ -0,0 +1,14 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class IntegrationTestMpesaRequestLog(FrappeTestCase):
+ """
+ Integration tests for MpesaRequestLog.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/mpesa_setup/__init__.py b/press/press/doctype/mpesa_setup/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/mpesa_setup/account_balance.html b/press/press/doctype/mpesa_setup/account_balance.html
new file mode 100644
index 00000000000..6614cab89a5
--- /dev/null
+++ b/press/press/doctype/mpesa_setup/account_balance.html
@@ -0,0 +1,27 @@
+{% if not jQuery.isEmptyObject(data) %}
+
{{ __("Balance Details") }}
+
+
+
+ {{ __("Account Type") }}
+ {{ __("Current Balance") }}
+ {{ __("Available Balance") }}
+ {{ __("Reserved Balance") }}
+ {{ __("Uncleared Balance") }}
+
+
+
+ {% for(const [key, value] of Object.entries(data)) { %}
+
+ {%= key %}
+ {%= value["current_balance"] %}
+ {%= value["available_balance"] %}
+ {%= value["reserved_balance"] %}
+ {%= value["uncleared_balance"] %}
+
+ {% } %}
+
+
+{% else %}
+
Account Balance Information Not Available.
+{% endif %}
diff --git a/press/press/doctype/mpesa_setup/mpesa_connector.py b/press/press/doctype/mpesa_setup/mpesa_connector.py
new file mode 100644
index 00000000000..5b853abc10b
--- /dev/null
+++ b/press/press/doctype/mpesa_setup/mpesa_connector.py
@@ -0,0 +1,145 @@
+import base64
+import datetime
+
+import requests
+from requests.auth import HTTPBasicAuth
+
+
+class MpesaConnector:
+ def __init__(
+ self,
+ env="sandbox",
+ app_key=None,
+ app_secret=None,
+ sandbox_url="https://sandbox.safaricom.co.ke",
+ live_url="https://api.safaricom.co.ke",
+ ):
+ """Setup configuration for Mpesa connector and generate new access token."""
+ self.env = env
+ self.app_key = app_key
+ self.app_secret = app_secret
+ if env == "sandbox":
+ self.base_url = sandbox_url
+ else:
+ self.base_url = live_url
+ self.authenticate()
+
+ def authenticate(self):
+ """
+ This method is used to fetch the access token required by Mpesa.
+
+ Returns:
+ access_token (str): This token is to be used with the Bearer header for further API calls to Mpesa.
+ """
+ authenticate_uri = "/oauth/v1/generate?grant_type=client_credentials"
+ authenticate_url = f"{self.base_url}{authenticate_uri}"
+ r = requests.get(authenticate_url, auth=HTTPBasicAuth(self.app_key, self.app_secret))
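+ # assumes a 2xx JSON response; with invalid credentials Safaricom returns an
+ # error body without "access_token" and the lookup below raises KeyError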
+ self.authentication_token = r.json()["access_token"]
+ return r.json()["access_token"]
+
+ def get_balance(
+ self,
+ initiator=None,
+ security_credential=None,
+ party_a=None,
+ identifier_type=None,
+ remarks=None,
+ queue_timeout_url=None,
+ result_url=None,
+ ):
+ """
+ This method uses Mpesa's Account Balance API to enquire the balance on an MPesa BuyGoods (Till Number).
+
+ Args:
+ initiator (str): Username used to authenticate the transaction.
+ security_credential (str): Generate from developer portal.
+ command_id: Hardcoded to "AccountBalance" in the payload (not a function parameter).
+ party_a (int): Till number being queried.
+ identifier_type (int): Type of organization receiving the transaction. (MSISDN/Till Number/Organization short code)
+ remarks (str): Comments sent along with the transaction (maximum 100 characters).
+ queue_timeout_url (str): The URL that handles information about timed-out transactions.
+ result_url (str): The URL that receives results from the MPesa API call.
+
+ Returns:
+ OriginatorConversationID (str): The unique request ID for tracking a transaction.
+ ConversationID (str): The unique request ID returned by Mpesa for each request made.
+ ResponseDescription (str): Response description message.
+ """
+
+ payload = {
+ "Initiator": initiator,
+ "SecurityCredential": security_credential,
+ "CommandID": "AccountBalance",
+ "PartyA": party_a,
+ "IdentifierType": identifier_type,
+ "Remarks": remarks,
+ "QueueTimeOutURL": queue_timeout_url,
+ "ResultURL": result_url,
+ }
+ headers = {
+ "Authorization": f"Bearer {self.authentication_token}",
+ "Content-Type": "application/json",
+ }
+ saf_url = "{}{}".format(self.base_url, "/mpesa/accountbalance/v1/query")
+ r = requests.post(saf_url, headers=headers, json=payload)
+ return r.json()
+
+ def stk_push(
+ self,
+ business_shortcode=None,
+ passcode=None,
+ amount=None,
+ callback_url=None,
+ reference_code=None,
+ phone_number=None,
+ description=None,
+ ):
+ """
+ This method uses Mpesa's Express API to initiate online payment on behalf of a customer.
+
+ Args:
+ business_shortcode (int): The short code of the organization.
+ passcode (str): Get from the developer portal.
+ amount (int): The amount being transacted.
+ callback_url (str): A valid secure URL used to receive notifications from the MPesa API.
+ reference_code (str): Account reference; an alphanumeric identifier of the transaction for the CustomerPayBillOnline transaction type, defined by your system.
+ phone_number (int): The mobile number to receive the STK PIN prompt.
+ description (str): Any additional information/comment sent along with the request from your system (maximum 13 characters).
+
+ Success Response:
+ CustomerMessage (str): A message that customers can understand.
+ CheckoutRequestID (str): A globally unique identifier of the processed checkout transaction request.
+ ResponseDescription (str): Describes success or failure.
+ MerchantRequestID (str): A globally unique identifier for any submitted payment request.
+ ResponseCode (int): 0 means success; any other value is an error code, e.g. 404.001.03.
+
+ Error Response:
+ requestId (str): A unique request ID for the payment request.
+ errorCode (str): A predefined code that indicates the reason for request failure.
+ errorMessage (str): A predefined message that explains the reason for request failure.
+ """
+
+ time = str(datetime.datetime.now()).split(".")[0].replace("-", "").replace(" ", "").replace(":", "")
+ password = f"{business_shortcode!s}{passcode!s}{time}"
+ encoded = base64.b64encode(bytes(password, encoding="utf8"))
+ payload = {
+ "BusinessShortCode": business_shortcode,
+ "Password": encoded.decode("utf-8"),
+ "Timestamp": time,
+ "Amount": amount,
+ "PartyA": int(phone_number),
+ "PartyB": reference_code,
+ "PhoneNumber": int(phone_number),
+ "CallBackURL": callback_url,
+ "AccountReference": reference_code,
+ "TransactionDesc": description,
+ "TransactionType": "CustomerPayBillOnline",
+ }
+ headers = {
+ "Authorization": f"Bearer {self.authentication_token}",
+ "Content-Type": "application/json",
+ }
+
+ saf_url = "{}{}".format(self.base_url, "/mpesa/stkpush/v1/processrequest")
+ r = requests.post(saf_url, headers=headers, json=payload)
+ return r.json()
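+
+
+# Illustrative usage (all credentials and the callback path are placeholders):
+#
+#   connector = MpesaConnector(env="sandbox", app_key="<key>", app_secret="<secret>")
+#   response = connector.stk_push(
+#       business_shortcode="<shortcode>",
+#       passcode="<passcode>",
+#       amount=10,
+#       callback_url="https://example.com/api/method/<callback>",
+#       reference_code="FC-0001",
+#       phone_number=254700000000,  # placeholder MSISDN
+#       description="Test payment",
+#   )
+#   # a successful response includes CheckoutRequestID and ResponseCode == "0"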
diff --git a/press/press/doctype/mpesa_setup/mpesa_setup.js b/press/press/doctype/mpesa_setup/mpesa_setup.js
new file mode 100644
index 00000000000..85098ec7b4d
--- /dev/null
+++ b/press/press/doctype/mpesa_setup/mpesa_setup.js
@@ -0,0 +1,38 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('Mpesa Setup', {
+ onload_post_render: function (frm) {
+ frm.events.setup_account_balance_html(frm);
+ },
+
+ refresh: function (frm) {
+ frappe.realtime.on('refresh_mpesa_dashboard', function () {
+ frm.reload_doc();
+ frm.events.setup_account_balance_html(frm);
+ });
+ },
+
+ get_account_balance: function (frm) {
+ if (!frm.doc.initiator_name && !frm.doc.security_credential) {
+ frappe.throw(
+ __('Please set the initiator name and the security credential'),
+ );
+ }
+ frappe.call({
+ method: 'get_account_balance_info',
+ doc: frm.doc,
+ });
+ },
+
+ setup_account_balance_html: function (frm) {
+ if (!frm.doc.account_balance) return;
+ $('div').remove('.form-dashboard-section.custom');
+ frm.dashboard.add_section(
+ frappe.render_template('account_balance', {
+ data: JSON.parse(frm.doc.account_balance),
+ }),
+ );
+ frm.dashboard.show();
+ },
+});
diff --git a/press/press/doctype/mpesa_setup/mpesa_setup.json b/press/press/doctype/mpesa_setup/mpesa_setup.json
new file mode 100644
index 00000000000..efe7194ede0
--- /dev/null
+++ b/press/press/doctype/mpesa_setup/mpesa_setup.json
@@ -0,0 +1,143 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "autoname": "format:{mpesa_setup_id}-{api_type}",
+ "creation": "2025-01-17 14:42:39.157512",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "team",
+ "mpesa_setup_id",
+ "consumer_key",
+ "consumer_secret",
+ "till_number",
+ "api_type",
+ "sandbox",
+ "column_break_qzeb",
+ "transaction_limit",
+ "pass_key",
+ "initiator_name",
+ "business_shortcode",
+ "security_credential"
+ ],
+ "fields": [
+ {
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Team",
+ "options": "Team",
+ "reqd": 1
+ },
+ {
+ "fieldname": "consumer_key",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Consumer Key",
+ "reqd": 1
+ },
+ {
+ "fieldname": "consumer_secret",
+ "fieldtype": "Password",
+ "in_list_view": 1,
+ "label": "Consumer Secret",
+ "reqd": 1
+ },
+ {
+ "fieldname": "till_number",
+ "fieldtype": "Data",
+ "label": "Till Number",
+ "reqd": 1
+ },
+ {
+ "default": "Mpesa Express",
+ "fieldname": "api_type",
+ "fieldtype": "Select",
+ "label": "API Type",
+ "options": "Mpesa Express\nMpesa C2B"
+ },
+ {
+ "default": "150000",
+ "fieldname": "transaction_limit",
+ "fieldtype": "Float",
+ "label": "Transaction Limit"
+ },
+ {
+ "fieldname": "initiator_name",
+ "fieldtype": "Data",
+ "label": "Initiator Name"
+ },
+ {
+ "default": "0",
+ "fieldname": "sandbox",
+ "fieldtype": "Check",
+ "label": "Sandbox"
+ },
+ {
+ "fieldname": "business_shortcode",
+ "fieldtype": "Data",
+ "label": "Business Shortcode"
+ },
+ {
+ "fieldname": "column_break_qzeb",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "pass_key",
+ "fieldtype": "Password",
+ "label": "Pass Key",
+ "reqd": 1
+ },
+ {
+ "fieldname": "security_credential",
+ "fieldtype": "Small Text",
+ "label": "Security Credential",
+ "reqd": 1
+ },
+ {
+ "fieldname": "mpesa_setup_id",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Mpesa Setup ID",
+ "reqd": 1,
+ "unique": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2025-01-31 22:35:54.537908",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Mpesa Setup",
+ "naming_rule": "Expression",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/mpesa_setup/mpesa_setup.py b/press/press/doctype/mpesa_setup/mpesa_setup.py
new file mode 100644
index 00000000000..85722e58602
--- /dev/null
+++ b/press/press/doctype/mpesa_setup/mpesa_setup.py
@@ -0,0 +1,32 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+from __future__ import annotations
+
+# import frappe
+from frappe.model.document import Document
+
+
+class MpesaSetup(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ api_type: DF.Literal["Mpesa Express", "Mpesa C2B"]
+ business_shortcode: DF.Data | None
+ consumer_key: DF.Data
+ consumer_secret: DF.Password
+ initiator_name: DF.Data | None
+ mpesa_setup_id: DF.Data
+ pass_key: DF.Password
+ sandbox: DF.Check
+ security_credential: DF.SmallText
+ team: DF.Link
+ till_number: DF.Data
+ transaction_limit: DF.Float
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/mpesa_setup/test_mpesa_setup.py b/press/press/doctype/mpesa_setup/test_mpesa_setup.py
new file mode 100644
index 00000000000..3b3772aacbb
--- /dev/null
+++ b/press/press/doctype/mpesa_setup/test_mpesa_setup.py
@@ -0,0 +1,14 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class IntegrationTestMpesaSetup(FrappeTestCase):
+ """
+ Integration tests for MpesaSetup.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/nfs_server/__init__.py b/press/press/doctype/nfs_server/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/nfs_server/nfs_server.js b/press/press/doctype/nfs_server/nfs_server.js
new file mode 100644
index 00000000000..2a624a541ab
--- /dev/null
+++ b/press/press/doctype/nfs_server/nfs_server.js
@@ -0,0 +1,93 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('NFS Server', {
+ refresh(frm) {
+ [
+ [
+ __('Prepare Server'),
+ 'prepare_server',
+ true,
+ !frm.doc.is_server_prepared,
+ ],
+ [__('Setup Server'), 'setup_server', true, !frm.doc.is_server_setup],
+ ].forEach(([label, method, confirm, condition]) => {
+ if (typeof condition === 'undefined' || condition) {
+ frm.add_custom_button(
+ label,
+ () => {
+ if (confirm) {
+ frappe.confirm(
+ `Are you sure you want to ${label.toLowerCase()}?`,
+ () =>
+ frm.call(method).then((r) => {
+ if (r.message) {
+ frappe.msgprint(r.message);
+ } else {
+ frm.refresh();
+ }
+ }),
+ );
+ } else {
+ frm.call(method).then((r) => {
+ if (r.message) {
+ frappe.msgprint(r.message);
+ } else {
+ frm.refresh();
+ }
+ });
+ }
+ },
+ __('Actions'),
+ );
+ }
+ });
+ if (frm.doc.status === 'Active') {
+ frm.add_custom_button('Add Mount Enabled Server', () => {
+ frappe.prompt(
+ [
+ {
+ fieldtype: 'Link',
+ fieldname: 'server',
+ label: 'Server',
+ options: 'Server',
+ reqd: 1,
+ },
+ ],
+ ({ server }) => {
+ frm
+ .call('add_mount_enabled_server', {
+ server: server,
+ })
+ .then((r) => {
+ frm.refresh();
+ });
+ },
+ );
+ });
+
+ frm.add_custom_button('Remove Mount Enabled Server', () => {
+ frappe.prompt(
+ [
+ {
+ fieldtype: 'Link',
+ fieldname: 'server',
+ label: 'Server',
+ options: 'Server',
+ reqd: 1,
+ },
+ ],
+ ({ server }) => {
+ frm
+ .call('remove_mount_enabled_server', {
+ server: server,
+ })
+ .then((r) => {
+ frm.refresh();
+ });
+ },
+ );
+ });
+ }
+ },
+});
diff --git a/press/press/doctype/nfs_server/nfs_server.json b/press/press/doctype/nfs_server/nfs_server.json
new file mode 100644
index 00000000000..afebd57e231
--- /dev/null
+++ b/press/press/doctype/nfs_server/nfs_server.json
@@ -0,0 +1,253 @@
+{
+ "actions": [],
+ "creation": "2021-01-04 16:30:38.925921",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "status",
+ "hostname",
+ "domain",
+ "tls_certificate_renewal_failed",
+ "column_break_4",
+ "cluster",
+ "provider",
+ "virtual_machine",
+ "is_server_prepared",
+ "is_server_setup",
+ "networking_section",
+ "ip",
+ "column_break_9",
+ "private_ip",
+ "private_mac_address",
+ "private_vlan_id",
+ "agent_section",
+ "agent_password",
+ "ssh_section",
+ "ssh_user",
+ "ssh_port",
+ "frappe_user_password",
+ "frappe_public_key",
+ "column_break_20",
+ "root_public_key",
+ "monitoring_section",
+ "monitoring_password"
+ ],
+ "fields": [
+ {
+ "default": "Pending",
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Status",
+ "options": "Pending\nInstalling\nActive\nBroken\nArchived",
+ "read_only": 1,
+ "reqd": 1
+ },
+ {
+ "fieldname": "hostname",
+ "fieldtype": "Data",
+ "label": "Hostname",
+ "reqd": 1,
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "domain",
+ "fieldtype": "Link",
+ "label": "Domain",
+ "options": "Root Domain",
+ "read_only": 1,
+ "set_only_once": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "is_server_setup",
+ "fieldtype": "Check",
+ "label": "Server Setup",
+ "read_only": 1
+ },
+ {
+ "fetch_from": "virtual_machine.public_ip_address",
+ "fieldname": "ip",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "IP",
+ "reqd": 1,
+ "set_only_once": 1
+ },
+ {
+ "fetch_from": "virtual_machine.private_ip_address",
+ "fieldname": "private_ip",
+ "fieldtype": "Data",
+ "label": "Private IP",
+ "reqd": 1,
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "ssh_section",
+ "fieldtype": "Section Break",
+ "label": "SSH"
+ },
+ {
+ "fieldname": "root_public_key",
+ "fieldtype": "Code",
+ "label": "Root Public Key",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_20",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "frappe_public_key",
+ "fieldtype": "Code",
+ "label": "Frappe Public Key",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_4",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "Generic",
+ "fieldname": "provider",
+ "fieldtype": "Select",
+ "label": "Provider",
+ "options": "Generic\nScaleway\nAWS EC2\nOCI",
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "frappe_user_password",
+ "fieldtype": "Password",
+ "label": "Frappe User Password",
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "networking_section",
+ "fieldtype": "Section Break",
+ "label": "Networking"
+ },
+ {
+ "fieldname": "column_break_9",
+ "fieldtype": "Column Break"
+ },
+ {
+ "depends_on": "eval: doc.provider === \"Scaleway\"",
+ "fieldname": "private_mac_address",
+ "fieldtype": "Data",
+ "label": "Private Mac Address",
+ "mandatory_depends_on": "eval: doc.provider === \"Scaleway\"",
+ "set_only_once": 1
+ },
+ {
+ "depends_on": "eval: doc.provider === \"Scaleway\"",
+ "fieldname": "private_vlan_id",
+ "fieldtype": "Data",
+ "label": "Private VLAN ID",
+ "mandatory_depends_on": "eval: doc.provider === \"Scaleway\"",
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "agent_section",
+ "fieldtype": "Section Break",
+ "label": "Agent"
+ },
+ {
+ "fieldname": "agent_password",
+ "fieldtype": "Password",
+ "label": "Agent Password"
+ },
+ {
+ "fieldname": "monitoring_section",
+ "fieldtype": "Section Break",
+ "label": "Monitoring"
+ },
+ {
+ "fieldname": "monitoring_password",
+ "fieldtype": "Password",
+ "label": "Monitoring Password",
+ "set_only_once": 1
+ },
+ {
+ "depends_on": "eval:doc.provider === \"AWS EC2\"",
+ "fieldname": "virtual_machine",
+ "fieldtype": "Link",
+ "label": "Virtual Machine",
+ "mandatory_depends_on": "eval:doc.provider === \"AWS EC2\"",
+ "options": "Virtual Machine"
+ },
+ {
+ "fieldname": "ssh_user",
+ "fieldtype": "Data",
+ "label": "SSH User"
+ },
+ {
+ "default": "22",
+ "fieldname": "ssh_port",
+ "fieldtype": "Int",
+ "label": "SSH Port"
+ },
+ {
+ "default": "0",
+ "fieldname": "tls_certificate_renewal_failed",
+ "fieldtype": "Check",
+ "label": "TLS Certificate Renewal Failed",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "is_server_prepared",
+ "fieldtype": "Check",
+ "label": "Is Server Prepared"
+ },
+ {
+ "fieldname": "cluster",
+ "fieldtype": "Link",
+ "label": "Cluster",
+ "options": "Cluster"
+ }
+ ],
+ "links": [
+ {
+ "link_doctype": "Ansible Play",
+ "link_fieldname": "server"
+ },
+ {
+ "link_doctype": "Agent Job",
+ "link_fieldname": "server"
+ },
+ {
+ "link_doctype": "NFS Volume Attachment",
+ "link_fieldname": "nfs_server"
+ },
+ {
+ "link_doctype": "NFS Volume Detachment",
+ "link_fieldname": "nfs_server"
+ }
+ ],
+ "modified": "2025-10-15 23:20:56.419708",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "NFS Server",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": [],
+ "track_changes": 1
+}
diff --git a/press/press/doctype/nfs_server/nfs_server.py b/press/press/doctype/nfs_server/nfs_server.py
new file mode 100644
index 00000000000..fd61ba4fec9
--- /dev/null
+++ b/press/press/doctype/nfs_server/nfs_server.py
@@ -0,0 +1,121 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+
+import frappe
+
+from press.press.doctype.nfs_volume_attachment.nfs_volume_attachment import NFSVolumeAttachment
+from press.press.doctype.nfs_volume_detachment.nfs_volume_detachment import NFSVolumeDetachment
+from press.press.doctype.server.server import BaseServer
+from press.runner import Ansible
+from press.utils import log_error
+
+
+class NFSServer(BaseServer):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ agent_password: DF.Password | None
+ cluster: DF.Link | None
+ domain: DF.Link | None
+ frappe_public_key: DF.Code | None
+ frappe_user_password: DF.Password | None
+ hostname: DF.Data
+ ip: DF.Data
+ is_server_prepared: DF.Check
+ is_server_setup: DF.Check
+ monitoring_password: DF.Password | None
+ private_ip: DF.Data
+ private_mac_address: DF.Data | None
+ private_vlan_id: DF.Data | None
+ provider: DF.Literal["Generic", "Scaleway", "AWS EC2", "OCI"]
+ root_public_key: DF.Code | None
+ ssh_port: DF.Int
+ ssh_user: DF.Data | None
+ status: DF.Literal["Pending", "Installing", "Active", "Broken", "Archived"]
+ tls_certificate_renewal_failed: DF.Check
+ virtual_machine: DF.Link | None
+ # end: auto-generated types
+
+ def validate(self):
+ self.validate_agent_password()
+ # self.validate_monitoring_password()
+
+ def validate_monitoring_password(self):
+ if not self.monitoring_password:
+ self.monitoring_password = frappe.generate_hash()
+
+ def _setup_server(self):
+ agent_password = self.get_password("agent_password")
+ agent_repository_url = self.get_agent_repository_url()
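+ # Passing False suppresses the exception if no monitoring password has been set yet.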
+ monitoring_password = self.get_password("monitoring_password", False)
+ certificate_name = frappe.db.get_value(
+ "TLS Certificate", {"wildcard": True, "domain": self.domain}, "name"
+ )
+ certificate = frappe.get_doc("TLS Certificate", certificate_name)
+ try:
+ ansible = Ansible(
+ playbook="nfs_server.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ variables={
+ "server": self.name,
+ "workers": 1,
+ "domain": self.domain,
+ "agent_password": agent_password,
+ "agent_repository_url": agent_repository_url,
+ "monitoring_password": monitoring_password,
+ "private_ip": self.private_ip,
+ "certificate_private_key": certificate.private_key,
+ "certificate_full_chain": certificate.full_chain,
+ "certificate_intermediate_chain": certificate.intermediate_chain,
+ },
+ )
+ play = ansible.run()
+ self.reload()
+ if play.status == "Success":
+ self.status = "Active"
+ self.is_server_setup = True
+ else:
+ self.status = "Broken"
+ except Exception:
+ log_error("Agent NFS Setup Exception", server=self.as_dict())
+
+ self.save()
+
+ @frappe.whitelist()
+ def add_mount_enabled_server(
+ self,
+ server: str,
+ ) -> NFSVolumeAttachment:
+ """Add server to nfs servers ACL and create a shared directory"""
+ secondary_server = frappe.get_value("Server", server, "secondary_server")
+ nfs_volume_attachment: NFSVolumeAttachment = frappe.get_doc(
+ {
+ "doctype": "NFS Volume Attachment",
+ "nfs_server": self.name,
+ "primary_server": server,
+ "secondary_server": secondary_server,
+ }
+ )
+ return nfs_volume_attachment.insert()
+
+ @frappe.whitelist()
+ def remove_mount_enabled_server(self, server: str) -> NFSVolumeDetachment:
+ secondary_server = frappe.get_value("Server", server, "secondary_server")
+ nfs_volume_detachment: NFSVolumeDetachment = frappe.get_doc(
+ {
+ "doctype": "NFS Volume Detachment",
+ "nfs_server": self.name,
+ "primary_server": server,
+ "secondary_server": secondary_server,
+ }
+ )
+ return nfs_volume_detachment.insert()
diff --git a/press/press/doctype/nfs_server/test_nfs_server.py b/press/press/doctype/nfs_server/test_nfs_server.py
new file mode 100644
index 00000000000..2f44ae8548d
--- /dev/null
+++ b/press/press/doctype/nfs_server/test_nfs_server.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests import IntegrationTestCase, UnitTestCase
+
+# On IntegrationTestCase, the doctype test records and all
+# link-field test record dependencies are recursively loaded.
+# Use these module variables to add to or remove from that list.
+EXTRA_TEST_RECORD_DEPENDENCIES = [] # e.g. ["User"]
+IGNORE_TEST_RECORD_DEPENDENCIES = [] # e.g. ["User"]
+
+
+class UnitTestNFSServer(UnitTestCase):
+ """
+ Unit tests for NFSServer.
+ Use this class for testing individual functions and methods.
+ """
+
+ pass
+
+
+class IntegrationTestNFSServer(IntegrationTestCase):
+ """
+ Integration tests for NFSServer.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/nfs_volume_attachment/__init__.py b/press/press/doctype/nfs_volume_attachment/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/nfs_volume_attachment/nfs_volume_attachment.js b/press/press/doctype/nfs_volume_attachment/nfs_volume_attachment.js
new file mode 100644
index 00000000000..50ad0315757
--- /dev/null
+++ b/press/press/doctype/nfs_volume_attachment/nfs_volume_attachment.js
@@ -0,0 +1,19 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('NFS Volume Attachment', {
+ refresh(frm) {
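+ // Force Continue re-enqueues the step pipeline so it can be retried after a failure.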
+ if (frm.doc.status === 'Failure') {
+ frm.add_custom_button('Force Continue', () => {
+ frm.call('force_continue').then((r) => {
+ if (r.message) {
+ frappe.msgprint(r.message);
+ } else {
+ frm.refresh();
+ }
+ });
+ });
+ }
+ },
+});
diff --git a/press/press/doctype/nfs_volume_attachment/nfs_volume_attachment.json b/press/press/doctype/nfs_volume_attachment/nfs_volume_attachment.json
new file mode 100644
index 00000000000..8f0dd2f8128
--- /dev/null
+++ b/press/press/doctype/nfs_volume_attachment/nfs_volume_attachment.json
@@ -0,0 +1,119 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-10-06 18:23:22.045154",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "nfs_settings_section",
+ "primary_server",
+ "secondary_server",
+ "status",
+ "column_break_pbze",
+ "steps_section",
+ "nfs_volume_attachment_steps"
+ ],
+ "fields": [
+ {
+ "fieldname": "nfs_settings_section",
+ "fieldtype": "Section Break",
+ "label": "NFS Settings"
+ },
+ {
+ "description": "Server that is sharing it's file system.",
+ "fieldname": "primary_server",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Primary Server",
+ "options": "Server",
+ "reqd": 1
+ },
+ {
+ "description": "Server that is using the primary server's file system.",
+ "fieldname": "secondary_server",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Secondary Server",
+ "options": "Server",
+ "read_only": 1
+ },
+ {
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Status",
+ "options": "Pending\nRunning\nSuccess\nFailure\nArchived",
+ "read_only": 1
+ },
+ {
+ "fieldname": "steps_section",
+ "fieldtype": "Section Break",
+ "label": "Steps"
+ },
+ {
+ "fieldname": "nfs_volume_attachment_steps",
+ "fieldtype": "Table",
+ "options": "NFS Volume Attachment Step"
+ },
+ {
+ "fieldname": "column_break_pbze",
+ "fieldtype": "Column Break"
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "links": [
+ {
+ "link_doctype": "Press Notification",
+ "link_fieldname": "document_name"
+ }
+ ],
+ "modified": "2026-01-16 14:56:58.218182",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "NFS Volume Attachment",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "rows_threshold_for_grid_search": 20,
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": [],
+ "title_field": "primary_server"
+}
\ No newline at end of file
diff --git a/press/press/doctype/nfs_volume_attachment/nfs_volume_attachment.py b/press/press/doctype/nfs_volume_attachment/nfs_volume_attachment.py
new file mode 100644
index 00000000000..b7d3ff7807a
--- /dev/null
+++ b/press/press/doctype/nfs_volume_attachment/nfs_volume_attachment.py
@@ -0,0 +1,464 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+from __future__ import annotations
+
+import typing
+
+import frappe
+from frappe.model.document import Document
+
+from press.agent import Agent
+from press.press.doctype.auto_scale_record.auto_scale_record import AutoScaleStepFailureHandler
+from press.press.doctype.deploy_candidate_build.deploy_candidate_build import is_image_in_registry
+from press.runner import Ansible, Status, StepHandler
+
+if typing.TYPE_CHECKING:
+ from press.press.doctype.agent_job.agent_job import AgentJob
+ from press.press.doctype.nfs_volume_attachment_step.nfs_volume_attachment_step import (
+ NFSVolumeAttachmentStep,
+ )
+ from press.press.doctype.server.server import Server
+ from press.press.doctype.virtual_machine.virtual_machine import VirtualMachine
+
+
+class NFSVolumeAttachment(Document, AutoScaleStepFailureHandler, StepHandler):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.nfs_volume_attachment_step.nfs_volume_attachment_step import (
+ NFSVolumeAttachmentStep,
+ )
+
+ nfs_volume_attachment_steps: DF.Table[NFSVolumeAttachmentStep]
+ primary_server: DF.Link
+ secondary_server: DF.Link | None
+ status: DF.Literal["Pending", "Running", "Success", "Failure", "Archived"]
+ # end: auto-generated types
+
+ def validate(self):
+ """Check if the primary server has a secondary server provisioned with no existing attachments"""
+ has_shared_volume_setup = frappe.db.exists(
+ "NFS Volume Attachment",
+ {
+ "primary_server": self.primary_server,
+ "secondary_server": self.secondary_server,
+ "status": "Success",
+ },
+ )
+
+ if has_shared_volume_setup:
+ frappe.throw(
+ f"{self.primary_server} is already sharing benches with {self.secondary_server}!",
+ )
+
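+ # before_insert registers each method below as a row in nfs_volume_attachment_steps;
+ # _execute_steps (StepHandler) runs them in order, and each wait_* step polls the job created before it.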
+ def mark_servers_as_installing(self, step: "NFSVolumeAttachmentStep"):
+ """Mark primary and secondary servers as `Installing`"""
+ step.status = Status.Running
+ step.save()
+
+ frappe.db.set_value("Server", self.primary_server, "status", "Installing")
+ frappe.db.set_value("Server", self.secondary_server, "status", "Installing")
+
+ step.status = Status.Success
+ step.save()
+
+ def start_secondary_server(self, step: "NFSVolumeAttachmentStep"):
+ """Start secondary server"""
+ step.status = Status.Running
+ step.save()
+
+ secondary_server_vm = frappe.db.get_value("Server", self.secondary_server, "virtual_machine")
+ virtual_machine: "VirtualMachine" = frappe.get_doc("Virtual Machine", secondary_server_vm)
+
+ if virtual_machine.status != "Running":
+ virtual_machine.start()
+
+ step.status = Status.Success
+ step.save()
+
+ def wait_for_secondary_server_to_start(self, step: "NFSVolumeAttachmentStep"):
+ """Wait for secondary server to start"""
+ step.status = Status.Running
+ step.is_waiting = True
+ step.save()
+
+ virtual_machine = frappe.db.get_value("Server", self.secondary_server, "virtual_machine")
+
+ self.handle_vm_status_job(step, virtual_machine=virtual_machine, expected_status="Running")
+
+ def setup_nfs_common_on_secondary(self, step: "NFSVolumeAttachmentStep"):
+ """Install nfs common on secondary server for sanity"""
+ server: Server = frappe.get_doc("Server", self.secondary_server)
+ step.status = Status.Running
+ step.save()
+
+ try:
+ ansible = Ansible(
+ playbook="install_nfs_common.yml",
+ server=server,
+ user=server._ssh_user(),
+ port=server._ssh_port(),
+ )
+ self.handle_ansible_play(step, ansible)
+ except Exception as e:
+ self._fail_ansible_step(step, ansible, e)
+ raise
+
+ def allow_servers_to_mount(self, step: "NFSVolumeAttachmentStep"):
+ """Allow primary and secondary server to share fs"""
+ primary_server: Server = frappe.get_cached_doc("Server", self.primary_server)
+ secondary_server_private_ip = frappe.db.get_value(
+ "Server", primary_server.secondary_server, "private_ip"
+ )
+
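+ # Create the ACL agent job and mark this step done; wait_for_acl_addition tracks the job itself.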
+ try:
+ agent_job = Agent(primary_server.name).add_servers_to_acl(
+ secondary_server_private_ip=secondary_server_private_ip,
+ reference_doctype=primary_server.doctype,
+ reference_name=primary_server.name,
+ )
+ step.job_type = "Agent Job"
+ step.job = agent_job.name
+ step.status = Status.Success
+ step.save()
+ except Exception as e:
+ self._fail_job_step(step, e)
+ raise
+
+ def wait_for_acl_addition(self, step: "NFSVolumeAttachmentStep"):
+ """Wait for servers to be added to ACL"""
+ step.status = Status.Running
+ step.is_waiting = True
+ step.save()
+
+ job = frappe.db.get_value(
+ "NFS Volume Attachment Step",
+ {
+ "parent": self.name,
+ "step_name": "Allow primary and secondary server to share fs",
+ },
+ "job",
+ )
+
+ # Jobs sometimes go undelivered; fetch the status manually.
+ job_doc: "AgentJob" = frappe.get_doc("Agent Job", job)
+ job_doc.get_status()
+
+ self.handle_agent_job(step, job)
+
+ def mount_shared_folder_on_secondary_server(self, step: "NFSVolumeAttachmentStep") -> None:
+ """Mount shared folder on secondary server"""
+ secondary_server: Server = frappe.get_cached_doc("Server", self.secondary_server)
+ primary_server_private_ip = frappe.db.get_value("Server", secondary_server.primary, "private_ip")
+ shared_directory = frappe.db.get_single_value("Press Settings", "shared_directory")
+
+ step.status = Status.Running
+ step.save()
+
+ try:
+ ansible = Ansible(
+ playbook="mount_shared_folder.yml",
+ server=secondary_server,
+ user=secondary_server._ssh_user(),
+ port=secondary_server._ssh_port(),
+ variables={
+ "primary_server_private_ip": primary_server_private_ip,
+ "using_fs_of_server": self.primary_server,
+ "shared_directory": shared_directory,
+ },
+ )
+
+ self.handle_ansible_play(step, ansible)
+ except Exception as e:
+ self._fail_ansible_step(step, ansible, e)
+ raise
+
+ def link_benches_to_shared(self, step: "NFSVolumeAttachmentStep") -> None:
+ """Link benches to the shared NFS directory."""
+ primary_server: Server = frappe.get_cached_doc("Server", self.primary_server)
+ shared_directory = frappe.db.get_single_value("Press Settings", "shared_directory")
+
+ step.status = Status.Running
+ step.save()
+
+ try:
+ ansible = Ansible(
+ playbook="link_benches_to_nfs.yml",
+ server=primary_server,
+ user=primary_server._ssh_user(),
+ port=primary_server._ssh_port(),
+ variables={"shared_directory": shared_directory},
+ )
+
+ self.handle_ansible_play(step, ansible)
+ except Exception as e:
+ self._fail_ansible_step(step, ansible, e)
+ raise
+
+ def run_primary_server_benches_on_shared_fs(
+ self,
+ step: "NFSVolumeAttachmentStep",
+ ) -> None:
+ """Run benches on shared volume"""
+ secondary_server_private_ip = frappe.db.get_value(
+ "Server",
+ self.secondary_server,
+ "private_ip",
+ )
+ shared_directory = frappe.db.get_single_value("Press Settings", "shared_directory")
+
+ agent_job = Agent(self.primary_server).change_bench_directory(
+ redis_connection_string_ip="localhost",
+ directory=shared_directory,
+ secondary_server_private_ip=secondary_server_private_ip,
+ is_primary=True,
+ restart_benches=True,
+ reference_doctype="Server",
+ reference_name=self.primary_server,
+ )
+
+ step.status = Status.Success
+ step.job_type = "Agent Job"
+ step.job = agent_job.name
+ step.save()
+
+ def wait_for_benches_to_run_on_shared(self, step: "NFSVolumeAttachmentStep"):
+ """Wait for benches to run on shared volume"""
+ step.status = Status.Running
+ step.is_waiting = True
+ step.save()
+
+ job = frappe.db.get_value(
+ "NFS Volume Attachment Step",
+ {
+ "parent": self.name,
+ "step_name": "Run benches on shared volume",
+ },
+ "job",
+ )
+
+ self.handle_agent_job(step, job, poll=True)
+
+ def add_loopback_rule(self, step: "NFSVolumeAttachmentStep"):
+ """Allow loopback requests from container"""
+ step.status = Status.Running
+ step.save()
+
+ primary_server: "Server" = frappe.get_doc("Server", self.primary_server)
+
+ try:
+ ansible = Ansible(
+ playbook="allow_docker_loopback.yml",
+ server=primary_server,
+ user=primary_server._ssh_user(),
+ port=primary_server._ssh_port(),
+ )
+ self.handle_ansible_play(step, ansible)
+ except Exception as e:
+ self._fail_ansible_step(step, ansible, e)
+ raise
+
+ def stop_secondary_server(self, step: "NFSVolumeAttachmentStep"):
+ """Stop secondary server"""
+ step.status = Status.Running
+ step.save()
+
+ secondary_server_vm = frappe.db.get_value("Server", self.secondary_server, "virtual_machine")
+ virtual_machine: "VirtualMachine" = frappe.get_doc("Virtual Machine", secondary_server_vm)
+
+ if virtual_machine.status == "Running":
+ virtual_machine.stop()
+
+ step.status = Status.Success
+ step.save()
+
+ def wait_for_secondary_server_to_stop(self, step: "NFSVolumeAttachmentStep"):
+ """Wait for secondary server to stop"""
+ step.status = Status.Running
+ step.is_waiting = True
+ step.save()
+
+ virtual_machine = frappe.db.get_value("Server", self.secondary_server, "virtual_machine")
+
+ self.handle_vm_status_job(step, virtual_machine=virtual_machine, expected_status="Stopped")
+
+ def create_subscription_record(self, step: "NFSVolumeAttachmentStep"):
+ """Create a subscription record for secondary server"""
+ step.status = Status.Running
+ step.save()
+
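+ # Bill the secondary server hourly against the primary server's team, unless a subscription already exists.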
+ team = frappe.db.get_value("Server", self.primary_server, "team")
+
+ if not frappe.db.exists(
+ "Subscription",
+ {"document_name": self.secondary_server, "team": team, "plan_type": "Server Plan"},
+ ):
+ frappe.get_doc(
+ doctype="Subscription",
+ team=team,
+ plan_type="Server Plan",
+ plan=frappe.db.get_value("Server", self.secondary_server, "plan"),
+ document_type="Server",
+ document_name=self.secondary_server,
+ interval="Hourly",
+ enabled=1,
+ ).insert()
+
+ step.status = Status.Success
+ step.save()
+
+ def push_missing_images_to_registry(self, step: "NFSVolumeAttachmentStep"):
+ """Push missing bench images present on server to registry"""
+ # We know for a fact that newer benches have their images in the registry thanks to the
+ # S3 storage backend, so this only needs to run for older benches whose images may be
+ # missing from the registry, making it a one-time step.
+
+ step.status = Status.Running
+ step.save()
+
+ missing_images = []
+ registry_settings = frappe.db.get_value(
+ "Press Settings",
+ None,
+ [
+ "domain",
+ "docker_registry_url",
+ "docker_registry_namespace",
+ "docker_registry_username",
+ "docker_registry_password",
+ ],
+ as_dict=True,
+ )
+ Bench = frappe.qb.DocType("Bench")
+ DeployCandidateBuild = frappe.qb.DocType("Deploy Candidate Build")
+ active_bench_images_on_server = (
+ frappe.qb.from_(Bench)
+ .join(DeployCandidateBuild)
+ .on(Bench.build == DeployCandidateBuild.name)
+ .select(DeployCandidateBuild.name, DeployCandidateBuild.group, DeployCandidateBuild.docker_image)
+ .where(Bench.server == self.primary_server)
+ .where(Bench.status == "Active")
+ .run(as_dict=True)
+ )
+
+ for image_on_server in active_bench_images_on_server:
+ is_image_present = is_image_in_registry(
+ image_on_server["name"], image_on_server["group"], registry_settings
+ )
+ if not is_image_present:
+ missing_images.append(image_on_server["docker_image"])
+
+ if not missing_images:
+ # Successful if all images are in registry
+ step.status = Status.Success
+ step.save()
+ return
+
+ # At this point we can trigger an agent job to push the missing Docker images
+ agent_job = Agent(self.primary_server).push_docker_images(
+ images=missing_images, reference_doctype="Server", reference_name=self.primary_server
+ )
+
+ step.status = Status.Success
+ step.job_type = "Agent Job"
+ step.job = agent_job.name
+ step.is_waiting = True
+
+ step.output = "Pushing the following missing images to registry:\n{}".format(
+ "\n".join(missing_images)
+ )
+ step.save()
+
+ def wait_for_missing_images(self, step: "NFSVolumeAttachmentStep"):
+ "Wait for missing images push to complete"
+ step.status = Status.Running
+ step.is_waiting = True
+ step.save()
+
+ job = frappe.db.get_value(
+ "NFS Volume Attachment Step",
+ {
+ "parent": self.name,
+ "step_name": "Push missing bench images present on server to registry",
+ },
+ "job",
+ )
+
+ if not job:
+ # All images were already present; no push job was created
+ step.status = Status.Success
+ step.save()
+ return
+
+ self.handle_agent_job(step, job)
+
+ def ready_to_auto_scale(self, step: "NFSVolumeAttachmentStep"):
+ """Mark server as ready to auto scale"""
+ step.status = Status.Running
+ step.save()
+
+ frappe.db.set_value(
+ "Server", self.primary_server, {"benches_on_shared_volume": True, "status": "Active"}
+ )
+ frappe.db.set_value(
+ "Server", self.secondary_server, {"status": "Active", "is_monitoring_disabled": True}
+ )
+
+ step.status = Status.Success
+ step.save()
+
+ def before_insert(self):
+ """Append defined steps to the document before saving."""
+ for step in self.get_steps(
+ [
+ self.mark_servers_as_installing,
+ self.start_secondary_server,
+ self.wait_for_secondary_server_to_start,
+ self.setup_nfs_common_on_secondary,
+ self.allow_servers_to_mount,
+ self.wait_for_acl_addition,
+ self.mount_shared_folder_on_secondary_server,
+ self.link_benches_to_shared,
+ self.run_primary_server_benches_on_shared_fs,
+ self.wait_for_benches_to_run_on_shared,
+ self.add_loopback_rule,
+ self.stop_secondary_server,
+ self.wait_for_secondary_server_to_stop,
+ self.create_subscription_record,
+ self.push_missing_images_to_registry,
+ self.wait_for_missing_images,
+ self.ready_to_auto_scale,
+ ]
+ ):
+ self.append("nfs_volume_attachment_steps", step)
+
+ self.secondary_server = frappe.db.get_value("Server", self.primary_server, "secondary_server")
+
+ @frappe.whitelist()
+ def force_continue(self):
+ self.execute_mount_steps()
+
+ def execute_mount_steps(self):
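+ # timeout=18000 (5 hours): the Ansible plays and agent jobs in this pipeline can run long.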
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "_execute_steps",
+ steps=self.nfs_volume_attachment_steps,
+ timeout=18000,
+ at_front=True,
+ queue="auto-scale",
+ enqueue_after_commit=True,
+ )
+
+ def after_insert(self):
+ self.execute_mount_steps()
diff --git a/press/press/doctype/nfs_volume_attachment/test_nfs_volume_attachment.py b/press/press/doctype/nfs_volume_attachment/test_nfs_volume_attachment.py
new file mode 100644
index 00000000000..5dfeb254ce5
--- /dev/null
+++ b/press/press/doctype/nfs_volume_attachment/test_nfs_volume_attachment.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests import IntegrationTestCase, UnitTestCase
+
+# On IntegrationTestCase, the doctype test records and all
+# link-field test record dependencies are recursively loaded.
+# Use these module variables to add to or remove from that list.
+EXTRA_TEST_RECORD_DEPENDENCIES = [] # e.g. ["User"]
+IGNORE_TEST_RECORD_DEPENDENCIES = [] # e.g. ["User"]
+
+
+class UnitTestNFSVolumeAttachment(UnitTestCase):
+ """
+ Unit tests for NFSVolumeAttachment.
+ Use this class for testing individual functions and methods.
+ """
+
+ pass
+
+
+class IntegrationTestNFSVolumeAttachment(IntegrationTestCase):
+ """
+ Integration tests for NFSVolumeAttachment.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/nfs_volume_attachment_step/__init__.py b/press/press/doctype/nfs_volume_attachment_step/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/nfs_volume_attachment_step/nfs_volume_attachment_step.json b/press/press/doctype/nfs_volume_attachment_step/nfs_volume_attachment_step.json
new file mode 100644
index 00000000000..2505b36b2be
--- /dev/null
+++ b/press/press/doctype/nfs_volume_attachment_step/nfs_volume_attachment_step.json
@@ -0,0 +1,81 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-10-06 18:30:54.878299",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "step_name",
+ "method_name",
+ "status",
+ "job_type",
+ "job",
+ "attempt",
+ "is_waiting",
+ "output"
+ ],
+ "fields": [
+ {
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Status",
+ "options": "Pending\nRunning\nSuccess\nFailure"
+ },
+ {
+ "fieldname": "output",
+ "fieldtype": "Text",
+ "label": "Output"
+ },
+ {
+ "fieldname": "step_name",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Name"
+ },
+ {
+ "fieldname": "method_name",
+ "fieldtype": "Data",
+ "label": "Method"
+ },
+ {
+ "fieldname": "job_type",
+ "fieldtype": "Link",
+ "label": "Job Type",
+ "options": "DocType"
+ },
+ {
+ "fieldname": "job",
+ "fieldtype": "Dynamic Link",
+ "in_list_view": 1,
+ "label": "Job",
+ "options": "job_type"
+ },
+ {
+ "fieldname": "attempt",
+ "fieldtype": "Int",
+ "label": "Attempt"
+ },
+ {
+ "default": "0",
+ "fieldname": "is_waiting",
+ "fieldtype": "Check",
+ "label": "Wait"
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2025-10-08 17:38:30.784005",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "NFS Volume Attachment Step",
+ "owner": "Administrator",
+ "permissions": [],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/press/doctype/nfs_volume_attachment_step/nfs_volume_attachment_step.py b/press/press/doctype/nfs_volume_attachment_step/nfs_volume_attachment_step.py
new file mode 100644
index 00000000000..86d5963d5c1
--- /dev/null
+++ b/press/press/doctype/nfs_volume_attachment_step/nfs_volume_attachment_step.py
@@ -0,0 +1,30 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class NFSVolumeAttachmentStep(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ attempt: DF.Int
+ is_waiting: DF.Check
+ job: DF.DynamicLink | None
+ job_type: DF.Link | None
+ method_name: DF.Data | None
+ output: DF.Text | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ status: DF.Literal["Pending", "Running", "Success", "Failure"]
+ step_name: DF.Data | None
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/nfs_volume_detachment/__init__.py b/press/press/doctype/nfs_volume_detachment/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/nfs_volume_detachment/nfs_volume_detachment.js b/press/press/doctype/nfs_volume_detachment/nfs_volume_detachment.js
new file mode 100644
index 00000000000..58bde0f3b7e
--- /dev/null
+++ b/press/press/doctype/nfs_volume_detachment/nfs_volume_detachment.js
@@ -0,0 +1,18 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('NFS Volume Detachment', {
+ refresh(frm) {
+ if (frm.doc.status === 'Failure') {
+ frm.add_custom_button('Force Continue', () => {
+ frm.call('force_continue').then((r) => {
+ if (r.message) {
+ frappe.msgprint(r.message);
+ } else {
+ frm.refresh();
+ }
+ });
+ });
+ }
+ },
+});
diff --git a/press/press/doctype/nfs_volume_detachment/nfs_volume_detachment.json b/press/press/doctype/nfs_volume_detachment/nfs_volume_detachment.json
new file mode 100644
index 00000000000..d4a288c3ab7
--- /dev/null
+++ b/press/press/doctype/nfs_volume_detachment/nfs_volume_detachment.json
@@ -0,0 +1,110 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-10-08 11:42:59.416534",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "nfs_settings_section",
+ "primary_server",
+ "secondary_server",
+ "status",
+ "steps_section",
+ "nfs_volume_detachment_steps"
+ ],
+ "fields": [
+ {
+ "fieldname": "nfs_settings_section",
+ "fieldtype": "Section Break",
+ "label": "NFS Settings"
+ },
+ {
+ "fieldname": "primary_server",
+ "fieldtype": "Link",
+ "label": "Primary Server",
+ "options": "Server",
+ "reqd": 1
+ },
+ {
+ "fieldname": "secondary_server",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Secondary Server",
+ "options": "Server",
+ "read_only": 1
+ },
+ {
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Status",
+ "options": "Pending\nRunning\nSuccess\nFailure"
+ },
+ {
+ "fieldname": "steps_section",
+ "fieldtype": "Section Break",
+ "label": "Steps"
+ },
+ {
+ "fieldname": "nfs_volume_detachment_steps",
+ "fieldtype": "Table",
+ "options": "NFS Volume Detachment Step"
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "links": [
+ {
+ "link_doctype": "Press Notification",
+ "link_fieldname": "document_name"
+ }
+ ],
+ "modified": "2026-01-16 14:56:33.594039",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "NFS Volume Detachment",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "rows_threshold_for_grid_search": 20,
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": [],
+ "title_field": "primary_server"
+}
\ No newline at end of file
diff --git a/press/press/doctype/nfs_volume_detachment/nfs_volume_detachment.py b/press/press/doctype/nfs_volume_detachment/nfs_volume_detachment.py
new file mode 100644
index 00000000000..4d23da05b34
--- /dev/null
+++ b/press/press/doctype/nfs_volume_detachment/nfs_volume_detachment.py
@@ -0,0 +1,368 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+from __future__ import annotations
+
+import typing
+
+import frappe
+from frappe.model.document import Document
+
+from press.agent import Agent
+from press.press.doctype.auto_scale_record.auto_scale_record import AutoScaleStepFailureHandler
+from press.runner import Ansible, Status, StepHandler
+
+if typing.TYPE_CHECKING:
+ from press.press.doctype.agent_job.agent_job import AgentJob
+ from press.press.doctype.nfs_server.nfs_server import NFSServer
+ from press.press.doctype.nfs_volume_detachment_step.nfs_volume_detachment_step import (
+ NFSVolumeDetachmentStep,
+ )
+ from press.press.doctype.server.server import Server
+ from press.press.doctype.virtual_machine.virtual_machine import VirtualMachine
+
+
+class NFSVolumeDetachment(Document, AutoScaleStepFailureHandler, StepHandler):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.nfs_volume_detachment_step.nfs_volume_detachment_step import (
+ NFSVolumeDetachmentStep,
+ )
+
+ nfs_volume_detachment_steps: DF.Table[NFSVolumeDetachmentStep]
+ primary_server: DF.Link
+ secondary_server: DF.Link | None
+ status: DF.Literal["Pending", "Running", "Success", "Failure"]
+ # end: auto-generated types
+
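+ # Same step pipeline as NFS Volume Attachment: before_insert registers the methods below and _execute_steps runs them in order.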
+ def mark_servers_as_installing(self, step: "NFSVolumeDetachmentStep"):
+ """Mark primary and secondary servers as `Installing`"""
+ step.status = Status.Running
+ step.save()
+
+ frappe.db.set_value("Server", self.primary_server, "status", "Installing")
+ frappe.db.set_value("Server", self.secondary_server, "status", "Installing")
+
+ step.status = Status.Success
+ step.save()
+
+ def start_secondary_server(self, step: "NFSVolumeDetachmentStep"):
+ """Start secondary server"""
+ step.status = Status.Running
+ step.save()
+
+ secondary_server_vm = frappe.db.get_value("Server", self.secondary_server, "virtual_machine")
+ virtual_machine: "VirtualMachine" = frappe.get_doc("Virtual Machine", secondary_server_vm)
+
+ if virtual_machine.status != "Running":
+ virtual_machine.start()
+
+ step.status = Status.Success
+ step.save()
+
+ def wait_for_secondary_server_to_start(self, step: "NFSVolumeDetachmentStep"):
+ """Wait for secondary server to start"""
+ step.status = Status.Running
+ step.is_waiting = True
+ step.save()
+
+ virtual_machine = frappe.db.get_value("Server", self.secondary_server, "virtual_machine")
+
+ self.handle_vm_status_job(step, virtual_machine=virtual_machine, expected_status="Running")
+
+ def stop_all_benches(self, step: "NFSVolumeDetachmentStep"):
+ """Stop all running benches"""
+ server: Server = frappe.get_doc("Server", self.primary_server)
+ step.status = Status.Running
+ step.save()
+
+ try:
+ ansible = Ansible(
+ playbook="stop_benches.yml",
+ server=server,
+ user=server._ssh_user(),
+ port=server._ssh_port(),
+ )
+ self.handle_ansible_play(step, ansible)
+ except Exception as e:
+ self._fail_ansible_step(step, ansible, e)
+ raise
+
+ def unlink_benches_from_shared(self, step: "NFSVolumeDetachmentStep"):
+ """Sync data from shared to /home/frappe/benches"""
+ primary_server: Server = frappe.get_cached_doc("Server", self.primary_server)
+ shared_directory = frappe.db.get_single_value("Press Settings", "shared_directory")
+ step.status = Status.Running
+ step.save()
+
+ try:
+ ansible = Ansible(
+ playbook="unlink_benches_from_nfs.yml",
+ server=primary_server,
+ user=primary_server._ssh_user(),
+ port=primary_server._ssh_port(),
+ variables={"shared_directory": shared_directory},
+ )
+ self.handle_ansible_play(step, ansible)
+ except Exception as e:
+ self._fail_ansible_step(step, ansible, e)
+ raise
+
+ def run_bench_on_primary_server(self, step: "NFSVolumeDetachmentStep"):
+ """Change bench directory"""
+ secondary_server_private_ip = frappe.db.get_value(
+ "Server",
+ self.secondary_server,
+ "private_ip",
+ )
+
+ agent_job = Agent(self.primary_server).change_bench_directory(
+ redis_connection_string_ip="localhost",
+ directory="/home/frappe/benches/",
+ secondary_server_private_ip=secondary_server_private_ip,
+ is_primary=True,
+ restart_benches=True,
+ reference_doctype="Server",
+ reference_name=self.primary_server,
+ )
+
+ step.status = Status.Success
+ step.job_type = "Agent Job"
+ step.job = agent_job.name
+ step.save()
+
+ def wait_for_job_completion(self, step: "NFSVolumeDetachmentStep") -> None:
+ """Wait for agent job completion"""
+ step.status = Status.Running
+ step.is_waiting = True
+ step.save()
+
+ job = frappe.db.get_value(
+ "NFS Volume Detachment Step",
+ {
+ "parent": self.name,
+ "step_name": "Change bench directory",
+ },
+ "job",
+ )
+ self.handle_agent_job(step, job, poll=True)
+
+ def umount_from_primary_server(self, step: "NFSVolumeDetachmentStep") -> None:
+ """Umount /shared from primary server and remove from fstab"""
+ primary_server: Server = frappe.get_cached_doc("Server", self.primary_server)
+ nfs_server_private_ip = frappe.db.get_value("NFS Server", self.nfs_server, "private_ip")
+ step.status = Status.Running
+ step.save()
+
+ try:
+ ansible = Ansible(
+ playbook="umount_and_cleanup_shared.yml",
+ server=primary_server,
+ user=primary_server._ssh_user(),
+ port=primary_server._ssh_port(),
+ variables={
+ "nfs_server_private_ip": nfs_server_private_ip,
+ "shared_directory": f"/home/frappe/nfs/{self.primary_server}",
+ },
+ )
+ self.handle_ansible_play(step, ansible)
+ except Exception as e:
+ self._fail_ansible_step(step, ansible, e)
+ raise
+
+ def remove_servers_from_acl(self, step: "NFSVolumeDetachmentStep") -> None:
+ """Remove primary and secondary servers from acl"""
+ secondary_server_private_ip = frappe.db.get_value("Server", self.secondary_server, "private_ip")
+
+ try:
+ agent_job = Agent(self.primary_server).remove_servers_from_acl(
+ secondary_server_private_ip=secondary_server_private_ip,
+ )
+ step.job_type = "Agent Job"
+ step.job = agent_job.name
+ step.status = Status.Success
+ step.save()
+ except Exception as e:
+ self._fail_job_step(step, e)
+ raise
+
+ def wait_for_acl_deletion(self, step: "NFSVolumeDetachmentStep"):
+ """Wait for servers to be remove from the ACL"""
+ step.status = Status.Running
+ step.is_waiting = True
+ step.save()
+
+ job = frappe.db.get_value(
+ "NFS Volume Detachment Step",
+ {
+ "parent": self.name,
+ "step_name": "Remove primary and secondary servers from acl",
+ },
+ "job",
+ )
+
+ # Jobs sometimes go undelivered; fetch the status manually.
+ job_doc: "AgentJob" = frappe.get_doc("Agent Job", job)
+ job_doc.get_status()
+
+ self.handle_agent_job(step, job)
+
+ def umount_volume_from_nfs_server(self, step: "NFSVolumeDetachmentStep") -> None:
+ """Umount volume from NFS Server"""
+ step.status = Status.Running
+ step.save()
+
+ nfs_server: NFSServer = frappe.get_cached_doc("NFS Server", self.nfs_server)
+
+ try:
+ ansible = Ansible(
+ playbook="umount_volume_nfs_server.yml",
+ server=nfs_server,
+ user=nfs_server._ssh_user(),
+ port=nfs_server._ssh_port(),
+ variables={
+ "shared_directory": f"/home/frappe/nfs/{self.primary_server}",
+ },
+ )
+ self.handle_ansible_play(step, ansible)
+ except Exception as e:
+ self._fail_ansible_step(step, ansible, e)
+ raise
+
+ def detach_and_delete_volume_from_nfs_server(self, step: "NFSVolumeDetachmentStep") -> None:
+ """Detach and delete volume from nfs server"""
+ step.status = Status.Running
+ step.save()
+
+ virtual_machine: VirtualMachine = frappe.get_cached_doc("Virtual Machine", self.nfs_server)
+ volume_id = frappe.get_value(
+ "NFS Volume Attachment", {"primary_server": self.primary_server}, "volume_id"
+ )
+ try:
+ virtual_machine.delete_volume(volume_id)
+ step.status = Status.Success
+ step.save()
+ except Exception as e:
+ self._fail_job_step(step, e)
+ raise
+
+ def mark_attachment_as_archived(self, step: "NFSVolumeDetachmentStep") -> None:
+ """Mark the attachment doc as archived"""
+ step.status = Status.Running
+ step.save()
+
+ frappe.db.set_value(
+ "NFS Volume Attachment", {"primary_server": self.primary_server}, "status", "Archived"
+ )
+
+ step.status = Status.Success
+ step.save()
+
+ def not_ready_to_auto_scale(self, step: "NFSVolumeDetachmentStep"):
+ """Mark server as not ready to auto scale & drop secondary server"""
+ step.status = Status.Running
+ step.save()
+
+ # Drop secondary server
+ primary_server: "Server" = frappe.get_doc("Server", self.primary_server)
+ primary_server.drop_secondary_server()
+
+ # Mark secondary server field as empty on the primary server
+ frappe.db.set_value(
+ "Server",
+ self.primary_server,
+ {"benches_on_shared_volume": False, "secondary_server": None, "status": "Active"},
+ )
+
+ step.status = Status.Success
+ step.save()
+
+ def remove_subscription_record(self, step: "NFSVolumeDetachmentStep"):
+ """Disable the subscription record for secondary server"""
+ step.status = Status.Running
+ step.save()
+
+ team = frappe.db.get_value("Server", self.primary_server, "team")
+
+ if frappe.db.exists(
+ "Subscription",
+ {"document_name": self.secondary_server, "team": team, "plan_type": "Server Plan"},
+ ):
+ frappe.db.set_value(
+ "Subscription",
+ {"document_name": self.secondary_server, "team": team, "plan_type": "Server Plan"},
+ "enabled",
+ 0,
+ )
+
+ step.status = Status.Success
+ step.save()
+
+ def before_insert(self):
+ """Append defined steps to the document before saving."""
+ for step in self.get_steps(
+ [
+ self.mark_servers_as_installing,
+ self.start_secondary_server,
+ self.wait_for_secondary_server_to_start,
+ self.unlink_benches_from_shared,
+ self.run_bench_on_primary_server,
+ self.wait_for_job_completion,
+ self.remove_servers_from_acl,
+ self.wait_for_acl_deletion,
+ self.mark_attachment_as_archived,
+ self.remove_subscription_record,
+ self.not_ready_to_auto_scale,
+ ]
+ ):
+ self.append("nfs_volume_detachment_steps", step)
+
+ self.secondary_server = frappe.db.get_value("Server", self.primary_server, "secondary_server")
+
+ def validate(self):
+ is_server_auto_scaled = frappe.db.get_value("Server", self.primary_server, "scaled_up")
+ if is_server_auto_scaled:
+ frappe.throw("Benches are currently running on the secondary server!")
+
+ has_triggers = frappe.db.get_value(
+ "Prometheus Alert Rule",
+ filters={
+ "name": [
+ "in",
+ [
+ f"Auto Scale Up Trigger - {self.primary_server}",
+ f"Auto Scale Down Trigger - {self.primary_server}",
+ ],
+ ],
+ "enabled": 1,
+ },
+ pluck="name",
+ )
+
+ if has_triggers:
+ frappe.throw("Please remove all auto scale triggers before dropping the secondary server")
+
+ @frappe.whitelist()
+ def force_continue(self):
+ self.execute_mount_steps()
+
+ def execute_mount_steps(self):
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "_execute_steps",
+ steps=self.nfs_volume_detachment_steps,
+ timeout=18000,
+ at_front=True,
+ queue="auto-scale",
+ enqueue_after_commit=True,
+ )
+
+ def after_insert(self):
+ self.execute_mount_steps()
diff --git a/press/press/doctype/nfs_volume_detachment/test_nfs_volume_detachment.py b/press/press/doctype/nfs_volume_detachment/test_nfs_volume_detachment.py
new file mode 100644
index 00000000000..c61244cdaa6
--- /dev/null
+++ b/press/press/doctype/nfs_volume_detachment/test_nfs_volume_detachment.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests import IntegrationTestCase, UnitTestCase
+
+# On IntegrationTestCase, the doctype test records and all
+# link-field test record dependencies are recursively loaded.
+# Use these module variables to add to or remove from that list.
+EXTRA_TEST_RECORD_DEPENDENCIES = [] # e.g. ["User"]
+IGNORE_TEST_RECORD_DEPENDENCIES = [] # e.g. ["User"]
+
+
+class UnitTestNFSVolumeDetachment(UnitTestCase):
+ """
+ Unit tests for NFSVolumeDetachment.
+ Use this class for testing individual functions and methods.
+ """
+
+ pass
+
+
+class IntegrationTestNFSVolumeDetachment(IntegrationTestCase):
+ """
+ Integration tests for NFSVolumeDetachment.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/nfs_volume_detachment_step/__init__.py b/press/press/doctype/nfs_volume_detachment_step/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/nfs_volume_detachment_step/nfs_volume_detachment_step.json b/press/press/doctype/nfs_volume_detachment_step/nfs_volume_detachment_step.json
new file mode 100644
index 00000000000..d0226bca5c5
--- /dev/null
+++ b/press/press/doctype/nfs_volume_detachment_step/nfs_volume_detachment_step.json
@@ -0,0 +1,82 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-10-08 12:05:49.273995",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "step_name",
+ "method_name",
+ "status",
+ "job_type",
+ "job",
+ "attempt",
+ "is_waiting",
+ "output"
+ ],
+ "fields": [
+ {
+ "fieldname": "step_name",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Name"
+ },
+ {
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Status",
+ "options": "Pending\nRunning\nSuccess\nFailure"
+ },
+ {
+ "fieldname": "job_type",
+ "fieldtype": "Link",
+ "label": "Job Type",
+ "options": "DocType"
+ },
+ {
+ "fieldname": "job",
+ "fieldtype": "Dynamic Link",
+ "in_list_view": 1,
+ "label": "Job",
+ "options": "job_type"
+ },
+ {
+ "fieldname": "output",
+ "fieldtype": "Long Text",
+ "label": "Output"
+ },
+ {
+ "fieldname": "method_name",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Method"
+ },
+ {
+ "fieldname": "attempt",
+ "fieldtype": "Int",
+ "label": "Attempt"
+ },
+ {
+ "default": "0",
+ "fieldname": "is_waiting",
+ "fieldtype": "Check",
+ "label": "Wait"
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2025-10-08 16:59:32.287685",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "NFS Volume Detachment Step",
+ "owner": "Administrator",
+ "permissions": [],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/press/doctype/nfs_volume_detachment_step/nfs_volume_detachment_step.py b/press/press/doctype/nfs_volume_detachment_step/nfs_volume_detachment_step.py
new file mode 100644
index 00000000000..0f0b175c658
--- /dev/null
+++ b/press/press/doctype/nfs_volume_detachment_step/nfs_volume_detachment_step.py
@@ -0,0 +1,30 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class NFSVolumeDetachmentStep(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ attempt: DF.Int
+ is_waiting: DF.Check
+ job: DF.DynamicLink | None
+ job_type: DF.Link | None
+ method_name: DF.Data | None
+ output: DF.LongText | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ status: DF.Literal["Pending", "Running", "Success", "Failure"]
+ step_name: DF.Data | None
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/oauth_domain_mapping/__init__.py b/press/press/doctype/oauth_domain_mapping/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/oauth_domain_mapping/oauth_domain_mapping.js b/press/press/doctype/oauth_domain_mapping/oauth_domain_mapping.js
new file mode 100644
index 00000000000..3609e1aa7eb
--- /dev/null
+++ b/press/press/doctype/oauth_domain_mapping/oauth_domain_mapping.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2024, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("OAuth Domain Mapping", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/oauth_domain_mapping/oauth_domain_mapping.json b/press/press/doctype/oauth_domain_mapping/oauth_domain_mapping.json
new file mode 100644
index 00000000000..c3ac31047f9
--- /dev/null
+++ b/press/press/doctype/oauth_domain_mapping/oauth_domain_mapping.json
@@ -0,0 +1,66 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2024-05-13 15:31:01.487795",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "social_login_key",
+ "provider_name",
+ "column_break_bfdt",
+ "email_domain"
+ ],
+ "fields": [
+ {
+ "fieldname": "social_login_key",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Social Login Key",
+ "options": "Social Login Key",
+ "reqd": 1
+ },
+ {
+ "fieldname": "column_break_bfdt",
+ "fieldtype": "Column Break"
+ },
+ {
+ "description": "Emails matching this domain will get custom oauth login",
+ "fieldname": "email_domain",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Email Domain"
+ },
+ {
+ "fetch_from": "social_login_key.provider_name",
+ "fieldname": "provider_name",
+ "fieldtype": "Data",
+ "label": "Provider Name"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2024-05-13 21:34:04.985596",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "OAuth Domain Mapping",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/oauth_domain_mapping/oauth_domain_mapping.py b/press/press/doctype/oauth_domain_mapping/oauth_domain_mapping.py
new file mode 100644
index 00000000000..b4ba52a7702
--- /dev/null
+++ b/press/press/doctype/oauth_domain_mapping/oauth_domain_mapping.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class OAuthDomainMapping(Document):
+ pass
diff --git a/press/press/doctype/oauth_domain_mapping/test_oauth_domain_mapping.py b/press/press/doctype/oauth_domain_mapping/test_oauth_domain_mapping.py
new file mode 100644
index 00000000000..73485527ab1
--- /dev/null
+++ b/press/press/doctype/oauth_domain_mapping/test_oauth_domain_mapping.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2024, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class TestOAuthDomainMapping(FrappeTestCase):
+ pass
diff --git a/press/press/doctype/partner_lead/partner_lead.js b/press/press/doctype/partner_lead/partner_lead.js
deleted file mode 100644
index 3c6f7aa895e..00000000000
--- a/press/press/doctype/partner_lead/partner_lead.js
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2021, Frappe and contributors
-// For license information, please see license.txt
-
-frappe.ui.form.on('Partner Lead', {
- // refresh: function(frm) {
- // }
-});
diff --git a/press/press/doctype/partner_lead/partner_lead.json b/press/press/doctype/partner_lead/partner_lead.json
deleted file mode 100644
index 905cbf7f28e..00000000000
--- a/press/press/doctype/partner_lead/partner_lead.json
+++ /dev/null
@@ -1,62 +0,0 @@
-{
- "actions": [],
- "allow_rename": 1,
- "autoname": "format:PL-{YYYY}-{#####}",
- "creation": "2021-09-30 12:57:34.909563",
- "doctype": "DocType",
- "editable_grid": 1,
- "engine": "InnoDB",
- "field_order": [
- "team",
- "site",
- "frappe_lead"
- ],
- "fields": [
- {
- "fieldname": "team",
- "fieldtype": "Link",
- "in_list_view": 1,
- "label": "Team",
- "options": "Team",
- "reqd": 1
- },
- {
- "fieldname": "site",
- "fieldtype": "Link",
- "in_list_view": 1,
- "label": "Site",
- "options": "Site"
- },
- {
- "fieldname": "frappe_lead",
- "fieldtype": "Data",
- "label": "Frappe Lead"
- }
- ],
- "index_web_pages_for_search": 1,
- "links": [],
- "modified": "2021-09-30 12:59:12.284003",
- "modified_by": "Administrator",
- "module": "Press",
- "name": "Partner Lead",
- "naming_rule": "Expression",
- "owner": "Administrator",
- "permissions": [
- {
- "create": 1,
- "delete": 1,
- "email": 1,
- "export": 1,
- "print": 1,
- "read": 1,
- "report": 1,
- "role": "System Manager",
- "share": 1,
- "write": 1
- }
- ],
- "sort_field": "modified",
- "sort_order": "DESC",
- "title_field": "team",
- "track_changes": 1
-}
\ No newline at end of file
diff --git a/press/press/doctype/partner_lead/partner_lead.py b/press/press/doctype/partner_lead/partner_lead.py
deleted file mode 100644
index adaa9f3386b..00000000000
--- a/press/press/doctype/partner_lead/partner_lead.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright (c) 2021, Frappe and contributors
-# For license information, please see license.txt
-
-# import frappe
-from frappe.model.document import Document
-
-
-class PartnerLead(Document):
- pass
diff --git a/press/press/doctype/partner_lead/test_partner_lead.py b/press/press/doctype/partner_lead/test_partner_lead.py
deleted file mode 100644
index 3af8d35fcd1..00000000000
--- a/press/press/doctype/partner_lead/test_partner_lead.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright (c) 2021, Frappe and Contributors
-# See license.txt
-
-# import frappe
-import unittest
-
-
-class TestPartnerLead(unittest.TestCase):
- pass
diff --git a/press/press/doctype/partner_payment_payout/__init__.py b/press/press/doctype/partner_payment_payout/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/partner_payment_payout/partner_payment_payout.js b/press/press/doctype/partner_payment_payout/partner_payment_payout.js
new file mode 100644
index 00000000000..982877809af
--- /dev/null
+++ b/press/press/doctype/partner_payment_payout/partner_payment_payout.js
@@ -0,0 +1,37 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('Partner Payment Payout', {
+ refresh(frm) {
+ if (frm.doc.docstatus == 0) {
+ frm.add_custom_button('Fetch Payments', () => {
+ frappe.call({
+ method: 'press.api.regional_payments.mpesa.utils.fetch_payments',
+ args: {
+ // transaction_doctype: frm.doc.transaction_doctype,
+ from_date: frm.doc.from_date,
+ to_date: frm.doc.to_date,
+ partner: frm.doc.partner,
+ payment_gateway: frm.doc.payment_gateway,
+ },
+ callback: function (response) {
+ if (response.message) {
+ // Clear existing entries in transfer_items
+ frm.clear_table('transfer_items');
+
+ response.message.forEach((payment) => {
+ let row = frm.add_child('transfer_items');
+ row.transaction_id = payment.name;
+ row.posting_date = payment.posting_date;
+ row.amount = payment.amount;
+ });
+
+ frm.refresh_field('transfer_items');
+ // frappe.msgprint("Payments fetched and added to the transfer items table.");
+ }
+ },
+ });
+ });
+ }
+ },
+});
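The Fetch Payments button expects `press.api.regional_payments.mpesa.utils.fetch_payments` to return rows carrying `name`, `posting_date`, and `amount`. That module is not part of this excerpt; a plausible shape for the server method, assuming it filters on the same `submitted_to_frappe` flag the payout uses on submit:

```python
# Assumed server-side counterpart of the Fetch Payments button; the real
# implementation lives in press.api.regional_payments.mpesa.utils and is
# not shown in this diff.
import frappe


@frappe.whitelist()
def fetch_payments(from_date, to_date, partner, payment_gateway):
    # Submitted partner transactions in the window that are not yet paid out
    return frappe.get_all(
        "Payment Partner Transaction",
        filters={
            "payment_partner": partner,
            "payment_gateway": payment_gateway,
            "posting_date": ["between", [from_date, to_date]],
            "submitted_to_frappe": 0,
            "docstatus": 1,
        },
        fields=["name", "posting_date", "amount"],
    )
```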
diff --git a/press/press/doctype/partner_payment_payout/partner_payment_payout.json b/press/press/doctype/partner_payment_payout/partner_payment_payout.json
new file mode 100644
index 00000000000..fcc8eee3ea5
--- /dev/null
+++ b/press/press/doctype/partner_payment_payout/partner_payment_payout.json
@@ -0,0 +1,156 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "autoname": "format:PPT-{MM}-{#####}",
+ "creation": "2025-01-20 14:16:57.352757",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "section_break_l0lu",
+ "amended_from",
+ "from_date",
+ "partner",
+ "payment_gateway",
+ "column_break_plbi",
+ "to_date",
+ "partner_commission",
+ "posting_date",
+ "section_break_tvae",
+ "transfer_items",
+ "section_break_qxag",
+ "total_amount",
+ "column_break_lgdh",
+ "commission",
+ "column_break_jfqp",
+ "net_amount"
+ ],
+ "fields": [
+ {
+ "fieldname": "section_break_l0lu",
+ "fieldtype": "Section Break",
+ "label": "Filters"
+ },
+ {
+ "fieldname": "amended_from",
+ "fieldtype": "Link",
+ "label": "Amended From",
+ "no_copy": 1,
+ "options": "Partner Payment Payout",
+ "print_hide": 1,
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "default": "Today",
+ "fieldname": "from_date",
+ "fieldtype": "Date",
+ "label": "From Date"
+ },
+ {
+ "fieldname": "partner",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Partner",
+ "options": "Team",
+ "reqd": 1
+ },
+ {
+ "fieldname": "payment_gateway",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Payment Gateway",
+ "options": "Payment Gateway",
+ "reqd": 1
+ },
+ {
+ "fieldname": "column_break_plbi",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "Today",
+ "fieldname": "to_date",
+ "fieldtype": "Date",
+ "label": "To Date"
+ },
+ {
+ "fetch_from": "partner.partner_commission",
+ "fieldname": "partner_commission",
+ "fieldtype": "Percent",
+ "label": "Partner Commission",
+ "read_only": 1
+ },
+ {
+ "fieldname": "section_break_tvae",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "transfer_items",
+ "fieldtype": "Table",
+ "label": "Partner Payment Transfer Item",
+ "options": "Partner Payment Payout Item",
+ "reqd": 1
+ },
+ {
+ "fieldname": "section_break_qxag",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "total_amount",
+ "fieldtype": "Currency",
+ "label": "Total Amount",
+ "non_negative": 1
+ },
+ {
+ "fieldname": "column_break_lgdh",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "commission",
+ "fieldtype": "Currency",
+ "label": "Commission"
+ },
+ {
+ "fieldname": "column_break_jfqp",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "net_amount",
+ "fieldtype": "Currency",
+ "label": "Net Amount"
+ },
+ {
+ "default": "Today",
+ "fieldname": "posting_date",
+ "fieldtype": "Date",
+ "label": "Posting Date",
+ "read_only": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "is_submittable": 1,
+ "links": [],
+ "modified": "2025-03-26 10:13:59.824685",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Partner Payment Payout",
+ "naming_rule": "Expression",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "submit": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/partner_payment_payout/partner_payment_payout.py b/press/press/doctype/partner_payment_payout/partner_payment_payout.py
new file mode 100644
index 00000000000..ab635a6a3b3
--- /dev/null
+++ b/press/press/doctype/partner_payment_payout/partner_payment_payout.py
@@ -0,0 +1,124 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+from __future__ import annotations
+
+import frappe
+from frappe.model.document import Document
+
+
+class PartnerPaymentPayout(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.partner_payment_payout_item.partner_payment_payout_item import (
+ PartnerPaymentPayoutItem,
+ )
+
+ amended_from: DF.Link | None
+ commission: DF.Currency
+ from_date: DF.Date | None
+ net_amount: DF.Currency
+ partner: DF.Link
+ partner_commission: DF.Percent
+ payment_gateway: DF.Link
+ posting_date: DF.Date | None
+ to_date: DF.Date | None
+ total_amount: DF.Currency
+ transfer_items: DF.Table[PartnerPaymentPayoutItem]
+ # end: auto-generated types
+ dashboard_fields = (
+ "posting_date",
+ "total_amount",
+ "commission",
+ "net_amount",
+ "payment_gateway",
+ )
+
+ def before_save(self):
+ self.total_amount = sum([item.amount for item in self.transfer_items])
+ self.commission = self.total_amount * (self.partner_commission / 100)
+ self.net_amount = self.total_amount - self.commission
+ for item in self.transfer_items:
+ item.commission_amount = item.amount * (self.partner_commission / 100)
+ item.net_amount = item.amount - item.commission_amount
+
+ def on_submit(self):
+ transaction_names = [item.transaction_id for item in self.transfer_items]
+
+ if transaction_names:
+ frappe.db.set_value(
+ "Payment Partner Transaction",
+ {"name": ["in", transaction_names], "submitted_to_frappe": 0},
+ "submitted_to_frappe",
+ 1,
+ )
+ frappe.db.commit()
+
+ def on_cancel(self):
+ transaction_names = [item.transaction_id for item in self.transfer_items]
+
+ # Update Payment Partner Records
+ if transaction_names:
+ frappe.db.set_value(
+ "Payment Partner Transaction",
+ {"name": ["in", transaction_names], "submitted_to_frappe": 1},
+ "submitted_to_frappe",
+ 0,
+ )
+ frappe.db.commit()
+
+
+@frappe.whitelist()
+def submit_payment_payout(partner, payment_gateway, from_date, to_date, partner_commission, transactions):
+ partner = (
+ partner if frappe.db.exists("Team", partner) else frappe.get_value("Team", {"user": partner}, "name")
+ )
+
+ try:
+ payout = frappe.new_doc("Partner Payment Payout")
+ payout.partner = partner
+ payout.payment_gateway = payment_gateway
+ payout.from_date = from_date
+ payout.to_date = to_date
+ payout.partner_commission = partner_commission
+
+ for transaction in transactions:
+ payout.append(
+ "transfer_items",
+ {
+ "transaction_id": transaction.get("name"),
+ "amount": transaction.get("amount"),
+ "posting_date": transaction.get("posting_date"),
+ },
+ )
+
+ payout.total_amount = sum(t.get("amount", 0) for t in transactions)
+ payout.commission = payout.total_amount * (payout.partner_commission / 100)
+ payout.net_amount = payout.total_amount - payout.commission
+
+ payout.insert()
+ payout.submit()
+
+ transaction_names = [t.get("name") for t in transactions]
+ if transaction_names:
+ frappe.db.set_value(
+ "Payment Partner Transaction",
+ {"name": ["in", transaction_names], "submitted_to_frappe": 0},
+ "submitted_to_frappe",
+ 1,
+ )
+
+ return {
+ "name": payout.name,
+ "total_amount": payout.total_amount,
+ "commission": payout.commission,
+ "net_amount": payout.net_amount,
+ }
+
+ except Exception as e:
+ frappe.log_error(f"Failed to create payout: {e!s}")
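The commission math in `before_save` splits the partner's cut pro rata across transfer items, so the document-level net always equals the sum of the item-level nets. A framework-free check of that arithmetic (illustrative numbers only):

```python
# Minimal check of the before_save commission arithmetic, outside Frappe.
transfer_items = [{"amount": 100.0}, {"amount": 250.0}]
partner_commission = 25.0  # percent

total_amount = sum(item["amount"] for item in transfer_items)  # 350.0
commission = total_amount * (partner_commission / 100)         # 87.5
net_amount = total_amount - commission                         # 262.5

# Per-item split, mirroring the loop in before_save
for item in transfer_items:
    item["commission_amount"] = item["amount"] * (partner_commission / 100)
    item["net_amount"] = item["amount"] - item["commission_amount"]

# Document-level net equals the sum of item-level nets
assert net_amount == sum(item["net_amount"] for item in transfer_items)
```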
diff --git a/press/press/doctype/partner_payment_payout/test_partner_payment_payout.py b/press/press/doctype/partner_payment_payout/test_partner_payment_payout.py
new file mode 100644
index 00000000000..9c921a08c68
--- /dev/null
+++ b/press/press/doctype/partner_payment_payout/test_partner_payment_payout.py
@@ -0,0 +1,14 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class IntegrationTestPartnerPaymentPayout(FrappeTestCase):
+ """
+ Integration tests for PartnerPaymentPayout.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/partner_payment_payout_item/__init__.py b/press/press/doctype/partner_payment_payout_item/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/partner_payment_payout_item/partner_payment_payout_item.json b/press/press/doctype/partner_payment_payout_item/partner_payment_payout_item.json
new file mode 100644
index 00000000000..6a19ab86f52
--- /dev/null
+++ b/press/press/doctype/partner_payment_payout_item/partner_payment_payout_item.json
@@ -0,0 +1,90 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-01-20 14:16:33.396639",
+ "default_view": "List",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "transaction_id",
+ "posting_date",
+ "amount",
+ "column_break_ayfd",
+ "commission_amount",
+ "amount_in_local_currency",
+ "net_amount",
+ "exchange_rate"
+ ],
+ "fields": [
+ {
+ "fieldname": "transaction_id",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "in_preview": 1,
+ "in_standard_filter": 1,
+ "label": "Transaction Id",
+ "options": "Payment Partner Transaction",
+ "reqd": 1
+ },
+ {
+ "fieldname": "posting_date",
+ "fieldtype": "Date",
+ "in_list_view": 1,
+ "in_preview": 1,
+ "in_standard_filter": 1,
+ "label": "Posting Date"
+ },
+ {
+ "fieldname": "amount",
+ "fieldtype": "Currency",
+ "in_list_view": 1,
+ "in_preview": 1,
+ "in_standard_filter": 1,
+ "label": "Amount(USD)"
+ },
+ {
+ "fieldname": "column_break_ayfd",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "commission_amount",
+ "fieldtype": "Currency",
+ "hidden": 1,
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Commission Amount"
+ },
+ {
+ "fieldname": "amount_in_local_currency",
+ "fieldtype": "Currency",
+ "hidden": 1,
+ "label": "Amount(LC)"
+ },
+ {
+ "fieldname": "net_amount",
+ "fieldtype": "Currency",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Net Amount"
+ },
+ {
+ "fetch_from": "transaction_id.exchange_rate",
+ "fieldname": "exchange_rate",
+ "fieldtype": "Float",
+ "label": "Exchange Rate"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2025-03-26 09:07:18.191183",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Partner Payment Payout Item",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/partner_payment_payout_item/partner_payment_payout_item.py b/press/press/doctype/partner_payment_payout_item/partner_payment_payout_item.py
new file mode 100644
index 00000000000..018167ebc8b
--- /dev/null
+++ b/press/press/doctype/partner_payment_payout_item/partner_payment_payout_item.py
@@ -0,0 +1,30 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+from __future__ import annotations
+
+# import frappe
+from frappe.model.document import Document
+
+
+class PartnerPaymentPayoutItem(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ amount: DF.Currency
+ amount_in_local_currency: DF.Currency
+ commission_amount: DF.Currency
+ exchange_rate: DF.Float
+ net_amount: DF.Currency
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ posting_date: DF.Date | None
+ transaction_id: DF.Link
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/payment_dispute/__init__.py b/press/press/doctype/payment_dispute/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/payment_dispute/payment_dispute.js b/press/press/doctype/payment_dispute/payment_dispute.js
new file mode 100644
index 00000000000..93b40da975d
--- /dev/null
+++ b/press/press/doctype/payment_dispute/payment_dispute.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2023, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Payment Dispute", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/payment_dispute/payment_dispute.json b/press/press/doctype/payment_dispute/payment_dispute.json
new file mode 100644
index 00000000000..9ad4f9a7dfa
--- /dev/null
+++ b/press/press/doctype/payment_dispute/payment_dispute.json
@@ -0,0 +1,86 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2023-11-30 14:26:12.167431",
+ "default_view": "List",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "dispute_id",
+ "event_type",
+ "payment_intent",
+ "column_break_crci",
+ "email",
+ "reason",
+ "status"
+ ],
+ "fields": [
+ {
+ "fieldname": "payment_intent",
+ "fieldtype": "Data",
+ "label": "Payment Intent",
+ "read_only": 1
+ },
+ {
+ "fieldname": "email",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "in_preview": 1,
+ "label": "Email",
+ "read_only": 1
+ },
+ {
+ "fieldname": "dispute_id",
+ "fieldtype": "Data",
+ "label": "Dispute Id",
+ "read_only": 1
+ },
+ {
+ "fieldname": "event_type",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "in_preview": 1,
+ "label": "Event Type",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_crci",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "reason",
+ "fieldtype": "Data",
+ "label": "Reason"
+ },
+ {
+ "fieldname": "status",
+ "fieldtype": "Data",
+ "label": "Status"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2024-09-23 12:27:14.065913",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Payment Dispute",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/payment_dispute/payment_dispute.py b/press/press/doctype/payment_dispute/payment_dispute.py
new file mode 100644
index 00000000000..2d096bedb66
--- /dev/null
+++ b/press/press/doctype/payment_dispute/payment_dispute.py
@@ -0,0 +1,40 @@
+# Copyright (c) 2023, Frappe and contributors
+# For license information, please see license.txt
+
+from __future__ import annotations
+
+# import frappe
+from frappe.model.document import Document
+
+from press.press.doctype.telegram_message.telegram_message import TelegramMessage
+
+
+class PaymentDispute(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ dispute_id: DF.Data | None
+ email: DF.Data | None
+ event_type: DF.Data | None
+ payment_intent: DF.Data | None
+ reason: DF.Data | None
+ status: DF.Data | None
+ # end: auto-generated types
+
+ def after_insert(self):
+ message = f"""
+ Dispute {self.event_type}!
+
+ Email: {self.email}
+ Dispute ID: `{self.dispute_id}`
+ Event: `{self.event_type}`
+ Reason: `{self.reason}`
+ Status: `{self.status}`
+ [Payment reference on Stripe Dashboard](https://dashboard.stripe.com/payments/{self.payment_intent})
+ """
+ TelegramMessage.enqueue(message=message, topic="Disputes", group="Billing")
diff --git a/press/press/doctype/payment_dispute/test_payment_dispute.py b/press/press/doctype/payment_dispute/test_payment_dispute.py
new file mode 100644
index 00000000000..adb7b02e856
--- /dev/null
+++ b/press/press/doctype/payment_dispute/test_payment_dispute.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2023, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class TestPaymentDispute(FrappeTestCase):
+ pass
diff --git a/press/press/doctype/payment_due_extension/__init__.py b/press/press/doctype/payment_due_extension/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/payment_due_extension/payment_due_extension.js b/press/press/doctype/payment_due_extension/payment_due_extension.js
new file mode 100644
index 00000000000..44f415bd56e
--- /dev/null
+++ b/press/press/doctype/payment_due_extension/payment_due_extension.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Payment Due Extension", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/payment_due_extension/payment_due_extension.json b/press/press/doctype/payment_due_extension/payment_due_extension.json
new file mode 100644
index 00000000000..1d2ba7f0135
--- /dev/null
+++ b/press/press/doctype/payment_due_extension/payment_due_extension.json
@@ -0,0 +1,73 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-04-14 11:50:10.971387",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "team",
+ "extension_date",
+ "reason",
+ "amended_from"
+ ],
+ "fields": [
+ {
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "in_filter": 1,
+ "in_list_view": 1,
+ "label": "Team",
+ "options": "Team",
+ "reqd": 1
+ },
+ {
+ "fieldname": "extension_date",
+ "fieldtype": "Date",
+ "in_list_view": 1,
+ "label": "Extension Date",
+ "reqd": 1
+ },
+ {
+ "fieldname": "reason",
+ "fieldtype": "Small Text",
+ "label": "Reason",
+ "reqd": 1
+ },
+ {
+ "fieldname": "amended_from",
+ "fieldtype": "Link",
+ "label": "Amended From",
+ "no_copy": 1,
+ "options": "Payment Due Extension",
+ "print_hide": 1,
+ "read_only": 1,
+ "search_index": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "is_submittable": 1,
+ "links": [],
+ "modified": "2025-04-14 11:55:56.662380",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Payment Due Extension",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": [],
+ "title_field": "team"
+}
\ No newline at end of file
diff --git a/press/press/doctype/payment_due_extension/payment_due_extension.py b/press/press/doctype/payment_due_extension/payment_due_extension.py
new file mode 100644
index 00000000000..88aa62e42be
--- /dev/null
+++ b/press/press/doctype/payment_due_extension/payment_due_extension.py
@@ -0,0 +1,50 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+from __future__ import annotations
+
+import frappe
+from frappe.model.document import Document
+
+
+class PaymentDueExtension(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ amended_from: DF.Link | None
+ extension_date: DF.Date
+ reason: DF.SmallText
+ team: DF.Link
+ # end: auto-generated types
+
+ def validate(self):
+ if self.extension_date < frappe.utils.today():
+ frappe.throw("Extension date cannot be in the past")
+
+ def before_insert(self):
+ if frappe.db.exists(
+ "Payment Due Extension",
+ {"team": self.team, "docstatus": 1, "extension_date": (">=", frappe.utils.today())},
+ ):
+ frappe.throw("An active Payment due extension record already exists for this team")
+
+ def on_submit(self):
+ frappe.db.set_value("Team", self.team, "extend_payment_due_suspension", 1)
+
+ def on_cancel(self):
+ frappe.db.set_value("Team", self.team, "extend_payment_due_suspension", 0)
+
+
+def remove_payment_due_extension():
+ extensions = frappe.get_all(
+ "Payment Due Extension",
+ {"docstatus": 1, "extension_date": ("<", frappe.utils.today())},
+ pluck="team",
+ )
+ for team in extensions:
+ frappe.db.set_value("Team", team, "extend_payment_due_suspension", 0)
+ frappe.db.commit()
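`remove_payment_due_extension` clears the team flag once an extension lapses, so it presumably runs on a schedule. The hooks change is not part of this excerpt; the conventional wiring in `press/hooks.py` would look like this (assumed):

```python
# Assumed entry in press/hooks.py (not shown in this diff): run the
# extension cleanup once a day via Frappe's scheduler.
scheduler_events = {
    "daily": [
        "press.press.doctype.payment_due_extension.payment_due_extension.remove_payment_due_extension",
    ],
}
```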
diff --git a/press/press/doctype/payment_due_extension/test_payment_due_extension.py b/press/press/doctype/payment_due_extension/test_payment_due_extension.py
new file mode 100644
index 00000000000..2966b771f0b
--- /dev/null
+++ b/press/press/doctype/payment_due_extension/test_payment_due_extension.py
@@ -0,0 +1,14 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class IntegrationTestPaymentDueExtension(FrappeTestCase):
+ """
+ Integration tests for PaymentDueExtension.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/payment_gateway/__init__.py b/press/press/doctype/payment_gateway/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/payment_gateway/payment_gateway.js b/press/press/doctype/payment_gateway/payment_gateway.js
new file mode 100644
index 00000000000..2902656cac2
--- /dev/null
+++ b/press/press/doctype/payment_gateway/payment_gateway.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Payment Gateway", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/payment_gateway/payment_gateway.json b/press/press/doctype/payment_gateway/payment_gateway.json
new file mode 100644
index 00000000000..548d5ad1206
--- /dev/null
+++ b/press/press/doctype/payment_gateway/payment_gateway.json
@@ -0,0 +1,178 @@
+{
+ "actions": [],
+ "autoname": "field:gateway",
+ "creation": "2025-01-18 10:45:52.966302",
+ "default_view": "List",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "team",
+ "column_break_kvnc",
+ "team_name",
+ "column_break_pwgv",
+ "currency",
+ "gateway",
+ "ui_configuration_section",
+ "integration_logo",
+ "column_break_jcag",
+ "gateway_settings",
+ "column_break_noki",
+ "gateway_controller",
+ "partner_integration_section",
+ "url",
+ "print_format",
+ "column_break_oefu",
+ "api_key",
+ "column_break_slwm",
+ "api_secret",
+ "taxes_section",
+ "taxes_and_charges"
+ ],
+ "fields": [
+ {
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Team",
+ "options": "Team"
+ },
+ {
+ "fieldname": "column_break_kvnc",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fetch_from": "team.team_title",
+ "fieldname": "team_name",
+ "fieldtype": "Data",
+ "label": "Team Name",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_pwgv",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "currency",
+ "fieldtype": "Link",
+ "label": "Currency",
+ "options": "Currency"
+ },
+ {
+ "fieldname": "gateway",
+ "fieldtype": "Data",
+ "label": "Gateway",
+ "unique": 1
+ },
+ {
+ "fieldname": "ui_configuration_section",
+ "fieldtype": "Section Break",
+ "label": "UI Configuration"
+ },
+ {
+ "fieldname": "integration_logo",
+ "fieldtype": "Attach Image",
+ "label": "Integration Logo"
+ },
+ {
+ "fieldname": "column_break_jcag",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "gateway_settings",
+ "fieldtype": "Link",
+ "label": "Gateway Settings",
+ "options": "DocType"
+ },
+ {
+ "fieldname": "column_break_noki",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "gateway_controller",
+ "fieldtype": "Dynamic Link",
+ "in_list_view": 1,
+ "label": "Gateway Controller",
+ "options": "gateway_settings"
+ },
+ {
+ "fieldname": "partner_integration_section",
+ "fieldtype": "Section Break",
+ "label": "Partner Integration"
+ },
+ {
+ "fieldname": "url",
+ "fieldtype": "Data",
+ "label": "URL"
+ },
+ {
+ "fieldname": "column_break_oefu",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "api_key",
+ "fieldtype": "Data",
+ "label": "API Key"
+ },
+ {
+ "fieldname": "column_break_slwm",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "api_secret",
+ "fieldtype": "Password",
+ "label": "API Secret"
+ },
+ {
+ "fieldname": "taxes_section",
+ "fieldtype": "Section Break",
+ "label": "Taxes"
+ },
+ {
+ "fieldname": "taxes_and_charges",
+ "fieldtype": "Percent",
+ "label": "Taxes and Charges"
+ },
+ {
+ "fieldname": "print_format",
+ "fieldtype": "Data",
+ "label": "Print Format"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2025-02-02 19:51:36.821998",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Payment Gateway",
+ "naming_rule": "By fieldname",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/payment_gateway/payment_gateway.py b/press/press/doctype/payment_gateway/payment_gateway.py
new file mode 100644
index 00000000000..c3aa2a06b71
--- /dev/null
+++ b/press/press/doctype/payment_gateway/payment_gateway.py
@@ -0,0 +1,32 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+from __future__ import annotations
+
+# import frappe
+from frappe.model.document import Document
+
+
+class PaymentGateway(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ api_key: DF.Data | None
+ api_secret: DF.Password | None
+ currency: DF.Link | None
+ gateway: DF.Data | None
+ gateway_controller: DF.DynamicLink | None
+ gateway_settings: DF.Link | None
+ integration_logo: DF.AttachImage | None
+ print_format: DF.Data | None
+ taxes_and_charges: DF.Percent
+ team: DF.Link | None
+ team_name: DF.Data | None
+ url: DF.Data | None
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/payment_gateway/test_payment_gateway.py b/press/press/doctype/payment_gateway/test_payment_gateway.py
new file mode 100644
index 00000000000..eb80e51c45a
--- /dev/null
+++ b/press/press/doctype/payment_gateway/test_payment_gateway.py
@@ -0,0 +1,14 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class IntegrationTestPaymentGateway(FrappeTestCase):
+ """
+ Integration tests for PaymentGateway.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/payment_partner_transaction/__init__.py b/press/press/doctype/payment_partner_transaction/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/payment_partner_transaction/payment_partner_transaction.js b/press/press/doctype/payment_partner_transaction/payment_partner_transaction.js
new file mode 100644
index 00000000000..36c70648b07
--- /dev/null
+++ b/press/press/doctype/payment_partner_transaction/payment_partner_transaction.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Payment Partner Transaction", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/payment_partner_transaction/payment_partner_transaction.json b/press/press/doctype/payment_partner_transaction/payment_partner_transaction.json
new file mode 100644
index 00000000000..3ab6153ab8c
--- /dev/null
+++ b/press/press/doctype/payment_partner_transaction/payment_partner_transaction.json
@@ -0,0 +1,175 @@
+{
+ "actions": [],
+ "autoname": "PPT.-.YY.-.MM.-.####",
+ "creation": "2025-01-18 10:53:07.242575",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "partner_details_section",
+ "payment_partner",
+ "posting_date",
+ "column_break_manc",
+ "payment_gateway",
+ "column_break_ejza",
+ "team",
+ "transaction_details_section",
+ "amount",
+ "actual_amount",
+ "column_break_xqsh",
+ "currency",
+ "actual_currency",
+ "column_break_jyxx",
+ "exchange_rate",
+ "submitted_to_frappe",
+ "section_break_yhqq",
+ "payment_transaction_details",
+ "section_break_7oh3",
+ "amended_from"
+ ],
+ "fields": [
+ {
+ "fieldname": "partner_details_section",
+ "fieldtype": "Section Break",
+ "label": "Team Details"
+ },
+ {
+ "fieldname": "payment_partner",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Payment Partner",
+ "options": "Team"
+ },
+ {
+ "default": "Today",
+ "fieldname": "posting_date",
+ "fieldtype": "Date",
+ "label": "Posting Date"
+ },
+ {
+ "fieldname": "column_break_manc",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "payment_gateway",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Payment Gateway",
+ "options": "Payment Gateway"
+ },
+ {
+ "fieldname": "column_break_ejza",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Team",
+ "options": "Team"
+ },
+ {
+ "fieldname": "transaction_details_section",
+ "fieldtype": "Section Break",
+ "label": "Transaction Details"
+ },
+ {
+ "fieldname": "amount",
+ "fieldtype": "Currency",
+ "label": "Amount",
+ "options": "currency"
+ },
+ {
+ "fieldname": "actual_amount",
+ "fieldtype": "Currency",
+ "label": "Actual Amount",
+ "options": "actual_currency"
+ },
+ {
+ "fieldname": "column_break_xqsh",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "USD",
+ "fieldname": "currency",
+ "fieldtype": "Link",
+ "label": "Currency",
+ "options": "Currency"
+ },
+ {
+ "fetch_from": "payment_gateway.currency",
+ "fieldname": "actual_currency",
+ "fieldtype": "Link",
+ "label": "Actual Currency",
+ "options": "Currency"
+ },
+ {
+ "fieldname": "column_break_jyxx",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "exchange_rate",
+ "fieldtype": "Float",
+ "label": "Exchange Rate"
+ },
+ {
+ "default": "0",
+ "fieldname": "submitted_to_frappe",
+ "fieldtype": "Check",
+ "label": "Submitted To Frappe"
+ },
+ {
+ "fieldname": "section_break_yhqq",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "payment_transaction_details",
+ "fieldtype": "Code",
+ "label": "Payment Transaction Details",
+ "options": "JSON",
+ "read_only": 1
+ },
+ {
+ "fieldname": "section_break_7oh3",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "amended_from",
+ "fieldtype": "Link",
+ "label": "Amended From",
+ "no_copy": 1,
+ "options": "Payment Partner Transaction",
+ "print_hide": 1,
+ "read_only": 1,
+ "search_index": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "is_submittable": 1,
+ "links": [],
+ "modified": "2025-02-02 17:54:48.975501",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Payment Partner Transaction",
+ "naming_rule": "Expression (old style)",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/payment_partner_transaction/payment_partner_transaction.py b/press/press/doctype/payment_partner_transaction/payment_partner_transaction.py
new file mode 100644
index 00000000000..280d051595d
--- /dev/null
+++ b/press/press/doctype/payment_partner_transaction/payment_partner_transaction.py
@@ -0,0 +1,32 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+from __future__ import annotations
+
+# import frappe
+from frappe.model.document import Document
+
+
+class PaymentPartnerTransaction(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ actual_amount: DF.Currency
+ actual_currency: DF.Link | None
+ amended_from: DF.Link | None
+ amount: DF.Currency
+ currency: DF.Link | None
+ exchange_rate: DF.Float
+ payment_gateway: DF.Link | None
+ payment_partner: DF.Link | None
+ payment_transaction_details: DF.Code | None
+ posting_date: DF.Date | None
+ submitted_to_frappe: DF.Check
+ team: DF.Link | None
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/payment_partner_transaction/test_payment_partner_transaction.py b/press/press/doctype/payment_partner_transaction/test_payment_partner_transaction.py
new file mode 100644
index 00000000000..61452e02ef1
--- /dev/null
+++ b/press/press/doctype/payment_partner_transaction/test_payment_partner_transaction.py
@@ -0,0 +1,14 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class IntegrationTestPaymentPartnerTransaction(FrappeTestCase):
+ """
+ Integration tests for PaymentPartnerTransaction.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/payout_order/patches/change_fields_from_recipient_to_team.py b/press/press/doctype/payout_order/patches/change_fields_from_recipient_to_team.py
new file mode 100644
index 00000000000..3485a8d18a6
--- /dev/null
+++ b/press/press/doctype/payout_order/patches/change_fields_from_recipient_to_team.py
@@ -0,0 +1,8 @@
+# Copyright (c) 2024, Frappe Technologies Pvt. Ltd. and Contributors
+# For license information, please see license.txt
+
+import frappe
+
+
+def execute():
+ frappe.db.sql("UPDATE `tabPayout Order` SET team = recipient")
diff --git a/press/press/doctype/payout_order/patches/compute_total_amount.py b/press/press/doctype/payout_order/patches/compute_total_amount.py
new file mode 100644
index 00000000000..e6ac60f30ae
--- /dev/null
+++ b/press/press/doctype/payout_order/patches/compute_total_amount.py
@@ -0,0 +1,28 @@
+# Copyright (c) 2024, Frappe Technologies Pvt. Ltd. and Contributors
+# For license information, please see license.txt
+
+import frappe
+from tqdm import tqdm
+
+
+def execute():
+ exchange_rate = frappe.db.get_single_value("Press Settings", "usd_rate")
+ payout_orders = frappe.get_all(
+ "Payout Order",
+ {"docstatus": 0},
+ ["name", "net_total_inr", "net_total_usd", "recipient_currency"],
+ )
+
+ for payout_order in tqdm(payout_orders):
+ total_amount = 0
+ if payout_order.recipient_currency == "USD":
+ inr_in_usd = 0
+ if payout_order.net_total_inr > 0:
+ inr_in_usd = payout_order.net_total_inr / exchange_rate
+ total_amount = payout_order.net_total_usd + inr_in_usd
+ elif payout_order.recipient_currency == "INR":
+ total_amount = (
+ payout_order.net_total_inr + payout_order.net_total_usd * exchange_rate
+ )
+
+ frappe.db.set_value("Payout Order", payout_order.name, "total_amount", total_amount)
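The patch normalizes both net totals into the recipient's currency using the single `usd_rate` from Press Settings. A worked example with an illustrative rate:

```python
# Worked example of the conversion above (exchange rate is made up).
exchange_rate = 80.0  # INR per USD, as stored in Press Settings "usd_rate"
net_total_inr, net_total_usd = 8000.0, 50.0

# USD recipient: INR earnings are converted to USD and added
total_usd = net_total_usd + net_total_inr / exchange_rate   # 50 + 100 = 150.0

# INR recipient: USD earnings are converted to INR and added
total_inr = net_total_inr + net_total_usd * exchange_rate   # 8000 + 4000 = 12000.0
```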
diff --git a/press/press/doctype/payout_order/payout_order.json b/press/press/doctype/payout_order/payout_order.json
index 4bfc6989265..31f2985261c 100644
--- a/press/press/doctype/payout_order/payout_order.json
+++ b/press/press/doctype/payout_order/payout_order.json
@@ -7,7 +7,7 @@
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
- "recipient",
+ "team",
"recipient_currency",
"period_start",
"due_date",
@@ -25,20 +25,15 @@
"items",
"section_break_14",
"net_total_inr",
+ "currency_inr",
+ "total_amount",
"column_break_15",
"net_total_usd",
+ "currency_usd",
"section_break_21",
"ignore_commission"
],
"fields": [
- {
- "fieldname": "recipient",
- "fieldtype": "Link",
- "in_list_view": 1,
- "label": "Recipient",
- "options": "Team",
- "reqd": 1
- },
{
"fieldname": "due_date",
"fieldtype": "Date",
@@ -109,12 +104,14 @@
"fieldname": "net_total_inr",
"fieldtype": "Currency",
"label": "Net Total INR",
+ "options": "currency_inr",
"read_only": 1
},
{
"fieldname": "net_total_usd",
"fieldtype": "Currency",
"label": "Net Total USD",
+ "options": "currency_usd",
"read_only": 1
},
{
@@ -136,10 +133,10 @@
"label": "Period End"
},
{
- "fetch_from": "recipient.currency",
+ "fetch_from": "team.currency",
"fieldname": "recipient_currency",
"fieldtype": "Data",
- "label": "Recipient Currency"
+ "label": "Currency"
},
{
"fieldname": "section_break_21",
@@ -151,12 +148,41 @@
"fieldtype": "Check",
"label": "Ignore Commission",
"options": "0"
+ },
+ {
+ "default": "INR",
+ "fieldname": "currency_inr",
+ "fieldtype": "Data",
+ "hidden": 1,
+ "label": "Currency INR"
+ },
+ {
+ "default": "USD",
+ "fieldname": "currency_usd",
+ "fieldtype": "Data",
+ "hidden": 1,
+ "label": "Currency USD"
+ },
+ {
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Team",
+ "options": "Team",
+ "reqd": 1
+ },
+ {
+ "fieldname": "total_amount",
+ "fieldtype": "Currency",
+ "label": "Total Amount",
+ "options": "recipient_currency",
+ "read_only": 1
}
],
"index_web_pages_for_search": 1,
"is_submittable": 1,
"links": [],
- "modified": "2023-04-30 12:41:38.091177",
+ "modified": "2025-03-20 10:23:23.836067",
"modified_by": "Administrator",
"module": "Press",
"name": "Payout Order",
diff --git a/press/press/doctype/payout_order/payout_order.py b/press/press/doctype/payout_order/payout_order.py
index df5d706c026..65d0f766f2f 100644
--- a/press/press/doctype/payout_order/payout_order.py
+++ b/press/press/doctype/payout_order/payout_order.py
@@ -1,22 +1,69 @@
# Copyright (c) 2022, Frappe and contributors
# For license information, please see license.txt
-import frappe
-
-from typing import List
+from datetime import date
from itertools import groupby
-from press.utils import log_error
+from typing import List
+
+import frappe
from frappe.model.document import Document
+
from press.press.doctype.invoice_item.invoice_item import InvoiceItem
from press.press.doctype.payout_order_item.payout_order_item import PayoutOrderItem
-
-from datetime import date
+from press.utils import log_error
class PayoutOrder(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+ from press.press.doctype.payout_order_item.payout_order_item import PayoutOrderItem
+
+ amended_from: DF.Link | None
+ currency_inr: DF.Data | None
+ currency_usd: DF.Data | None
+ due_date: DF.Date | None
+ frappe_purchase_order: DF.Data | None
+ ignore_commission: DF.Check
+ items: DF.Table[PayoutOrderItem]
+ mode_of_payment: DF.Literal["Cash", "Credits", "Internal"]
+ net_total_inr: DF.Currency
+ net_total_usd: DF.Currency
+ notes: DF.SmallText | None
+ period_end: DF.Date | None
+ period_start: DF.Date | None
+ recipient_currency: DF.Data | None
+ status: DF.Literal["Draft", "Paid", "Commissioned"]
+ team: DF.Link
+ total_amount: DF.Currency
+ type: DF.Literal["Marketplace", "SaaS"]
+ # end: auto-generated types
+
+ dashboard_fields = [
+ "period_end",
+ "team",
+ "mode_of_payment",
+ "net_total_inr",
+ "net_total_usd",
+ "status",
+ "total_amount",
+ "items",
+ ]
+
+ @staticmethod
+ def get_list_query(query):
+ PayoutOrder = frappe.qb.DocType("Payout Order")
+ query = query.where((PayoutOrder.docstatus != 2))
+ return query
+
def validate(self):
self.validate_items()
self.validate_net_totals()
+ self.compute_total_amount()
def validate_items(self):
for row in self.items:
@@ -39,6 +86,10 @@ def validate_items(self):
invoice_item = get_invoice_item_for_po_item(invoice_name, row)
+ # Skip calculation for app revenue ledger items, which have no matching invoice item
+ if not invoice_item:
+ return
+
row.tax = row.tax or 0.0
row.total_amount = invoice_item.amount
row.site = invoice_item.site
@@ -53,7 +104,7 @@ def validate_items(self):
{
"doctype": "Marketplace App Payment",
"app": row.document_name,
- "team": self.recipient,
+ "team": self.team,
}
).insert(ignore_permissions=True)
)
@@ -83,6 +134,18 @@ def validate_net_totals(self):
if self.net_total_usd <= 0 and self.net_total_inr <= 0:
self.status = "Commissioned"
+ elif (self.net_total_usd > 0 or self.net_total_inr > 0) and not self.frappe_purchase_order:
+ self.status = "Draft"
+
+ def compute_total_amount(self):
+ exchange_rate = frappe.db.get_single_value("Press Settings", "usd_rate")
+ if self.recipient_currency == "USD":
+ inr_in_usd = 0
+ if self.net_total_inr > 0:
+ inr_in_usd = self.net_total_inr / exchange_rate
+ self.total_amount = self.net_total_usd + inr_in_usd
+ elif self.recipient_currency == "INR":
+ self.total_amount = self.net_total_inr + (self.net_total_usd * exchange_rate)
def before_submit(self):
if self.mode_of_payment == "Cash" and (not self.frappe_purchase_order):
@@ -94,17 +157,31 @@ def before_submit(self):
def get_invoice_item_for_po_item(
invoice_name: str, payout_order_item: PayoutOrderItem
-) -> InvoiceItem:
- return frappe.get_doc(
- "Invoice Item",
- {
- "parent": invoice_name,
- "document_name": payout_order_item.document_name,
- "document_type": payout_order_item.document_type,
- "plan": payout_order_item.plan,
- "rate": payout_order_item.rate,
- },
- )
+) -> InvoiceItem | None:
+ try:
+ if payout_order_item.invoice_item:
+ item = frappe.get_doc("Invoice Item", payout_order_item.invoice_item)
+ if (
+ item.parent == invoice_name
+ and item.document_name == payout_order_item.document_name
+ and item.document_type == payout_order_item.document_type
+ and item.plan == payout_order_item.plan
+ and item.rate == payout_order_item.rate
+ ):
+ return item
+
+ return frappe.get_doc(
+ "Invoice Item",
+ {
+ "parent": invoice_name,
+ "document_name": payout_order_item.document_name,
+ "document_type": payout_order_item.document_type,
+ "plan": payout_order_item.plan,
+ "rate": payout_order_item.rate,
+ },
+ )
+ except frappe.DoesNotExistError:
+ return None
def create_marketplace_payout_orders_monthly(period_start=None, period_end=None):
@@ -118,21 +195,18 @@ def create_marketplace_payout_orders_monthly(period_start=None, period_end=None)
# Group by teams
for app_team, items in groupby(items, key=lambda x: x["app_team"]):
try:
-
item_names = [i.name for i in items]
po_exists = frappe.db.exists(
- "Payout Order", {"recipient": app_team, "period_end": period_end}
+ "Payout Order", {"team": app_team, "period_end": period_end}
)
if not po_exists:
create_payout_order_from_invoice_item_names(
- item_names, recipient=app_team, period_start=period_start, period_end=period_end
+ item_names, team=app_team, period_start=period_start, period_end=period_end
)
else:
- po = frappe.get_doc(
- "Payout Order", {"recipient": app_team, "period_end": period_end}
- )
+ po = frappe.get_doc("Payout Order", {"team": app_team, "period_end": period_end})
add_invoice_items_to_po(po, item_names)
frappe.db.set_value(
@@ -195,6 +269,7 @@ def get_unaccounted_marketplace_invoice_items():
.select(
invoice_item.name, invoice_item.document_name, marketplace_app.team.as_("app_team")
)
+ .distinct()
.run(as_dict=True)
)
@@ -204,7 +279,7 @@ def get_unaccounted_marketplace_invoice_items():
@frappe.whitelist()
def create_payout_order_from_invoice_items(
invoice_items: List[InvoiceItem],
- recipient: str,
+ team: str,
period_start: date,
period_end: date,
mode_of_payment: str = "Cash",
@@ -215,7 +290,7 @@ def create_payout_order_from_invoice_items(
po = frappe.get_doc(
{
"doctype": "Payout Order",
- "recipient": recipient,
+ "team": team,
"mode_of_payment": mode_of_payment,
"notes": notes,
"type": type,
diff --git a/press/press/doctype/payout_order/test_payout_order.py b/press/press/doctype/payout_order/test_payout_order.py
index 30a616cda2a..d664fb951d7 100644
--- a/press/press/doctype/payout_order/test_payout_order.py
+++ b/press/press/doctype/payout_order/test_payout_order.py
@@ -1,9 +1,11 @@
# Copyright (c) 2022, Frappe and Contributors
# See license.txt
-import frappe
+from unittest.mock import Mock, patch
+import frappe
from frappe.tests.utils import FrappeTestCase
+
from press.press.doctype.app.test_app import create_test_app
from press.press.doctype.invoice.invoice import Invoice
from press.press.doctype.marketplace_app.test_marketplace_app import (
@@ -15,8 +17,6 @@
)
from press.press.doctype.team.test_team import create_test_team
-from unittest.mock import patch, Mock
-
@patch.object(Invoice, "create_invoice_on_frappeio", new=Mock())
class TestPayoutOrder(FrappeTestCase):
@@ -140,15 +140,15 @@ def test_create_marketplace_monthly_payout_order(self):
self.create_test_usd_invoice()
# No payout order before running the job
- self.assertFalse(frappe.db.exists("Payout Order", {"recipient": self.test_team.name}))
+ self.assertFalse(frappe.db.exists("Payout Order", {"team": self.test_team.name}))
# Run the monthly job
create_marketplace_payout_orders_monthly()
# The Payout Order should have been created
- self.assertTrue(frappe.db.exists("Payout Order", {"recipient": self.test_team.name}))
+ self.assertTrue(frappe.db.exists("Payout Order", {"team": self.test_team.name}))
- po = frappe.get_doc("Payout Order", {"recipient": self.test_team.name})
+ po = frappe.get_doc("Payout Order", {"team": self.test_team.name})
self.assertEqual(len(po.items), 1)
# The invoice item must be marked as paid out
@@ -160,7 +160,7 @@ def test_create_marketplace_monthly_payout_order(self):
# Re-run should not create a new PO
# Since all items are already accounted for
create_marketplace_payout_orders_monthly()
- po_count = frappe.db.count("Payout Order", {"recipient": self.test_team.name})
+ po_count = frappe.db.count("Payout Order", {"team": self.test_team.name})
self.assertEqual(po_count, 1)
def test_does_not_create_duplicate_monthly_payout_order(self):
@@ -172,9 +172,7 @@ def test_does_not_create_duplicate_monthly_payout_order(self):
period_end = frappe.utils.data.get_last_day(today)
# No POs initially
- num_payout_orders = frappe.db.count(
- "Payout Order", {"recipient": self.test_team.name}
- )
+ num_payout_orders = frappe.db.count("Payout Order", {"team": self.test_team.name})
self.assertEqual(num_payout_orders, 0)
po = create_payout_order_from_invoice_items(
@@ -183,9 +181,7 @@ def test_does_not_create_duplicate_monthly_payout_order(self):
create_marketplace_payout_orders_monthly()
- num_payout_orders = frappe.db.count(
- "Payout Order", {"recipient": self.test_team.name}
- )
+ num_payout_orders = frappe.db.count("Payout Order", {"team": self.test_team.name})
self.assertEqual(num_payout_orders, 1)
# The original PO must now contain the invoice item
diff --git a/press/press/doctype/payout_order_item/payout_order_item.json b/press/press/doctype/payout_order_item/payout_order_item.json
index 1c585419054..267810ebc6a 100644
--- a/press/press/doctype/payout_order_item/payout_order_item.json
+++ b/press/press/doctype/payout_order_item/payout_order_item.json
@@ -51,23 +51,27 @@
{
"fieldname": "total_amount",
"fieldtype": "Currency",
- "label": "Total Amount"
+ "label": "Total Amount",
+ "options": "currency"
},
{
"fieldname": "tax",
"fieldtype": "Currency",
- "label": "Tax"
+ "label": "Tax",
+ "options": "currency"
},
{
"fieldname": "gateway_fee",
"fieldtype": "Currency",
- "label": "Gateway Fee"
+ "label": "Gateway Fee",
+ "options": "currency"
},
{
"fieldname": "net_amount",
"fieldtype": "Currency",
"in_list_view": 1,
- "label": "Net Amount"
+ "label": "Net Amount",
+ "options": "currency"
},
{
"fieldname": "column_break_5",
@@ -76,7 +80,8 @@
{
"fieldname": "commission",
"fieldtype": "Currency",
- "label": "Commission"
+ "label": "Commission",
+ "options": "currency"
},
{
"fieldname": "section_break_10",
@@ -91,7 +96,7 @@
"fieldname": "rate",
"fieldtype": "Currency",
"label": "Rate",
- "reqd": 1
+ "options": "currency"
},
{
"fieldname": "currency",
@@ -108,7 +113,8 @@
"fieldtype": "Link",
"in_list_view": 1,
"label": "Site",
- "options": "Site"
+ "options": "Site",
+ "search_index": 1
},
{
"fieldname": "quantity",
@@ -119,14 +125,13 @@
"fieldname": "invoice_item",
"fieldtype": "Link",
"label": "Invoice Item",
- "options": "Invoice Item",
- "reqd": 1
+ "options": "Invoice Item"
}
],
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
- "modified": "2022-10-31 00:19:23.739873",
+ "modified": "2025-03-18 10:10:51.856778",
"modified_by": "Administrator",
"module": "Press",
"name": "Payout Order Item",
diff --git a/press/press/doctype/payout_order_item/payout_order_item.py b/press/press/doctype/payout_order_item/payout_order_item.py
index 7c703255f1c..71487cac4dc 100644
--- a/press/press/doctype/payout_order_item/payout_order_item.py
+++ b/press/press/doctype/payout_order_item/payout_order_item.py
@@ -6,4 +6,32 @@
class PayoutOrderItem(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ commission: DF.Currency
+ currency: DF.Literal["USD", "INR"]
+ document_name: DF.DynamicLink
+ document_type: DF.Link
+ gateway_fee: DF.Currency
+ invoice: DF.Link
+ invoice_item: DF.Link | None
+ name: DF.Int | None
+ net_amount: DF.Currency
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ plan: DF.Data | None
+ quantity: DF.Float
+ rate: DF.Currency
+ site: DF.Link | None
+ tax: DF.Currency
+ total_amount: DF.Currency
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/physical_backup_group/__init__.py b/press/press/doctype/physical_backup_group/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/physical_backup_group/physical_backup_group.js b/press/press/doctype/physical_backup_group/physical_backup_group.js
new file mode 100644
index 00000000000..d5b80b8cc63
--- /dev/null
+++ b/press/press/doctype/physical_backup_group/physical_backup_group.js
@@ -0,0 +1,37 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('Physical Backup Group', {
+ refresh(frm) {
+ if (frm.is_new()) {
+ return;
+ }
+
+ [
+ [__('Sync'), 'sync', false],
+ [__('Start / Resume'), 'trigger_next_backup', false],
+ [__('Set DB Sizes'), 'set_db_sizes', true],
+ [__('Retry Failed Backups'), 'retry_failed_backups', true],
+ [__('Delete Backups'), 'delete_backups', true],
+ [__('Activate All Sites'), 'activate_all_sites', true],
+ [__('Create Duplicate Group'), 'create_duplicate_group', true],
+ ].forEach(([label, method, grouped]) => {
+ frm.add_custom_button(
+ label,
+ () => {
+ frappe.confirm(
+ `Are you sure you want to ${label.toLowerCase()}?`,
+ () =>
+ frm
+ .call(method, {
+ freeze: true,
+ freeze_message: __('Please wait...'),
+ })
+ .then(() => frm.refresh()),
+ );
+ },
+ grouped ? __('Actions') : null,
+ );
+ });
+ },
+});
diff --git a/press/press/doctype/physical_backup_group/physical_backup_group.json b/press/press/doctype/physical_backup_group/physical_backup_group.json
new file mode 100644
index 00000000000..cc9419d394a
--- /dev/null
+++ b/press/press/doctype/physical_backup_group/physical_backup_group.json
@@ -0,0 +1,97 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "autoname": "prompt",
+ "creation": "2025-02-18 10:25:00.790197",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "no_of_sites",
+ "column_break_bbil",
+ "successful_backups",
+ "column_break_vwuc",
+ "available_backups",
+ "column_break_ikwc",
+ "unavailable_backups",
+ "section_break_bvmf",
+ "site_backups"
+ ],
+ "fields": [
+ {
+ "fieldname": "no_of_sites",
+ "fieldtype": "Int",
+ "in_list_view": 1,
+ "label": "No of Sites",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_bbil",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "successful_backups",
+ "fieldtype": "Int",
+ "in_list_view": 1,
+ "label": "Successful Backups",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_vwuc",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "available_backups",
+ "fieldtype": "Int",
+ "in_list_view": 1,
+ "label": "Available Backups",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_ikwc",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "unavailable_backups",
+ "fieldtype": "Int",
+ "in_list_view": 1,
+ "label": "Unavailable Backups",
+ "read_only": 1
+ },
+ {
+ "fieldname": "section_break_bvmf",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "site_backups",
+ "fieldtype": "Table",
+ "label": "Site Backups",
+ "options": "Physical Backup Group Site",
+ "reqd": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2025-02-18 15:14:19.059871",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Physical Backup Group",
+ "naming_rule": "Set by user",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/physical_backup_group/physical_backup_group.py b/press/press/doctype/physical_backup_group/physical_backup_group.py
new file mode 100644
index 00000000000..4a66edb1c3c
--- /dev/null
+++ b/press/press/doctype/physical_backup_group/physical_backup_group.py
@@ -0,0 +1,127 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+from __future__ import annotations
+
+import frappe
+from frappe.model.document import Document
+
+from press.agent import Agent
+
+
+class PhysicalBackupGroup(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.physical_backup_group_site.physical_backup_group_site import (
+ PhysicalBackupGroupSite,
+ )
+
+ available_backups: DF.Int
+ no_of_sites: DF.Int
+ site_backups: DF.Table[PhysicalBackupGroupSite]
+ successful_backups: DF.Int
+ unavailable_backups: DF.Int
+ # end: auto-generated types
+
+ @property
+ def next_site_backup(self) -> PhysicalBackupGroupSite | None:
+ # Fetch the first pending site backup
+ for site in self.site_backups:
+ if site.status == "Pending":
+ return site
+ return None
+
+ @property
+ def current_site_backup(self) -> PhysicalBackupGroupSite | None:
+ # Fetch the last non-pending site backup
+ for site in reversed(self.site_backups):
+ if site.status != "Pending":
+ return site
+ return None
+
+ @frappe.whitelist()
+ def sync(self):
+ self.no_of_sites = len(self.site_backups)
+ # Check site backup's status
+ for site in self.site_backups:
+ site.sync()
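+ # Note: a backup can be Successful yet unavailable if its snapshot files were deleted later,
+ # which is exactly what unavailable_backups counts below.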
+ self.successful_backups = len([site for site in self.site_backups if site.status == "Success"])
+ self.available_backups = len([site for site in self.site_backups if site.backup_available])
+ self.unavailable_backups = len(
+ [site for site in self.site_backups if (not site.backup_available and site.status == "Success")]
+ )
+ self.save(ignore_permissions=True)
+
+ @frappe.whitelist()
+ def set_db_sizes(self):
+ for site in self.site_backups:
+ site.set_db_size()
+
+ @frappe.whitelist()
+ def trigger_next_backup(self):
+ frappe.enqueue_doc(self.doctype, self.name, "_trigger_next_backup", queue="default", at_front=True)
+ frappe.msgprint("Triggered next backup")
+
+ def _trigger_next_backup(self):
+ current_site_backup = self.current_site_backup
+ if current_site_backup and current_site_backup.status == "Running":
+ return
+
+ next_site_backup = self.next_site_backup
+ if not next_site_backup:
+ frappe.msgprint("No more sites to backup")
+ return
+ next_site_backup.status = "Running"
+ next_site_backup.save(ignore_permissions=True)
+ frappe.enqueue_doc(
+ "Physical Backup Group Site",
+ next_site_backup.name,
+ "physical_backup",
+ queue="default",
+ enqueue_after_commit=True,
+ )
+
+ @frappe.whitelist()
+ def retry_failed_backups(self):
+ for site in self.site_backups:
+ if site.status == "Failure":
+ site.backup = None
+ site.backup_available = False
+ site.status = "Pending"
+ site.save(ignore_permissions=True)
+
+ @frappe.whitelist()
+ def delete_backups(self):
+ for site in self.site_backups:
+ site.delete_backup()
+
+ @frappe.whitelist()
+ def activate_all_sites(self):
+ for site_backup in self.site_backups:
+ site = frappe.get_doc("Site", site_backup.site)
+ agent = Agent(site.server)
+ agent.activate_site(site)
+
+ @frappe.whitelist()
+ def create_duplicate_group(self):
+ suffix = 2
+ name = self.name + "-" + str(suffix)
+ while frappe.db.exists("Physical Backup Group", name):
+ suffix += 1
+ name = self.name + "-" + str(suffix)
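+ # e.g. a group named "nightly-batch" (hypothetical) becomes "nightly-batch-2", then "nightly-batch-3", and so on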
+ duplicate_group = frappe.get_doc(
+ {
+ "doctype": "Physical Backup Group",
+ "name": name,
+ "site_backups": [
+ {"site": site_backup.site, "status": "Pending"} for site_backup in self.site_backups
+ ],
+ }
+ ).insert()
+ frappe.msgprint("Created duplicate group - " + duplicate_group.name)
diff --git a/press/press/doctype/physical_backup_group/test_physical_backup_group.py b/press/press/doctype/physical_backup_group/test_physical_backup_group.py
new file mode 100644
index 00000000000..6be5537611b
--- /dev/null
+++ b/press/press/doctype/physical_backup_group/test_physical_backup_group.py
@@ -0,0 +1,14 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class IntegrationTestPhysicalBackupGroup(FrappeTestCase):
+ """
+ Integration tests for PhysicalBackupGroup.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/physical_backup_group_site/__init__.py b/press/press/doctype/physical_backup_group_site/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/physical_backup_group_site/physical_backup_group_site.json b/press/press/doctype/physical_backup_group_site/physical_backup_group_site.json
new file mode 100644
index 00000000000..40e9d8346da
--- /dev/null
+++ b/press/press/doctype/physical_backup_group_site/physical_backup_group_site.json
@@ -0,0 +1,74 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-02-18 10:31:17.504134",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "site",
+ "db_size",
+ "status",
+ "backup_available",
+ "backup",
+ "duration_seconds"
+ ],
+ "fields": [
+ {
+ "fieldname": "site",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Site",
+ "options": "Site",
+ "reqd": 1
+ },
+ {
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Status",
+ "options": "Pending\nRunning\nSuccess\nFailure",
+ "reqd": 1
+ },
+ {
+ "fieldname": "backup",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Backup",
+ "options": "Site Backup"
+ },
+ {
+ "default": "0",
+ "fieldname": "backup_available",
+ "fieldtype": "Check",
+ "in_list_view": 1,
+ "label": "Backup Available",
+ "read_only": 1
+ },
+ {
+ "fieldname": "db_size",
+ "fieldtype": "Int",
+ "in_list_view": 1,
+ "label": "DB Size (MB)",
+ "read_only": 1
+ },
+ {
+ "fieldname": "duration_seconds",
+ "fieldtype": "Int",
+ "in_list_view": 1,
+ "label": "Duration (seconds)"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2025-02-18 13:39:44.803273",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Physical Backup Group Site",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/physical_backup_group_site/physical_backup_group_site.py b/press/press/doctype/physical_backup_group_site/physical_backup_group_site.py
new file mode 100644
index 00000000000..1b8f713fb24
--- /dev/null
+++ b/press/press/doctype/physical_backup_group_site/physical_backup_group_site.py
@@ -0,0 +1,115 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+from __future__ import annotations
+
+import time
+
+import frappe
+from frappe.exceptions import DoesNotExistError
+from frappe.model.document import Document
+
+from press.agent import Agent
+
+
+class PhysicalBackupGroupSite(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ backup: DF.Link | None
+ backup_available: DF.Check
+ db_size: DF.Int
+ duration_seconds: DF.Int
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ site: DF.Link
+ status: DF.Literal["Pending", "Running", "Success", "Failure"]
+ # end: auto-generated types
+
+ def set_db_size(self):
+ if self.db_size:
+ return
+ doc = frappe.get_doc("Site", self.site)
+ self.db_size = doc.current_usage["database"]
+ self.save()
+
+ def sync(self):
+ if not self.backup:
+ return
+ try:
+ backup = frappe.get_doc("Site Backup", self.backup)
+ if backup.database_snapshot:
+ # sync status of snapshot
+ frappe.get_doc("Virtual Disk Snapshot", backup.database_snapshot).sync()
+ backup.reload()
+ if backup.status == "Pending":
+ self.status = "Running"
+ else:
+ self.status = backup.status
+ self.backup_available = backup.files_availability == "Available"
+ except DoesNotExistError:
+ self.backup = None
+ self.backup_available = False
+ self.save()
+
+ def physical_backup(self):
+ start_time = time.time()
+ site = frappe.get_doc("Site", self.site)
+ agent = Agent(site.server)
+ try:
+ deactivate_job = agent.deactivate_site(site)
+ deactivate_job_status = deactivate_job.status
+ while True:
+ deactivate_job_status = frappe.get_value("Agent Job", deactivate_job.name, "status")
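+ # Commit to close the current read transaction, so each poll sees the latest job status instead of a stale snapshot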
+ frappe.db.commit()
+ if deactivate_job_status in ("Success", "Failure", "Delivery Failure"):
+ break
+ time.sleep(1)
+
+ if deactivate_job_status != "Success":
+ self.status = "Failure"
+ self.save()
+ return
+
+ # backup site
+ backup = site.physical_backup()
+ self.backup = backup.name
+ self.save()
+
+ backup_status = backup.status
+ while True:
+ backup_status = frappe.get_value("Site Backup", self.backup, "status")
+ frappe.db.commit()
+ if backup_status in ("Success", "Failure"):
+ break
+ time.sleep(5)
+
+ if backup_status == "Success":
+ self.status = "Success"
+ else:
+ self.status = "Failure"
+ duration = time.time() - start_time
+ self.duration_seconds = int(duration)
+ self.save()
+ except Exception:
+ frappe.log_error(title="Error while bulk physical backup")
+ finally:
+ agent.activate_site(site)
+
+ def delete_backup(self):
+ if not self.backup:
+ return
+ database_snapshot = frappe.get_value("Site Backup", self.backup, "database_snapshot")
+ if database_snapshot:
+ frappe.get_doc("Virtual Disk Snapshot", database_snapshot).delete_snapshot()
+
+ # def on_update(self):
+ # if self.has_value_changed("status") and self.status in ("Success", "Failure"):
+ # # trigger next backup
+ # frappe.enqueue_doc("Physical Backup Group", self.parent, "_trigger_next_backup", queue="default")
diff --git a/press/press/doctype/physical_backup_restoration/__init__.py b/press/press/doctype/physical_backup_restoration/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/physical_backup_restoration/physical_backup_restoration.js b/press/press/doctype/physical_backup_restoration/physical_backup_restoration.js
new file mode 100644
index 00000000000..bcdb371abd1
--- /dev/null
+++ b/press/press/doctype/physical_backup_restoration/physical_backup_restoration.js
@@ -0,0 +1,37 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('Physical Backup Restoration', {
+ refresh(frm) {
+ if (frm.is_new()) {
+ return;
+ }
+
+ [
+ [__('Start'), 'execute', frm.doc.status === 'Pending', false],
+ [__('Force Continue'), 'force_continue', true, true],
+ [__('Force Fail'), 'force_fail', frm.doc.status === 'Running', true],
+ [__('Cleanup'), 'cleanup', frm.doc.status === 'Failure', true],
+ [__('Retry'), 'retry', frm.doc.status === 'Failure', false],
+ ].forEach(([label, method, condition, grouped]) => {
+ if (condition) {
+ frm.add_custom_button(
+ label,
+ () => {
+ frappe.confirm(
+ `Are you sure you want to ${label.toLowerCase()}?`,
+ () =>
+ frm
+ .call(method, {
+ freeze: true,
+ freeze_message: __('Please wait...'),
+ })
+ .then(() => frm.refresh()),
+ );
+ },
+ grouped ? __('Actions') : null,
+ );
+ }
+ });
+ },
+});
diff --git a/press/press/doctype/physical_backup_restoration/physical_backup_restoration.json b/press/press/doctype/physical_backup_restoration/physical_backup_restoration.json
new file mode 100644
index 00000000000..40b27b4d75a
--- /dev/null
+++ b/press/press/doctype/physical_backup_restoration/physical_backup_restoration.json
@@ -0,0 +1,276 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-01-10 13:02:39.393157",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "site",
+ "status",
+ "cleanup_completed",
+ "site_backup",
+ "disk_snapshot",
+ "column_break_zind",
+ "job",
+ "volume",
+ "device",
+ "mount_point",
+ "section_break_pqgo",
+ "source_database",
+ "column_break_kaja",
+ "destination_database",
+ "destination_server",
+ "section_break_gthb",
+ "deactivate_site_during_restoration",
+ "restore_specific_tables",
+ "tables_to_restore",
+ "log_ansible_output",
+ "is_failure_resolved",
+ "section_break_swxv",
+ "start",
+ "column_break_xqdd",
+ "end",
+ "column_break_qosz",
+ "duration",
+ "section_break_aqam",
+ "steps",
+ "section_break_weie",
+ "physical_restoration_test"
+ ],
+ "fields": [
+ {
+ "fieldname": "site",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Site",
+ "options": "Site",
+ "reqd": 1,
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "column_break_zind",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "Pending",
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Status",
+ "options": "Pending\nScheduled\nRunning\nSuccess\nFailure",
+ "read_only": 1,
+ "reqd": 1
+ },
+ {
+ "fieldname": "disk_snapshot",
+ "fieldtype": "Link",
+ "label": "Disk Snapshot",
+ "options": "Virtual Disk Snapshot",
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "site_backup",
+ "fieldtype": "Link",
+ "label": "Site Backup",
+ "options": "Site Backup",
+ "reqd": 1,
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "section_break_pqgo",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "source_database",
+ "fieldtype": "Data",
+ "label": "Source Database",
+ "reqd": 1,
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "column_break_kaja",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "destination_database",
+ "fieldtype": "Data",
+ "label": " Destination Database",
+ "reqd": 1,
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "section_break_aqam",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "steps",
+ "fieldtype": "Table",
+ "label": "Steps",
+ "options": "Physical Backup Restoration Step"
+ },
+ {
+ "fieldname": "destination_server",
+ "fieldtype": "Link",
+ "label": "Destination Server",
+ "options": "Database Server",
+ "reqd": 1,
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "volume",
+ "fieldtype": "Data",
+ "label": "Volume",
+ "read_only": 1
+ },
+ {
+ "fieldname": "device",
+ "fieldtype": "Data",
+ "label": "Device",
+ "read_only": 1
+ },
+ {
+ "fieldname": "job",
+ "fieldtype": "Link",
+ "label": "Job",
+ "options": "Agent Job",
+ "read_only": 1
+ },
+ {
+ "fieldname": "section_break_swxv",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "start",
+ "fieldtype": "Datetime",
+ "label": "Start",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_xqdd",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "end",
+ "fieldtype": "Datetime",
+ "label": "End",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_qosz",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "duration",
+ "fieldtype": "Duration",
+ "label": "Duration",
+ "read_only": 1
+ },
+ {
+ "fieldname": "mount_point",
+ "fieldtype": "Data",
+ "label": "Mount Point",
+ "read_only": 1
+ },
+ {
+ "fieldname": "section_break_gthb",
+ "fieldtype": "Section Break"
+ },
+ {
+ "depends_on": "eval: doc.restore_specific_tables",
+ "fieldname": "tables_to_restore",
+ "fieldtype": "JSON",
+ "label": "Tables To Restore"
+ },
+ {
+ "default": "0",
+ "fieldname": "restore_specific_tables",
+ "fieldtype": "Check",
+ "label": "Restore Specific Tables"
+ },
+ {
+ "fieldname": "section_break_weie",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "physical_restoration_test",
+ "fieldtype": "Data",
+ "label": "Physical Restoration Test"
+ },
+ {
+ "default": "0",
+ "fieldname": "cleanup_completed",
+ "fieldtype": "Check",
+ "label": "Cleanup Completed"
+ },
+ {
+ "default": "0",
+ "fieldname": "log_ansible_output",
+ "fieldtype": "Check",
+ "label": "Log Ansible Output"
+ },
+ {
+ "default": "0",
+ "fieldname": "deactivate_site_during_restoration",
+ "fieldtype": "Check",
+ "label": "Deactivate Site During Restoration"
+ },
+ {
+ "default": "0",
+ "fieldname": "is_failure_resolved",
+ "fieldtype": "Check",
+ "label": "Is Failure Resolved"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [
+ {
+ "link_doctype": "Agent Job",
+ "link_fieldname": "reference_name"
+ }
+ ],
+ "modified": "2025-04-17 11:13:10.779531",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Physical Backup Restoration",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/physical_backup_restoration/physical_backup_restoration.py b/press/press/doctype/physical_backup_restoration/physical_backup_restoration.py
new file mode 100644
index 00000000000..498c214cfa4
--- /dev/null
+++ b/press/press/doctype/physical_backup_restoration/physical_backup_restoration.py
@@ -0,0 +1,1001 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+from __future__ import annotations
+
+import contextlib
+import json
+import os
+import time
+from enum import Enum
+from typing import TYPE_CHECKING
+
+import frappe
+import frappe.utils
+from frappe.model.document import Document
+
+from press.agent import Agent
+from press.press.doctype.ansible_console.ansible_console import AnsibleAdHoc
+from press.press.doctype.physical_restoration_test.physical_restoration_test import trigger_next_restoration
+from press.utils import log_error
+
+if TYPE_CHECKING:
+ from collections.abc import Callable
+
+ from press.press.doctype.physical_backup_restoration_step.physical_backup_restoration_step import (
+ PhysicalBackupRestorationStep,
+ )
+ from press.press.doctype.site.site import Site
+ from press.press.doctype.site_backup.site_backup import SiteBackup
+ from press.press.doctype.virtual_disk_snapshot.virtual_disk_snapshot import VirtualDiskSnapshot
+ from press.press.doctype.virtual_machine.virtual_machine import VirtualMachine
+
+
+StepStatus = Enum("StepStatus", ["Pending", "Running", "Skipped", "Success", "Failure"])
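+# Step methods return one of these. `Running` means the step has not finished yet;
+# it will be resumed later by a poll, an agent-job update hook, or a re-enqueue.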
+
+
+class PhysicalBackupRestoration(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.physical_backup_restoration_step.physical_backup_restoration_step import (
+ PhysicalBackupRestorationStep,
+ )
+
+ cleanup_completed: DF.Check
+ deactivate_site_during_restoration: DF.Check
+ destination_database: DF.Data
+ destination_server: DF.Link
+ device: DF.Data | None
+ disk_snapshot: DF.Link | None
+ duration: DF.Duration | None
+ end: DF.Datetime | None
+ is_failure_resolved: DF.Check
+ job: DF.Link | None
+ log_ansible_output: DF.Check
+ mount_point: DF.Data | None
+ physical_restoration_test: DF.Data | None
+ restore_specific_tables: DF.Check
+ site: DF.Link
+ site_backup: DF.Link
+ source_database: DF.Data
+ start: DF.Datetime | None
+ status: DF.Literal["Pending", "Scheduled", "Running", "Success", "Failure"]
+ steps: DF.Table[PhysicalBackupRestorationStep]
+ tables_to_restore: DF.JSON | None
+ volume: DF.Data | None
+ # end: auto-generated types
+
+ @property
+ def virtual_machine_name(self) -> str:
+ return frappe.get_value("Database Server", self.destination_server, "virtual_machine")
+
+ @property
+ def virtual_machine(self) -> VirtualMachine:
+ """Get virtual machine of destination server."""
+ return frappe.get_doc("Virtual Machine", self.virtual_machine_name)
+
+ @property
+ def migration_steps(self):
+ SyncStep = False
+ AsyncStep = True
+ GeneralStep = False
+ CleanupStep = True
+ Wait = True
+ NoWait = False
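+ # Each entry below: (method, is_async, wait_for_completion, is_cleanup_step)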
+ methods = [
+ (self.wait_for_pending_snapshot_to_be_completed, AsyncStep, NoWait, GeneralStep),
+ (self.create_volume_from_snapshot, SyncStep, NoWait, GeneralStep),
+ (self.wait_for_volume_to_be_available, SyncStep, Wait, GeneralStep),
+ (self.attach_volume_to_instance, SyncStep, NoWait, GeneralStep),
+ (self.create_mount_point, SyncStep, NoWait, GeneralStep),
+ (self.mount_volume_to_instance, SyncStep, NoWait, GeneralStep),
+ (self.allow_user_to_modify_db_files_permissions, SyncStep, NoWait, GeneralStep),
+ (self.change_permission_of_backup_directory, SyncStep, NoWait, GeneralStep),
+ (self.change_permission_of_database_directory, SyncStep, NoWait, GeneralStep),
+ (self.restore_database, AsyncStep, NoWait, GeneralStep),
+ (self.rollback_permission_of_database_directory, SyncStep, NoWait, CleanupStep),
+ (self.unmount_volume_from_instance, SyncStep, NoWait, CleanupStep),
+ (self.delete_mount_point, SyncStep, NoWait, CleanupStep),
+ (self.detach_volume_from_instance, SyncStep, NoWait, CleanupStep),
+ (self.wait_for_volume_to_be_detached, SyncStep, Wait, CleanupStep),
+ (self.delete_volume, SyncStep, NoWait, CleanupStep),
+ ]
+
+ if self.deactivate_site_during_restoration:
+ methods.insert(0, (self.deactivate_site, AsyncStep, NoWait, GeneralStep))
+
+ steps = []
+ for method, is_async, wait_for_completion, is_cleanup_step in methods:
+ steps.append(
+ {
+ "step": method.__doc__,
+ "method": method.__name__,
+ "is_async": is_async,
+ "wait_for_completion": wait_for_completion,
+ "is_cleanup_step": is_cleanup_step,
+ }
+ )
+ return steps
+
+ def before_insert(self):
+ self.validate_aws_only()
+ self.set_disk_snapshot()
+ self.validate_snapshot_region()
+ self.validate_snapshot_status()
+ self.cleanup_restorable_tables()
+
+ def after_insert(self):
+ self.set_mount_point()
+ self.add_steps()
+ self.save()
+
+ def on_update(self):
+ if self.has_value_changed("status") and self.status in ["Success", "Failure"]:
+ from press.press.doctype.site_update.site_update import (
+ process_physical_backup_restoration_status_update,
+ )
+
+ if self.deactivate_site_during_restoration and self.status == "Success":
+ self.activate_site()
+ frappe.db.set_value("Site", self.site, "status", "Active")
+
+ if self.deactivate_site_during_restoration and self.status == "Failure":
+ if self.is_db_files_modified_during_failed_restoration():
+ frappe.db.set_value("Site", self.site, "status", "Broken")
+ else:
+ self.activate_site()
+ frappe.db.set_value("Site", self.site, "status", "Active")
+
+ process_physical_backup_restoration_status_update(self.name)
+
+ if self.physical_restoration_test:
+ trigger_next_restoration(self.physical_restoration_test)
+
+ def validate_aws_only(self):
+ server_provider = frappe.db.get_value("Database Server", self.destination_server, "provider")
+ if server_provider != "AWS EC2":
+ frappe.throw("Only AWS hosted server is supported currently.")
+
+ def set_disk_snapshot(self):
+ if not self.disk_snapshot:
+ site_backup: SiteBackup = frappe.get_doc("Site Backup", self.site_backup)
+ if not site_backup.physical:
+ frappe.throw("Provided site backup is not physical backup.")
+
+ if site_backup.status != "Success" or site_backup.files_availability != "Available":
+ frappe.throw("Provided site backup is not available.")
+
+ if not site_backup.database_snapshot:
+ frappe.throw("Disk Snapshot is not available in site backup")
+
+ self.disk_snapshot = site_backup.database_snapshot
+ if not self.disk_snapshot:
+ frappe.throw("Disk Snapshot is not available in site backup")
+
+ def validate_snapshot_region(self):
+ snapshot_region = frappe.db.get_value("Virtual Disk Snapshot", self.disk_snapshot, "region")
+ if snapshot_region != self.virtual_machine.region:
+ frappe.throw("Snapshot and server should be in same region.")
+
+ def validate_snapshot_status(self):
+ snapshot_status = frappe.db.get_value("Virtual Disk Snapshot", self.disk_snapshot, "status")
+ if snapshot_status not in ("Pending", "Completed"):
+ frappe.throw("Snapshot status should be Pending or Completed.")
+
+ def cleanup_restorable_tables(self):
+ if not self.restore_specific_tables:
+ self.tables_to_restore = "[]"
+ return
+
+ tables_to_restore = []
+ with contextlib.suppress(Exception):
+ tables_to_restore = json.loads(self.tables_to_restore)
+
+ # If restore_specific_tables is checked, raise error if tables_to_restore is empty
+ if not tables_to_restore:
+ frappe.throw("You must provide at least one table to restore.")
+
+ def set_mount_point(self):
+ self.mount_point = f"/mnt/{self.name}"
+
+ def deactivate_site(self) -> StepStatus:
+ """Deactivate site"""
+ deactivate_site_job = frappe.db.get_value(
+ "Agent Job",
+ {"job_type": "Deactivate Site", "reference_doctype": self.doctype, "reference_name": self.name},
+ ["name", "status"],
+ )
+ if not deactivate_site_job:
+ site: Site = frappe.get_doc("Site", self.site)
+ agent = Agent(site.server)
+ agent.deactivate_site(site, reference_doctype=self.doctype, reference_name=self.name)
+ # Return `Running` so the current job can exit for now.
+ # Once the Deactivate Site agent job completes, its job-update hook
+ # will trigger this step again.
+ return StepStatus.Running
+
+ if deactivate_site_job[1] == "Success":
+ return StepStatus.Success
+
+ if deactivate_site_job[1] in ("Failure", "Delivery Failure"):
+ return StepStatus.Failure
+
+ return StepStatus.Running
+
+ def wait_for_pending_snapshot_to_be_completed(self) -> StepStatus:
+ """Wait for pending snapshot to be completed"""
+ snapshot: VirtualDiskSnapshot = frappe.get_doc("Virtual Disk Snapshot", self.disk_snapshot)
+ with contextlib.suppress(Exception):
+ # Don't fail this step due to a basic error like a timestamp mismatch;
+ # a background job will trigger this step again anyway
+ snapshot.sync()
+
+ if snapshot.status == "Completed":
+ return StepStatus.Success
+ if snapshot.status == "Pending":
+ # Return `Running` so the current job can exit for now.
+ # Once the snapshot status updates, a background job will trigger this step again.
+ return StepStatus.Running
+ return StepStatus.Failure
+
+ def create_volume_from_snapshot(self) -> StepStatus:
+ """Create volume from snapshot"""
+ snapshot: VirtualDiskSnapshot = frappe.get_doc("Virtual Disk Snapshot", self.disk_snapshot)
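+ # Provision the temporary volume with explicit IOPS/throughput so the restore copy is not throttled by volume defaults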
+ self.volume = snapshot.create_volume(
+ availability_zone=self.virtual_machine.availability_zone, throughput=300, iops=3000
+ )
+ self.add_comment(text=f"{self.volume} - Volume Created")
+ return StepStatus.Success
+
+ def wait_for_volume_to_be_available(self) -> StepStatus:
+ """Wait for volume to be available"""
+ status = self.virtual_machine.get_state_of_volume(self.volume)
+ # https://docs.aws.amazon.com/ebs/latest/userguide/ebs-describing-volumes.html
+ if status == "available":
+ return StepStatus.Success
+ if status == "creating":
+ return StepStatus.Running
+ return StepStatus.Failure
+
+ def attach_volume_to_instance(self) -> StepStatus:
+ """Attach volume to instance"""
+ # Use `for_update` to lock the record and avoid a race condition
+ # that would fail this step with VersionMismatch or TimestampMismatchError
+ virtual_machine: VirtualMachine = frappe.get_doc(
+ "Virtual Machine", self.virtual_machine_name, for_update=True
+ )
+ self.device = virtual_machine.attach_volume(self.volume, is_temporary_volume=True)
+ return StepStatus.Success
+
+ def create_mount_point(self) -> StepStatus:
+ """Create mount point"""
+ result = self.ansible_run(f"mkdir -p {self.mount_point}")
+ if result["status"] == "Success":
+ return StepStatus.Success
+ return StepStatus.Failure
+
+ def mount_volume_to_instance(self) -> StepStatus: # noqa: C901
+ """Mount volume to instance"""
+
+ """
+ > Find out the disk name
+
+ If the disk name is /dev/sdg, it might be renamed to /dev/xvdg in the instance.
+
+ Next, if the volume was created from a snapshot of a root volume, the volume will have multiple partitions.
+
+ > lsblk --json -o name,fstype,type,label,serial,size -b
+
+ > Dummy output
+
+ {
+ "blockdevices":[
+ { "name":"loop0", "fstype":null, "type": "loop", "label": null, "size": 16543383 },
+ { "name":"loop1", "fstype":null, "type": "loop", "label": null, "size": 16543383 },
+ { "name":"loop2", "fstype":null, "type": "loop", "label": null, "size": 16543383 },
+ { "name":"loop3", "fstype":null, "type": "loop", "label": null, "size": 16543383 },
+ { "name":"loop4", "fstype":null, "type": "loop", "label": null, "size": 16543383 },
+ {
+ "name":"xvda","fstype":null, "type": "disk", "label": null, "size": 4294966784
+ "children":[
+ {
+ "name":"xvda1",
+ "fstype":"ext4",
+ "type":"part",
+ "label":"cloudimg-rootfs",
+ "size": 4294966784
+ },
+ {
+ "name":"xvda14",
+ "fstype":null,
+ "type":"part",
+ "label":null,
+ "size": 123345
+ },
+ {
+ "name":"xvda15",
+ "fstype":"vfat",
+ "type":"part",
+ "label":"UEFI",
+ "size": 124553
+ }
+ ]
+ },
+ {"name":"nvme0n1", "fstype":null, "type":"disk", "label":null, "serial":"vol0784b4423604486ea", "size": 4294966784
+ "children": [
+ {"name":"nvme0n1p1", "fstype":"ext4", "type":"part", "label":"cloudimg-rootfs", "serial":null, "size": 4123906784},
+ {"name":"nvme0n1p14", "fstype":null, "type":"part", "label":null, "serial":null "size": 234232},
+ {"name":"nvme0n1p15", "fstype":"vfat", "type":"part", "label":"UEFI", "serial":null, "size": 124553}
+ ]
+ }
+ ]
+ }
+
+ """
+ result = self.ansible_run("lsblk --json -o name,fstype,type,label,serial,size -b")
+ if result["status"] != "Success":
+ return StepStatus.Failure
+
+ devices_info_str: str = result["output"]
+ devices_info = json.loads(devices_info_str)["blockdevices"]
+
+ disk_name = self.device.split("/")[-1] # /dev/sdf -> sdf
+
+ # If the disk name is sdf, it might be mounted as xvdf
+ # https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/device_naming.html#device-name-limits
+ possible_disks = [disk_name, "xvd{}".format(disk_name.lstrip("sd")[-1])]
+ disk_serial = self.volume.replace("-", "").lower()
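+ # e.g. volume "vol-0784b4423604486ea" appears as NVMe serial "vol0784b4423604486ea" (dashes stripped, lowercased)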
+ disk_partition_to_mount = None
+
+ for device_info in devices_info:
+ if device_info["type"] not in ["disk", "part"]:
+ continue
+
+ # Check for nvme disks
+ is_disk_found = (
+ device_info["name"].startswith("nvme") and device_info.get("serial") == disk_serial
+ )
+ # check for normal disks
+ if not is_disk_found:
+ for possible_disk in possible_disks:
+ if device_info["name"] == possible_disk:
+ is_disk_found = True
+ break
+
+ # If disk is not found, then continue to next disk
+ if not is_disk_found:
+ continue
+
+ # If the volume was created from a snapshot of a data volume,
+ # the volume will have only one partition.
+ if device_info["type"] == "part":
+ disk_partition_to_mount = "/dev/{}".format(device_info["name"])
+ break
+
+ if device_info["type"] == "disk":
+ children = device_info.get("children", [])
+ if len(children) == 0:
+ # Disk doesn't have any partitions, mount the disk directly
+ disk_partition_to_mount = "/dev/{}".format(device_info["name"])
+ else:
+ # Disk has multiple partitions, so find the correct partition
+ largest_partition_size = 1073741824 # 1GB | Disk partition should be larger than 1GB
+ largest_partition = None
+ # try to find the partition with label cloudimg-rootfs or old-rootfs
+ for child in children:
+ if child["size"] > largest_partition_size:
+ largest_partition_size = child["size"]
+ largest_partition = child["name"]
+
+ if child["label"] == "cloudimg-rootfs" or child["label"] == "old-rootfs":
+ disk_partition_to_mount = "/dev/{}".format(child["name"])
+ break
+
+ # If the partitions are not labeled, fall back to the largest partition
+ if not disk_partition_to_mount and largest_partition is not None:
+ disk_partition_to_mount = f"/dev/{largest_partition}"
+ break
+
+ if disk_partition_to_mount:
+ break
+
+ if not disk_partition_to_mount:
+ self.log_error(
+ title="Not able to find disk partition to mount",
+ message=f"Disk name: {disk_name}, Possible disks: {possible_disks} or with serial {disk_serial}",
+ )
+ return StepStatus.Failure
+
+ mount_response = self.ansible_run(f"mount {disk_partition_to_mount} {self.mount_point}")
+ if mount_response["status"] != "Success":
+ return StepStatus.Failure
+ return StepStatus.Success
+
+ def allow_user_to_modify_db_files_permissions(self) -> StepStatus:
+ """Allow user to modify db files permissions"""
+
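+ # Write a sudoers drop-in so the frappe user can chown restored db files back to mysql:mysql without a password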
+ result = self.ansible_run(
+ r'echo "frappe ALL=(ALL) NOPASSWD: /bin/chown mysql\:mysql /var/lib/mysql/*/*" > /etc/sudoers.d/frappe-mysql',
+ raw_params=True,
+ )
+ if result["status"] == "Success":
+ return StepStatus.Success
+ return StepStatus.Failure
+
+ def change_permission_of_backup_directory(self) -> StepStatus:
+ """Change permission of backup files"""
+ base_path = os.path.join(self.mount_point, "var/lib/mysql")
+ result = self.ansible_run(f"chmod 777 {base_path}")
+ if result["status"] == "Success":
+ db_path = os.path.join(self.mount_point, "var/lib/mysql", self.source_database)
+ result = self.ansible_run(f"chmod -R 777 {db_path}")
+ if result["status"] == "Success":
+ return StepStatus.Success
+ return StepStatus.Failure
+
+ def change_permission_of_database_directory(self) -> StepStatus:
+ """Change permission of database directory"""
+ result = self.ansible_run(f"chmod 770 /var/lib/mysql/{self.destination_database}")
+ if result["status"] == "Success":
+ return StepStatus.Success
+ return StepStatus.Failure
+
+ def restore_database(self) -> StepStatus:
+ """Restore database"""
+ if not self.job:
+ site = frappe.get_doc("Site", self.site)
+ agent = Agent(self.destination_server, "Database Server")
+ self.job = agent.physical_restore_database(site, self)
+ return StepStatus.Running
+ job_status = frappe.db.get_value("Agent Job", self.job, "status")
+ if job_status in ["Undelivered", "Running", "Pending"]:
+ return StepStatus.Running
+ if job_status == "Success":
+ return StepStatus.Success
+ return StepStatus.Failure
+
+ def rollback_permission_of_database_directory(self) -> StepStatus:
+ """Rollback permission of database directory"""
+
+ # Docs > https://mariadb.com/kb/en/specifying-permissions-for-schema-data-directories-and-tables/
+ # Directory > 700 and File > 660
+
+ result = self.ansible_run(
+ f"chmod -R 660 /var/lib/mysql/{self.destination_database} && chmod 700 /var/lib/mysql/{self.destination_database} && chown -R mysql:mysql /var/lib/mysql/{self.destination_database}"
+ )
+ if result["status"] == "Success":
+ return StepStatus.Success
+ return StepStatus.Failure
+
+ def unmount_volume_from_instance(self) -> StepStatus:
+ """Unmount volume from instance"""
+ if self.get_step_status(self.mount_volume_to_instance) != StepStatus.Success.name:
+ return StepStatus.Success
+ response = self.ansible_run(f"umount {self.mount_point}")
+ if response["status"] != "Success":
+ return StepStatus.Failure
+ return StepStatus.Success
+
+ def delete_mount_point(self) -> StepStatus:
+ """Delete mount point"""
+ if not self.mount_point or not self.mount_point.startswith("/mnt"):
+ frappe.throw("Mount point is not valid.")
+ # check if mount point was created
+ if self.get_step_status(self.create_mount_point) != "Success":
+ return StepStatus.Success
+ response = self.ansible_run(f"rm -rf {self.mount_point}")
+ if response["status"] != "Success":
+ return StepStatus.Failure
+ return StepStatus.Success
+
+ def detach_volume_from_instance(self) -> StepStatus:
+ """Detach volume from instance"""
+ # check if volume was attached
+ if not self.volume or self.get_step_status(self.attach_volume_to_instance) != "Success":
+ return StepStatus.Success
+ state = self.virtual_machine.get_state_of_volume(self.volume)
+ if state != "in-use":
+ return StepStatus.Success
+
+ # Use `for_update` to lock the record and avoid a race condition
+ # that would fail this step with VersionMismatch or TimestampMismatchError
+ virtual_machine: VirtualMachine = frappe.get_doc(
+ "Virtual Machine", self.virtual_machine_name, for_update=True
+ )
+ virtual_machine.detach(self.volume)
+ return StepStatus.Success
+
+ def wait_for_volume_to_be_detached(self) -> StepStatus:
+ """Wait for volume to be detached"""
+ if not self.volume:
+ return StepStatus.Success
+ state = self.virtual_machine.get_state_of_volume(self.volume)
+ if state in ["available", "deleting", "deleted"]:
+ with contextlib.suppress(Exception):
+ self.virtual_machine.sync()
+ return StepStatus.Success
+ if state == "error":
+ return StepStatus.Failure
+ return StepStatus.Running
+
+ def delete_volume(self) -> StepStatus:
+ """Delete volume"""
+ if (
+ not self.volume
+ or self.get_step_status(self.create_volume_from_snapshot) != StepStatus.Success.name
+ ):
+ return StepStatus.Success
+ state = self.virtual_machine.get_state_of_volume(self.volume)
+ if state in ["deleting", "deleted"]:
+ return StepStatus.Success
+ self.virtual_machine.client().delete_volume(VolumeId=self.volume)
+ self.add_comment(text=f"{self.volume} - Volume Deleted")
+ return StepStatus.Success
+
+ def activate_site(self):
+ """Activate site"""
+ site: Site = frappe.get_doc("Site", self.site)
+ agent = Agent(site.server)
+ agent.activate_site(site, reference_doctype=self.doctype, reference_name=self.name)
+
+ def is_db_files_modified_during_failed_restoration(self):
+ if self.status != "Failure":
+ return False
+ # Check if the Restore Database job was created
+ if not self.job:
+ return False
+ # Check if Restore Database job has failed
+ job_status = frappe.db.get_value("Agent Job", self.job, "status")
+ if job_status == "Failure":
+ job_steps = frappe.get_all(
+ "Agent Job Step",
+ filters={
+ "agent_job": self.job,
+ },
+ fields=["step_name", "status"],
+ order_by="creation asc",
+ )
+ """
+ [
+ {'step_name': 'Validate Backup Files', 'status': 'Success'},
+ {'step_name': 'Validate Connection to Target Database', 'status': 'Success'},
+ {'step_name': 'Warmup MyISAM Files', 'status': 'Success'},
+ {'step_name': 'Check and Fix MyISAM Table Files', 'status': 'Success'},
+ {'step_name': 'Warmup InnoDB Files', 'status': 'Success'},
+ {'step_name': 'Prepare Database for Restoration', 'status': 'Success'},
+ {'step_name': 'Create Tables from Table Schema', 'status': 'Success'},
+ {'step_name': 'Discard InnoDB Tablespaces', 'status': 'Success'},
+ {'step_name': 'Copying InnoDB Table Files', 'status': 'Success'},
+ {'step_name': 'Import InnoDB Tablespaces', 'status': 'Success'},
+ {'step_name': 'Hold Write Lock on MyISAM Tables', 'status': 'Success'},
+ {'step_name': 'Copying MyISAM Table Files', 'status': 'Success'},
+ {'step_name': 'Unlock All Tables', 'status': 'Success'}
+ ]
+ """
+ # Check at which step the job failed
+ # Any failure after `Prepare Database for Restoration` means a full restoration is required
+ first_failed_step = None
+ for step in job_steps:
+ if step["status"] == "Failure":
+ first_failed_step = step
+ break
+ if first_failed_step and first_failed_step["step_name"] in [
+ "Create Tables from Table Schema",
+ "Discard InnoDB Tablespaces",
+ "Copying InnoDB Table Files",
+ "Import InnoDB Tablespaces",
+ "Hold Write Lock on MyISAM Tables",
+ "Copying MyISAM Table Files",
+ "Unlock All Tables",
+ ]:
+ return True
+ return False
+
+ def get_step_status(self, step_method: Callable) -> str:
+ step = self.get_step_by_method(step_method.__name__)
+ return step.status if step else "Pending"
+
+ def add_steps(self):
+ for step in self.migration_steps:
+ step.update({"status": "Pending"})
+ self.append("steps", step)
+
+ @frappe.whitelist()
+ def execute(self):
+ if self.status == "Scheduled":
+ frappe.msgprint("Restoration is already in Scheduled state. It will be executed soon.")
+ return
+ # If restore_specific_tables is set but there are no tables to restore, skip the restoration
+ if self.restore_specific_tables:
+ try:
+ restorable_tables = json.loads(self.tables_to_restore)
+ except Exception:
+ restorable_tables = []
+ if len(restorable_tables) == 0:
+ self.status = "Success"
+ for step in self.steps:
+ step.status = "Skipped"
+ self.save()
+ return
+ # Otherwise, continue with the restoration
+ # Just set the status to Scheduled; the scheduler will pick it up
+ self.status = "Scheduled"
+ self.start = frappe.utils.now_datetime()
+ self.save()
+
+ def fail(self, save: bool = True) -> None:
+ self.status = "Failure"
+ for step in self.steps:
+ if step.status == "Pending":
+ step.status = "Skipped"
+ self.end = frappe.utils.now_datetime()
+ self.duration = frappe.utils.cint((self.end - self.start).total_seconds())
+ if save:
+ self.save(ignore_version=True)
+ self.cleanup()
+
+ def finish(self) -> None:
+ # if status is already Success or Failure, then don't update the status and durations
+ if self.status not in ("Success", "Failure"):
+ self.status = "Success" if self.is_restoration_steps_successful() else "Failure"
+ self.end = frappe.utils.now_datetime()
+ self.duration = frappe.utils.cint((self.end - self.start).total_seconds())
+
+ self.cleanup_completed = self.is_cleanup_steps_successful()
+ self.save()
+
+ @frappe.whitelist()
+ def next(self) -> None:
+ if self.status != "Running" and self.status not in ("Success", "Failure"):
+ self.status = "Running"
+ self.save(ignore_version=True)
+
+ next_step_to_run = None
+
+ # Check if current_step is running
+ current_running_step = self.current_running_step
+ if current_running_step:
+ next_step_to_run = current_running_step
+ elif self.next_step:
+ next_step_to_run = self.next_step
+
+ if not next_step_to_run:
+ # We've executed everything
+ self.finish()
+ return
+
+ if next_step_to_run.method == self.rollback_permission_of_database_directory.__name__:
+ # That means `Restore Database` step has been executed
+ self.finish()
+
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "execute_step",
+ step_name=next_step_to_run.name,
+ enqueue_after_commit=True,
+ deduplicate=next_step_to_run.wait_for_completion
+ is False, # Don't deduplicate if wait_for_completion is True
+ job_id=f"physical_restoration||{self.name}||{next_step_to_run.name}",
+ )
+
+ @frappe.whitelist()
+ def cleanup(self):
+ is_cleanup_required = False
+ for step in self.steps:
+ # Mark the pending non-cleanup steps as skipped
+ if not step.is_cleanup_step and step.status == "Pending":
+ step.status = "Skipped"
+
+ # Mark the cleanup steps with non-failure status as pending
+ if step.is_cleanup_step and step.status != "Failure":
+ step.status = "Pending"
+ is_cleanup_required = True
+
+ if is_cleanup_required:
+ self.next()
+
+ @frappe.whitelist()
+ def retry(self):
+ # Check if all the cleanup steps are completed
+ for step in self.steps:
+ if not step.is_cleanup_step:
+ continue
+ if step.status not in ["Success", "Skipped"]:
+ frappe.throw("Cleanup steps are not completed. Please clean up before retrying.")
+ # Reset the states
+ self.status = "Scheduled"
+ self.start = frappe.utils.now_datetime()
+ self.volume = None
+ self.end = None
+ self.duration = None
+ self.job = None
+ self.cleanup_completed = False
+ for step in self.steps:
+ step.status = "Pending"
+ self.save(ignore_version=True)
+
+ @frappe.whitelist()
+ def force_continue(self) -> None:
+ first_failed_step: PhysicalBackupRestorationStep | None = None
+ # Mark all failed and skipped steps as pending
+ for step in self.steps:
+ if step.status in ("Failure", "Skipped"):
+ if not first_failed_step:
+ first_failed_step = step
+ step.status = "Pending"
+
+ # If the run failed in the Restore Database step, reset the job reference
+ if first_failed_step and first_failed_step.method == self.restore_database.__name__:
+ self.job = None
+ self.next()
+
+ @frappe.whitelist()
+ def force_fail(self) -> None:
+ # Mark all pending steps as failure
+ for step in self.steps:
+ if step.status == "Pending":
+ step.status = "Failure"
+ self.status = "Failure"
+ self.save()
+
+ @property
+ def current_running_step(self) -> PhysicalBackupRestorationStep | None:
+ for step in self.steps:
+ if step.status == "Running":
+ return step
+ return None
+
+ @property
+ def next_step(self) -> PhysicalBackupRestorationStep | None:
+ for step in self.steps:
+ if step.status == "Pending":
+ return step
+ return None
+
+ def is_restoration_steps_successful(self) -> bool:
+ return all(step.status == "Success" for step in self.steps if not step.is_cleanup_step)
+
+ def is_cleanup_steps_successful(self) -> bool:
+ if self.cleanup_completed:
+ return True
+
+ # All the cleanup steps need to be Skipped or Success
+ # Anything else means the cleanup steps are not completed
+ return all(step.status in ("Skipped", "Success") for step in self.steps if step.is_cleanup_step)
+
+ @frappe.whitelist()
+ def execute_step(self, step_name):
+ step = self.get_step(step_name)
+
+ if not step.start:
+ step.start = frappe.utils.now_datetime()
+ try:
+ result = getattr(self, step.method)()
+ step.status = result.name
+ """
+ If the step is async and function has returned Running,
+ Then save the document and return
+
+ Some external process will resume the job later
+ """
+ if step.is_async and result == StepStatus.Running:
+ self.save(ignore_version=True)
+ return
+
+ """
+ If the step is sync and function is marked to wait for completion,
+ Then wait for the function to complete
+ """
+ if step.wait_for_completion and result == StepStatus.Running:
+ step.attempts = step.attempts + 1 if step.attempts else 1
+ self.save(ignore_version=True)
+ time.sleep(1)
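+ # Fall through: the step stays Running, so `next()` below re-enqueues it,
+ # which makes sync wait steps effectively polled once per attempt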
+
+ except Exception:
+ step.status = "Failure"
+ step.traceback = frappe.get_traceback(with_context=True)
+
+ step.end = frappe.utils.now_datetime()
+ step.duration = (step.end - step.start).total_seconds()
+
+ if step.status == "Failure":
+ self.fail(save=True)
+ else:
+ self.save(ignore_version=True)
+ self.next()
+
+ def get_step(self, step_name) -> PhysicalBackupRestorationStep | None:
+ for step in self.steps:
+ if step.name == step_name:
+ return step
+ return None
+
+ def get_step_by_method(self, method_name) -> PhysicalBackupRestorationStep | None:
+ for step in self.steps:
+ if step.method == method_name:
+ return step
+ return None
+
+ def ansible_run(self, command, raw_params: bool = False):
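+ # The trailing comma makes Ansible treat the string as an inline host list rather than an inventory file path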
+ inventory = f"{self.virtual_machine.public_ip_address},"
+ result = AnsibleAdHoc(sources=inventory).run(command, self.name, raw_params=raw_params)[0]
+ self.add_command(command, result)
+ return result
+
+ def add_command(self, command, result):
+ if not self.log_ansible_output:
+ return
+ pretty_result = json.dumps(result, indent=2, sort_keys=True, default=str)
+ comment = f"
{command}{pretty_result}"
+ self.add_comment(text=comment)
+
+
+def process_scheduled_restorations(): # noqa: C901
+ start_time = time.time()
+ scheduled_restorations = frappe.get_list(
+ "Physical Backup Restoration", filters={"status": "Scheduled"}, pluck="name", order_by="creation asc"
+ )
+ max_concurrent_restorations = frappe.utils.cint(
+ frappe.get_cached_value("Press Settings", "Press Settings", "max_concurrent_physical_restorations")
+ )
+ db_servers_with_max_running_concurrent_restorations = set()
+ db_servers_with_incident = set(
+ frappe.db.get_all(
+ "Incident",
+ filters={
+ "resource_type": "Database Server",
+ "status": ["in", ["Confirmed", "Acknowledged", "Investigating"]],
+ },
+ pluck="resource",
+ )
+ )
+
+ for restoration in scheduled_restorations:
+ if time.time() - start_time > 25:
+ """
+ The job runs every 30 seconds
+ So, if we already took 25 seconds, then we should just stop processing
+ and let the next job run
+ """
+ break
+ try:
+ doc: PhysicalBackupRestoration = frappe.get_doc("Physical Backup Restoration", restoration)
+ """
+ Avoid starting a restoration if the DB server has an active incident
+ """
+ if doc.destination_server in db_servers_with_incident:
+ continue
+
+ """
+ Check if DB server has `enable_physical_backup` checked
+ If not, skip the restoration
+ """
+ if not frappe.utils.cint(
+ frappe.db.get_value("Database Server", doc.destination_server, "enable_physical_backup")
+ ):
+ continue
+
+ """
+ Count the `Success` or `Failure` restorations with cleanup pending on the db server.
+ If there are more than 4 such jobs, don't start a new one.
+
+ Unless cleanup happens, the temporary volumes are left behind in EBS,
+ which can cause issues in later restorations.
+ """
+ if (
+ frappe.db.count(
+ "Physical Backup Restoration",
+ filters={
+ "status": ["in", ["Success", "Failure"]],
+ "cleanup_completed": 0,
+ "destination_server": doc.destination_server,
+ },
+ )
+ > 4
+ ):
+ continue
+
+ """
+ Count the `Running` restorations on the db server.
+ Start this one only if the count is below `max_concurrent_restorations`.
+ """
+ running_restorations = frappe.db.count(
+ "Physical Backup Restoration",
+ filters={"status": "Running", "destination_server": doc.destination_server},
+ )
+ if running_restorations > max_concurrent_restorations:
+ db_servers_with_max_running_concurrent_restorations.add(doc.destination_server)
+ continue
+
+ if doc.status != "Scheduled":
+ continue
+
+ doc.next()
+ frappe.db.commit()
+ except Exception:
+ log_error(title="Physical Backup Restoration Start Error", physical_restoration=restoration)
+ frappe.db.rollback()
+
+
+def process_job_update(job):
+ if job.reference_doctype != "Physical Backup Restoration":
+ return
+
+ doc: PhysicalBackupRestoration = frappe.get_doc("Physical Backup Restoration", job.reference_name)
+ if job.status in ["Success", "Failure", "Delivery Failure"]:
+ doc.next()
+
+
+def process_physical_backup_restoration_deactivate_site_job_update(job):
+ if job.reference_doctype != "Physical Backup Restoration":
+ return
+ if job.status not in ["Success", "Failure", "Delivery Failure"]:
+ return
+ doc: PhysicalBackupRestoration = frappe.get_doc("Physical Backup Restoration", job.reference_name)
+ doc.next()
+
+
+def get_physical_backup_restoration_steps(name: str) -> list[dict]:
+ """
+ {
+ "title": "Step Name",
+ "status": "Success",
+ "output": "Output",
+ "stage": "Restore Backup"
+ }
+ """
+ steps = frappe.get_all(
+ "Physical Backup Restoration Step",
+ filters={"parent": name},
+ fields=["step", "status", "name", "creation"],
+ order_by="idx asc",
+ )
+ job_name = frappe.db.get_value("Physical Backup Restoration", name, "job")
+ steps = [
+ {
+ "title": step["step"],
+ "status": step["status"],
+ "output": "",
+ "stage": "Restore Backup",
+ "name": step["name"],
+ }
+ for step in steps
+ ]
+ job_steps = []
+ if job_name:
+ job_steps = frappe.get_all(
+ "Agent Job Step",
+ filters={"agent_job": job_name},
+ fields=["output", "step_name", "status", "name"],
+ order_by="creation asc",
+ )
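+ # Splice the agent job's detailed steps in place of the single "Restore database" placeholder step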
+ if steps:
+ index_of_restore_database_step = None
+ for index, step in enumerate(steps):
+ if step["title"] == "Restore database":
+ index_of_restore_database_step = index
+ break
+ if index_of_restore_database_step is not None:
+ job_steps = [
+ {
+ "title": step.get("step_name"),
+ "status": step.get("status"),
+ "output": step.get("output"),
+ "stage": "Restore Backup",
+ }
+ for step in job_steps
+ ]
+ steps = (
+ steps[:index_of_restore_database_step]
+ + job_steps
+ + steps[index_of_restore_database_step + 1 :]
+ )
+ return steps
diff --git a/press/press/doctype/physical_backup_restoration/test_physical_backup_restoration.py b/press/press/doctype/physical_backup_restoration/test_physical_backup_restoration.py
new file mode 100644
index 00000000000..f8500c30505
--- /dev/null
+++ b/press/press/doctype/physical_backup_restoration/test_physical_backup_restoration.py
@@ -0,0 +1,14 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class IntegrationTestPhysicalBackupRestoration(FrappeTestCase):
+ """
+ Integration tests for PhysicalBackupRestoration.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/physical_backup_restoration_step/__init__.py b/press/press/doctype/physical_backup_restoration_step/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/physical_backup_restoration_step/physical_backup_restoration_step.json b/press/press/doctype/physical_backup_restoration_step/physical_backup_restoration_step.json
new file mode 100644
index 00000000000..cfd5de7307e
--- /dev/null
+++ b/press/press/doctype/physical_backup_restoration_step/physical_backup_restoration_step.json
@@ -0,0 +1,115 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-01-10 13:15:34.497717",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "step",
+ "status",
+ "method",
+ "column_break_ahqu",
+ "start",
+ "end",
+ "duration",
+ "column_break_qtmf",
+ "is_cleanup_step",
+ "is_async",
+ "wait_for_completion",
+ "attempts",
+ "section_break_vyao",
+ "traceback"
+ ],
+ "fields": [
+ {
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Status",
+ "options": "Pending\nScheduled\nRunning\nSkipped\nSuccess\nFailure",
+ "reqd": 1
+ },
+ {
+ "fieldname": "step",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Step",
+ "reqd": 1
+ },
+ {
+ "fieldname": "method",
+ "fieldtype": "Data",
+ "label": "Method",
+ "reqd": 1
+ },
+ {
+ "fieldname": "column_break_ahqu",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "column_break_qtmf",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "start",
+ "fieldtype": "Datetime",
+ "label": "Start"
+ },
+ {
+ "fieldname": "end",
+ "fieldtype": "Datetime",
+ "label": "End"
+ },
+ {
+ "fieldname": "duration",
+ "fieldtype": "Duration",
+ "label": "Duration"
+ },
+ {
+ "fieldname": "section_break_vyao",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "traceback",
+ "fieldtype": "Code",
+ "label": "Traceback"
+ },
+ {
+ "default": "0",
+ "fieldname": "is_async",
+ "fieldtype": "Check",
+ "label": "Is Async"
+ },
+ {
+ "default": "0",
+ "fieldname": "is_cleanup_step",
+ "fieldtype": "Check",
+ "label": "Is Cleanup Step"
+ },
+ {
+ "default": "0",
+ "fieldname": "wait_for_completion",
+ "fieldtype": "Check",
+ "label": "Wait For Completion"
+ },
+ {
+ "default": "0",
+ "fieldname": "attempts",
+ "fieldtype": "Int",
+ "label": "Attempts"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2025-03-15 17:35:31.824500",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Physical Backup Restoration Step",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/physical_backup_restoration_step/physical_backup_restoration_step.py b/press/press/doctype/physical_backup_restoration_step/physical_backup_restoration_step.py
new file mode 100644
index 00000000000..a8c6860291c
--- /dev/null
+++ b/press/press/doctype/physical_backup_restoration_step/physical_backup_restoration_step.py
@@ -0,0 +1,37 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+from __future__ import annotations
+
+import frappe
+from frappe.model.document import Document
+
+
+class PhysicalBackupRestorationStep(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ attempts: DF.Int
+ duration: DF.Duration | None
+ end: DF.Datetime | None
+ is_async: DF.Check
+ is_cleanup_step: DF.Check
+ method: DF.Data
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ start: DF.Datetime | None
+ status: DF.Literal["Pending", "Scheduled", "Running", "Skipped", "Success", "Failure"]
+ step: DF.Data
+ traceback: DF.Code | None
+ wait_for_completion: DF.Check
+ # end: auto-generated types
+
+ def validate(self):
+ if self.is_async and self.wait_for_completion:
+ frappe.throw("Cannot wait for completion on async kind of step")
diff --git a/press/press/doctype/physical_restoration_test/__init__.py b/press/press/doctype/physical_restoration_test/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/physical_restoration_test/physical_restoration_test.js b/press/press/doctype/physical_restoration_test/physical_restoration_test.js
new file mode 100644
index 00000000000..7e30cc47313
--- /dev/null
+++ b/press/press/doctype/physical_restoration_test/physical_restoration_test.js
@@ -0,0 +1,27 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('Physical Restoration Test', {
+ refresh(frm) {
+ if (frm.is_new()) {
+ return;
+ }
+
+ [
+ [__('Sync'), 'sync', false],
+ [__('Start / Resume'), 'start', true],
+ [__('Reset Failed Restorations'), 'reset_failed_restorations', true],
+ ].forEach(([label, method, grouped]) => {
+ frm.add_custom_button(
+ label,
+ () => {
+ frappe.confirm(
+ `Are you sure you want to ${label.toLowerCase()}?`,
+ () => frm.call(method).then(() => frm.refresh()),
+ );
+ },
+ grouped ? __('Actions') : null,
+ );
+ });
+ },
+});
diff --git a/press/press/doctype/physical_restoration_test/physical_restoration_test.json b/press/press/doctype/physical_restoration_test/physical_restoration_test.json
new file mode 100644
index 00000000000..55d7ceb9367
--- /dev/null
+++ b/press/press/doctype/physical_restoration_test/physical_restoration_test.json
@@ -0,0 +1,98 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-02-18 14:30:59.285081",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "backup_group",
+ "test_site",
+ "max_restorations",
+ "column_break_ctsy",
+ "destination_database",
+ "destination_server",
+ "completed",
+ "section_break_nipr",
+ "results"
+ ],
+ "fields": [
+ {
+ "fieldname": "column_break_ctsy",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "test_site",
+ "fieldtype": "Link",
+ "label": "Test Site",
+ "options": "Site",
+ "reqd": 1
+ },
+ {
+ "fieldname": "section_break_nipr",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "results",
+ "fieldtype": "Table",
+ "label": "Results",
+ "options": "Physical Restoration Test Result"
+ },
+ {
+ "fetch_from": "test_site.database_name",
+ "fieldname": "destination_database",
+ "fieldtype": "Data",
+ "label": " Destination Database"
+ },
+ {
+ "fieldname": "destination_server",
+ "fieldtype": "Link",
+ "label": "Destination Server",
+ "options": "Database Server",
+ "read_only": 1
+ },
+ {
+ "fieldname": "max_restorations",
+ "fieldtype": "Int",
+ "label": "Max Restorations",
+ "reqd": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "completed",
+ "fieldtype": "Check",
+ "label": "Completed"
+ },
+ {
+ "fieldname": "backup_group",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Backup Group",
+ "options": "Physical Backup Group",
+ "reqd": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2025-02-18 16:41:44.401442",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Physical Restoration Test",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/physical_restoration_test/physical_restoration_test.py b/press/press/doctype/physical_restoration_test/physical_restoration_test.py
new file mode 100644
index 00000000000..a1a8797ba2a
--- /dev/null
+++ b/press/press/doctype/physical_restoration_test/physical_restoration_test.py
@@ -0,0 +1,134 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import frappe
+from frappe.model.document import Document
+
+if TYPE_CHECKING:
+ from press.press.doctype.physical_backup_group.physical_backup_group import PhysicalBackupGroup
+ from press.press.doctype.site_update.site_update import PhysicalBackupRestoration
+
+
+class PhysicalRestorationTest(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.physical_restoration_test_result.physical_restoration_test_result import (
+ PhysicalRestorationTestResult,
+ )
+
+ backup_group: DF.Link
+ completed: DF.Check
+ destination_database: DF.Data | None
+ destination_server: DF.Link | None
+ max_restorations: DF.Int
+ results: DF.Table[PhysicalRestorationTestResult]
+ test_site: DF.Link
+ # end: auto-generated types
+
+ def validate(self):
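+		# Default the destination to the database server backing the test site's app server.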
+ if not self.destination_server:
+ self.destination_server = frappe.get_value(
+ "Server", frappe.get_value("Site", self.test_site, "server"), "database_server"
+ )
+
+ def after_insert(self):
+ backup_group: PhysicalBackupGroup = frappe.get_doc("Physical Backup Group", self.backup_group)
+ # set max restorations
+ if not self.max_restorations or self.max_restorations > len(backup_group.site_backups):
+ self.max_restorations = len(backup_group.site_backups)
+
+ # populate results table
+ records = backup_group.site_backups[: self.max_restorations]
+ for record in records:
+ self.append(
+ "results",
+ {
+ "site": record.site,
+ "db_size_mb": record.db_size,
+ "restore_record": self._create_restoration_record(record.backup).name,
+ "status": "Pending",
+ },
+ )
+
+ self.save()
+
+ def _create_restoration_record(self, site_backup: str) -> PhysicalBackupRestoration:
+ return frappe.get_doc(
+ {
+ "doctype": "Physical Backup Restoration",
+ "site": self.test_site,
+ "status": "Pending",
+ "site_backup": site_backup,
+ "source_database": frappe.db.get_value("Site Backup", site_backup, "database_name"),
+ "destination_database": self.destination_database,
+ "destination_server": self.destination_server,
+ "restore_specific_tables": False,
+ "tables_to_restore": "[]",
+ "physical_restoration_test": self.name,
+ }
+ ).insert(ignore_permissions=True)
+
+ @frappe.whitelist()
+ def start(self):
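+		# Restorations run one at a time: refresh statuses, bail if one is still running, else start the next pending record.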
+ self.sync()
+ record = None
+ # check if there is any running restoration
+ for result in self.results:
+ if result.status == "Running":
+ return
+
+ for result in self.results:
+ if result.status == "Pending":
+ record = result
+ break
+ if record:
+ restore_record: PhysicalBackupRestoration = frappe.get_doc(
+ "Physical Backup Restoration", record.restore_record
+ )
+ restore_record.execute()
+ record.status = "Running"
+ record.save()
+ else:
+ self.completed = True
+ self.save()
+ frappe.throw("No pending restoration found")
+
+ @frappe.whitelist()
+ def sync(self):
+ for result in self.results:
+ result.save()
+
+ @frappe.whitelist()
+ def reset_failed_restorations(self):
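+		# Re-queue each failed restoration with a fresh restoration record against the same backup.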
+ for result in self.results:
+ if result.status == "Failure":
+ result.status = "Pending"
+ # find the backup from the previous restoration
+ site_backup = frappe.db.get_value(
+ "Physical Backup Restoration", result.restore_record, "site_backup"
+ )
+ result.restore_record = self._create_restoration_record(site_backup).name
+ result.duration = None
+ result.save()
+
+
+def trigger_next_restoration(record_id: str):
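+	# Kick off the next pending restoration for the test; errors are logged so callers are never interrupted.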
+ if not frappe.db.exists("Physical Restoration Test", record_id):
+ return
+ record: PhysicalRestorationTest = frappe.get_doc("Physical Restoration Test", record_id)
+ try:
+ record.start()
+ except Exception:
+ frappe.log_error("Physical Restoration Test Exception")
diff --git a/press/press/doctype/physical_restoration_test/test_physical_restoration_test.py b/press/press/doctype/physical_restoration_test/test_physical_restoration_test.py
new file mode 100644
index 00000000000..7e7f7d3151e
--- /dev/null
+++ b/press/press/doctype/physical_restoration_test/test_physical_restoration_test.py
@@ -0,0 +1,14 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class IntegrationTestPhysicalRestorationTest(FrappeTestCase):
+ """
+ Integration tests for PhysicalRestorationTest.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/physical_restoration_test_result/__init__.py b/press/press/doctype/physical_restoration_test_result/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/physical_restoration_test_result/physical_restoration_test_result.json b/press/press/doctype/physical_restoration_test_result/physical_restoration_test_result.json
new file mode 100644
index 00000000000..ba106755c5c
--- /dev/null
+++ b/press/press/doctype/physical_restoration_test_result/physical_restoration_test_result.json
@@ -0,0 +1,69 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-02-18 15:18:37.332620",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "status",
+ "duration",
+ "db_size_mb",
+ "site",
+ "restore_record"
+ ],
+ "fields": [
+ {
+ "fieldname": "site",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Site",
+ "options": "Site",
+ "reqd": 1
+ },
+ {
+ "fieldname": "db_size_mb",
+ "fieldtype": "Int",
+ "in_list_view": 1,
+ "label": "DB Size (MB)",
+ "reqd": 1
+ },
+ {
+ "fieldname": "restore_record",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Restore Record",
+ "options": "Physical Backup Restoration",
+ "reqd": 1
+ },
+ {
+ "fetch_from": "restore_record.duration",
+ "fieldname": "duration",
+ "fieldtype": "Duration",
+ "in_list_view": 1,
+ "label": "Duration",
+ "read_only": 1
+ },
+ {
+ "fetch_from": "restore_record.status",
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Status",
+ "options": "Pending\nRunning\nSuccess\nFailure",
+ "read_only": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2025-02-18 16:59:59.685437",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Physical Restoration Test Result",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/physical_restoration_test_result/physical_restoration_test_result.py b/press/press/doctype/physical_restoration_test_result/physical_restoration_test_result.py
new file mode 100644
index 00000000000..dd8acb4fc22
--- /dev/null
+++ b/press/press/doctype/physical_restoration_test_result/physical_restoration_test_result.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+from __future__ import annotations
+
+# import frappe
+from frappe.model.document import Document
+
+
+class PhysicalRestorationTestResult(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ db_size_mb: DF.Int
+ duration: DF.Duration | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ restore_record: DF.Link
+ site: DF.Link
+ status: DF.Literal["Pending", "Running", "Success", "Failure"]
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/plan/plan.js b/press/press/doctype/plan/plan.js
deleted file mode 100644
index cf745086170..00000000000
--- a/press/press/doctype/plan/plan.js
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2020, Frappe and contributors
-// For license information, please see license.txt
-
-frappe.ui.form.on('Plan', {
- // refresh: function(frm) {
- // }
-});
diff --git a/press/press/doctype/plan/plan.json b/press/press/doctype/plan/plan.json
deleted file mode 100644
index eaf413bc7bc..00000000000
--- a/press/press/doctype/plan/plan.json
+++ /dev/null
@@ -1,230 +0,0 @@
-{
- "actions": [],
- "allow_rename": 1,
- "autoname": "Prompt",
- "creation": "2022-01-28 20:07:37.055861",
- "doctype": "DocType",
- "editable_grid": 1,
- "engine": "InnoDB",
- "field_order": [
- "enabled",
- "section_break_2",
- "document_type",
- "plan_title",
- "interval",
- "column_break_5",
- "price_usd",
- "price_inr",
- "features_section",
- "cpu_time_per_day",
- "max_database_usage",
- "max_storage_usage",
- "column_break_13",
- "is_trial_plan",
- "offsite_backups",
- "private_benches",
- "database_access",
- "servers",
- "cluster",
- "instance_type",
- "column_break_21",
- "vcpu",
- "memory",
- "disk",
- "roles_section",
- "roles"
- ],
- "fields": [
- {
- "fieldname": "plan_title",
- "fieldtype": "Data",
- "label": "Plan Title"
- },
- {
- "default": "1",
- "fieldname": "enabled",
- "fieldtype": "Check",
- "label": "Enabled"
- },
- {
- "fieldname": "price_usd",
- "fieldtype": "Currency",
- "in_list_view": 1,
- "label": "Price (USD)",
- "options": "USD",
- "reqd": 1
- },
- {
- "fieldname": "price_inr",
- "fieldtype": "Currency",
- "in_list_view": 1,
- "label": "Price (INR)",
- "options": "INR",
- "reqd": 1
- },
- {
- "fieldname": "cpu_time_per_day",
- "fieldtype": "Int",
- "in_list_view": 1,
- "label": "CPU Time Per Day"
- },
- {
- "fieldname": "document_type",
- "fieldtype": "Link",
- "label": "Document Type",
- "options": "DocType",
- "reqd": 1
- },
- {
- "fieldname": "interval",
- "fieldtype": "Select",
- "label": "Interval",
- "options": "Daily\nMonthly\nAnnually"
- },
- {
- "depends_on": "eval:doc.document_type == 'Site'",
- "fieldname": "features_section",
- "fieldtype": "Section Break",
- "label": "Site Features"
- },
- {
- "default": "0",
- "fieldname": "offsite_backups",
- "fieldtype": "Check",
- "in_list_view": 1,
- "label": "Offsite Backups"
- },
- {
- "fieldname": "max_database_usage",
- "fieldtype": "Int",
- "label": "Max Database Usage (MiB)"
- },
- {
- "fieldname": "max_storage_usage",
- "fieldtype": "Int",
- "label": "Max Storage Usage (MiB)"
- },
- {
- "fieldname": "column_break_5",
- "fieldtype": "Column Break"
- },
- {
- "fieldname": "section_break_2",
- "fieldtype": "Section Break"
- },
- {
- "fieldname": "roles_section",
- "fieldtype": "Section Break",
- "label": "Roles"
- },
- {
- "fieldname": "roles",
- "fieldtype": "Table",
- "label": "Roles",
- "options": "Has Role"
- },
- {
- "fieldname": "column_break_13",
- "fieldtype": "Column Break"
- },
- {
- "default": "0",
- "fieldname": "private_benches",
- "fieldtype": "Check",
- "label": "Private Benches"
- },
- {
- "default": "0",
- "fieldname": "database_access",
- "fieldtype": "Check",
- "label": "Database Access"
- },
- {
- "default": "0",
- "fieldname": "is_trial_plan",
- "fieldtype": "Check",
- "label": "Is Trial Plan"
- },
- {
- "fieldname": "servers",
- "fieldtype": "Section Break",
- "label": "Servers"
- },
- {
- "fieldname": "cluster",
- "fieldtype": "Link",
- "label": "Cluster",
- "options": "Cluster"
- },
- {
- "fieldname": "instance_type",
- "fieldtype": "Data",
- "label": "Instance Type"
- },
- {
- "fieldname": "column_break_21",
- "fieldtype": "Column Break"
- },
- {
- "fieldname": "vcpu",
- "fieldtype": "Int",
- "label": "vCPU"
- },
- {
- "fieldname": "memory",
- "fieldtype": "Int",
- "label": "Memory"
- },
- {
- "fieldname": "disk",
- "fieldtype": "Int",
- "label": "Disk"
- }
- ],
- "index_web_pages_for_search": 1,
- "links": [],
- "modified": "2022-09-21 11:07:49.585270",
- "modified_by": "Administrator",
- "module": "Press",
- "name": "Plan",
- "naming_rule": "Set by user",
- "owner": "Administrator",
- "permissions": [
- {
- "create": 1,
- "delete": 1,
- "email": 1,
- "export": 1,
- "print": 1,
- "read": 1,
- "report": 1,
- "role": "System Manager",
- "share": 1,
- "write": 1
- },
- {
- "email": 1,
- "export": 1,
- "print": 1,
- "read": 1,
- "report": 1,
- "role": "Press Admin",
- "share": 1
- },
- {
- "email": 1,
- "export": 1,
- "print": 1,
- "read": 1,
- "report": 1,
- "role": "Press Member",
- "share": 1
- }
- ],
- "quick_entry": 1,
- "sort_field": "price_usd",
- "sort_order": "ASC",
- "states": [],
- "title_field": "plan_title",
- "track_changes": 1
-}
\ No newline at end of file
diff --git a/press/press/doctype/plan/plan.py b/press/press/doctype/plan/plan.py
deleted file mode 100644
index 270ae62a886..00000000000
--- a/press/press/doctype/plan/plan.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2020, Frappe and contributors
-# For license information, please see license.txt
-
-
-from typing import List
-
-import frappe
-from frappe.model.document import Document
-from frappe.utils import rounded
-
-
-class Plan(Document):
- @property
- def period(self):
- return frappe.utils.get_last_day(None).day
-
- def get_price_per_day(self, currency):
- price = self.price_inr if currency == "INR" else self.price_usd
- price_per_day = rounded(price / self.period, 2)
- return price_per_day
-
- def get_price_for_interval(self, interval, currency):
- price_per_day = self.get_price_per_day(currency)
-
- if interval == "Daily":
- return price_per_day
-
- if interval == "Monthly":
- return rounded(price_per_day * 30)
-
- @classmethod
- def get_ones_without_offsite_backups(cls) -> List[str]:
- return frappe.get_all("Plan", filters={"offsite_backups": False}, pluck="name")
-
-
-def get_plan_config(name):
- cpu_time = frappe.db.get_value("Plan", name, "cpu_time_per_day")
- if cpu_time and cpu_time > 0:
- return {"rate_limit": {"limit": cpu_time * 3600, "window": 86400}}
- return {}
diff --git a/press/press/doctype/plan/test_plan.py b/press/press/doctype/plan/test_plan.py
deleted file mode 100644
index b73b8164589..00000000000
--- a/press/press/doctype/plan/test_plan.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2020, Frappe and Contributors
-# See license.txt
-
-import unittest
-from datetime import date
-from unittest.mock import patch
-
-import frappe
-
-
-def create_test_plan(
- document_type: str,
- price_usd: float = 10.0,
- price_inr: float = 750.0,
- cpu_time: int = 1,
-):
- """Create test Plan doc."""
- name = frappe.mock("name")
- plan = frappe.get_doc(
- {
- "doctype": "Plan",
- "document_type": document_type,
- "name": f"Test 10 dollar plan {name}",
- "plan_title": name,
- "price_inr": price_inr,
- "price_usd": price_usd,
- "cpu_time_per_day": cpu_time,
- "disk": 50,
- "instance_type": "t2.micro",
- }
- ).insert(ignore_if_duplicate=True)
- plan.reload()
- return plan
-
-
-class TestPlan(unittest.TestCase):
- def setUp(self):
- self.plan = create_test_plan("Site")
-
- def tearDown(self):
- frappe.db.rollback()
-
- def test_period_int(self):
- self.assertIsInstance(self.plan.period, int)
-
- def test_per_day_difference(self):
- per_day_usd = self.plan.get_price_per_day("USD")
- per_day_inr = self.plan.get_price_per_day("INR")
- self.assertIsInstance(per_day_inr, (int, float))
- self.assertIsInstance(per_day_usd, (int, float))
- self.assertNotEqual(per_day_inr, per_day_usd)
-
- def test_dynamic_period(self):
- month_with_29_days = frappe.utils.get_last_day(date(2020, 2, 3))
- month_with_30_days = frappe.utils.get_last_day(date(1997, 4, 3))
-
- with patch.object(frappe.utils, "get_last_day", return_value=month_with_30_days):
- self.assertEqual(self.plan.period, 30)
- per_day_for_30_usd = self.plan.get_price_per_day("USD")
- per_day_for_30_inr = self.plan.get_price_per_day("INR")
-
- with patch.object(frappe.utils, "get_last_day", return_value=month_with_29_days):
- self.assertEqual(self.plan.period, 29)
- per_day_for_29_usd = self.plan.get_price_per_day("USD")
- per_day_for_29_inr = self.plan.get_price_per_day("INR")
-
- self.assertNotEqual(per_day_for_29_usd, per_day_for_30_usd)
- self.assertNotEqual(per_day_for_29_inr, per_day_for_30_inr)
diff --git a/press/press/doctype/plan_change/plan_change.json b/press/press/doctype/plan_change/plan_change.json
index f0f140ecb73..fecf8c22bf7 100644
--- a/press/press/doctype/plan_change/plan_change.json
+++ b/press/press/doctype/plan_change/plan_change.json
@@ -45,14 +45,14 @@
"fieldname": "from_plan",
"fieldtype": "Link",
"label": "From Plan",
- "options": "Plan"
+ "options": "Server Plan"
},
{
"fieldname": "to_plan",
"fieldtype": "Link",
"in_list_view": 1,
"label": "To Plan",
- "options": "Plan",
+ "options": "Server Plan",
"reqd": 1
},
{
@@ -74,7 +74,7 @@
}
],
"links": [],
- "modified": "2022-09-22 05:40:57.705702",
+ "modified": "2024-02-19 14:03:40.310425",
"modified_by": "Administrator",
"module": "Press",
"name": "Plan Change",
diff --git a/press/press/doctype/plan_change/plan_change.py b/press/press/doctype/plan_change/plan_change.py
index f7a35be3ac6..24f2e9661f8 100644
--- a/press/press/doctype/plan_change/plan_change.py
+++ b/press/press/doctype/plan_change/plan_change.py
@@ -7,11 +7,28 @@
class PlanChange(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ document_name: DF.DynamicLink
+ document_type: DF.Link
+ from_plan: DF.Link | None
+ team: DF.Link | None
+ timestamp: DF.Datetime | None
+ to_plan: DF.Link
+ type: DF.Literal["", "Initial Plan", "Upgrade", "Downgrade"]
+ # end: auto-generated types
+
def validate(self):
self.team = frappe.db.get_value(self.document_type, self.document_name, "team")
if self.from_plan and not self.type:
- from_plan_value = frappe.db.get_value("Plan", self.from_plan, "price_usd")
- to_plan_value = frappe.db.get_value("Plan", self.to_plan, "price_usd")
+ from_plan_value = frappe.db.get_value("Server Plan", self.from_plan, "price_usd")
+ to_plan_value = frappe.db.get_value("Server Plan", self.to_plan, "price_usd")
self.type = "Downgrade" if from_plan_value > to_plan_value else "Upgrade"
if self.type == "Initial Plan":
@@ -28,6 +45,7 @@ def create_subscription(self):
frappe.get_doc(
doctype="Subscription",
team=self.team,
+ plan_type="Server Plan",
plan=self.to_plan,
document_type=self.document_type,
document_name=self.document_name,
diff --git a/press/press/doctype/plan_feature/plan_feature.py b/press/press/doctype/plan_feature/plan_feature.py
index e1f2c6aa438..81578d72cdd 100644
--- a/press/press/doctype/plan_feature/plan_feature.py
+++ b/press/press/doctype/plan_feature/plan_feature.py
@@ -6,4 +6,18 @@
class PlanFeature(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ description: DF.Data
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/press_feedback/press_feedback.json b/press/press/doctype/press_feedback/press_feedback.json
index c219071093d..00fd53d87e8 100644
--- a/press/press/doctype/press_feedback/press_feedback.json
+++ b/press/press/doctype/press_feedback/press_feedback.json
@@ -7,7 +7,13 @@
"field_order": [
"team",
"message",
- "route"
+ "route",
+ "team_created_on",
+ "currency",
+ "last_paid_invoice",
+ "column_break_vcbh",
+ "rating",
+ "note"
],
"fields": [
{
@@ -20,7 +26,7 @@
},
{
"fieldname": "message",
- "fieldtype": "Small Text",
+ "fieldtype": "Data",
"in_list_view": 1,
"label": "Message",
"reqd": 1
@@ -29,11 +35,41 @@
"fieldname": "route",
"fieldtype": "Data",
"label": "Route"
+ },
+ {
+ "fieldname": "column_break_vcbh",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "note",
+ "fieldtype": "Small Text",
+ "label": "Note"
+ },
+ {
+ "fieldname": "rating",
+ "fieldtype": "Rating",
+ "label": "Rating"
+ },
+ {
+ "fieldname": "team_created_on",
+ "fieldtype": "Date",
+ "label": "Team Created On"
+ },
+ {
+ "fieldname": "currency",
+ "fieldtype": "Link",
+ "label": "Currency",
+ "options": "Currency"
+ },
+ {
+ "fieldname": "last_paid_invoice",
+ "fieldtype": "Currency",
+ "label": "Last Paid Invoice"
}
],
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2021-02-16 11:00:09.510599",
+ "modified": "2025-02-03 15:48:45.990977",
"modified_by": "Administrator",
"module": "Press",
"name": "Press Feedback",
@@ -63,6 +99,7 @@
"quick_entry": 1,
"sort_field": "modified",
"sort_order": "DESC",
+ "states": [],
"title_field": "team",
"track_changes": 1
}
\ No newline at end of file
diff --git a/press/press/doctype/press_feedback/press_feedback.py b/press/press/doctype/press_feedback/press_feedback.py
index 4ccd0232d84..124d0a511c1 100644
--- a/press/press/doctype/press_feedback/press_feedback.py
+++ b/press/press/doctype/press_feedback/press_feedback.py
@@ -1,11 +1,28 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
-
+from __future__ import annotations
# import frappe
from frappe.model.document import Document
class PressFeedback(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ currency: DF.Link | None
+ last_paid_invoice: DF.Currency
+ message: DF.Data
+ note: DF.SmallText | None
+ rating: DF.Rating
+ route: DF.Data | None
+ team: DF.Link
+ team_created_on: DF.Date | None
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/press_feedback/test_press_feedback.py b/press/press/doctype/press_feedback/test_press_feedback.py
index b5457433b78..8c1f5d86814 100644
--- a/press/press/doctype/press_feedback/test_press_feedback.py
+++ b/press/press/doctype/press_feedback/test_press_feedback.py
@@ -1,11 +1,10 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestPressFeedback(unittest.TestCase):
+class TestPressFeedback(FrappeTestCase):
pass
diff --git a/press/press/doctype/press_job/press_job.js b/press/press/doctype/press_job/press_job.js
index 61a7d093103..e25deb104ec 100644
--- a/press/press/doctype/press_job/press_job.js
+++ b/press/press/doctype/press_job/press_job.js
@@ -2,6 +2,30 @@
// For license information, please see license.txt
frappe.ui.form.on('Press Job', {
- // refresh: function(frm) {
- // }
+ refresh: function (frm) {
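+		// Each tuple: [button label, server-side method, condition under which the button is shown].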
+ [
+ [__('Force Continue'), 'force_continue', frm.doc.status === 'Failure'],
+ [__('Force Fail'), 'force_fail', frm.doc.status === 'Running'],
+ [
+ __('Mark Callback Failure Issue Resolved'),
+ 'mark_callback_failure_issue_resolved',
+ frm.doc.callback_failed &&
+ !frm.doc.callback_executed &&
+ !frm.doc.callback_failure_issue_resolved,
+ ],
+ ].forEach(([label, method, condition]) => {
+ if (condition) {
+ frm.add_custom_button(
+ label,
+ () => {
+ frappe.confirm(
+ `Are you sure you want to ${label.toLowerCase()}?`,
+ () => frm.call(method).then(() => frm.refresh()),
+ );
+ },
+ __('Actions'),
+ );
+ }
+ });
+ },
});
diff --git a/press/press/doctype/press_job/press_job.json b/press/press/doctype/press_job/press_job.json
index 244ccdbfe19..c096388c81f 100644
--- a/press/press/doctype/press_job/press_job.json
+++ b/press/press/doctype/press_job/press_job.json
@@ -17,7 +17,15 @@
"server",
"virtual_machine",
"column_break_11",
- "arguments"
+ "arguments",
+ "callback_section",
+ "callback_executed",
+ "callback_failed",
+ "callback_retry_limit_reached",
+ "callback_failure_issue_resolved",
+ "column_break_zynz",
+ "callback_failure_count",
+ "next_callback_retry_at"
],
"fields": [
{
@@ -65,8 +73,9 @@
"read_only": 1
},
{
+ "default": "0",
"fieldname": "duration",
- "fieldtype": "Time",
+ "fieldtype": "Duration",
"in_list_view": 1,
"label": "Duration",
"read_only": 1
@@ -104,8 +113,60 @@
{
"fieldname": "column_break_11",
"fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "callback_failure_count",
+ "fieldtype": "Int",
+ "label": "Callback Failure Count",
+ "read_only": 1,
+ "reqd": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "callback_failed",
+ "fieldtype": "Check",
+ "label": "Callback Failed",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "callback_retry_limit_reached",
+ "fieldtype": "Check",
+ "label": "Callback Retry Limit Reached",
+ "read_only": 1
+ },
+ {
+ "fieldname": "next_callback_retry_at",
+ "fieldtype": "Datetime",
+ "label": "Next Callback Retry At",
+ "read_only": 1
+ },
+ {
+ "fieldname": "callback_section",
+ "fieldtype": "Section Break",
+ "label": "Callback"
+ },
+ {
+ "default": "0",
+ "fieldname": "callback_failure_issue_resolved",
+ "fieldtype": "Check",
+ "label": "Callback Failure Issue Resolved",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_zynz",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "callback_executed",
+ "fieldtype": "Check",
+ "label": "Callback Executed",
+ "read_only": 1
}
],
+ "grid_page_length": 50,
"in_create": 1,
"index_web_pages_for_search": 1,
"links": [
@@ -114,7 +175,7 @@
"link_fieldname": "job"
}
],
- "modified": "2022-11-01 18:37:24.481326",
+ "modified": "2025-07-31 11:51:16.216581",
"modified_by": "Administrator",
"module": "Press",
"name": "Press Job",
@@ -144,8 +205,9 @@
"write": 1
}
],
+ "row_format": "Dynamic",
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"track_changes": 1
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/press_job/press_job.py b/press/press/doctype/press_job/press_job.py
index 8c4e11dcdda..e952580aadb 100644
--- a/press/press/doctype/press_job/press_job.py
+++ b/press/press/doctype/press_job/press_job.py
@@ -1,17 +1,66 @@
# Copyright (c) 2022, Frappe and contributors
# For license information, please see license.txt
+import json
+
import frappe
from frappe.model.document import Document
-import json
+from frappe.utils import add_days, add_to_date
+
+from press.press.doctype.press_job_step.press_job_step import safe_exec
class PressJob(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ arguments: DF.Code
+ callback_executed: DF.Check
+ callback_failed: DF.Check
+ callback_failure_count: DF.Int
+ callback_failure_issue_resolved: DF.Check
+ callback_retry_limit_reached: DF.Check
+ duration: DF.Duration | None
+ end: DF.Datetime | None
+ job_type: DF.Link
+ name: DF.Int | None
+ next_callback_retry_at: DF.Datetime | None
+ server: DF.DynamicLink | None
+ server_type: DF.Link | None
+ start: DF.Datetime | None
+ status: DF.Literal["Pending", "Running", "Skipped", "Success", "Failure"]
+ virtual_machine: DF.Link | None
+ # end: auto-generated types
+
+ def before_insert(self):
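+		# Lock the server row so concurrent inserts serialize before the duplicate-job check below.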
+ frappe.db.get_value(self.server_type, self.server, "status", for_update=True)
+ if existing_jobs := frappe.db.get_all(
+ self.doctype,
+ {
+ "status": ("in", ["Pending", "Running"]),
+ "server_type": self.server_type,
+ "server": self.server,
+ },
+ ["job_type", "status"],
+ ):
+ frappe.throw(
+				f"A {existing_jobs[0].job_type} job is already {existing_jobs[0].status}. Please wait for it to finish."
+ )
+
def after_insert(self):
self.create_press_job_steps()
self.execute()
def on_update(self):
+ if self.has_value_changed("status"):
+ self.process_callback(save=True)
+
+ def on_change(self):
self.publish_update()
def create_press_job_steps(self):
@@ -25,7 +74,6 @@ def create_press_job_steps(self):
"job_type": self.job_type,
"step_name": step.step_name,
"wait_until_true": step.wait_until_true,
- "duration": "00:00:00",
}
)
doc.insert()
@@ -38,19 +86,17 @@ def execute(self):
def fail(self, arguments=None):
self.status = "Failure"
- pending_steps = frappe.get_all(
- "Press Job Step", {"job": self.name, "status": "Pending"}
- )
+ pending_steps = frappe.get_all("Press Job Step", {"job": self.name, "status": "Pending"})
for step in pending_steps:
frappe.db.set_value("Press Job Step", step.name, "status", "Skipped")
self.end = frappe.utils.now_datetime()
- self.duration = self.end - self.start
+ self.duration = (self.end - self.start).total_seconds()
self.save()
def succeed(self):
self.status = "Success"
self.end = frappe.utils.now_datetime()
- self.duration = self.end - self.start
+ self.duration = (self.end - self.start).total_seconds()
self.save()
@frappe.whitelist()
@@ -69,6 +115,26 @@ def next(self, arguments=None):
frappe.enqueue_doc("Press Job Step", next_step, "execute", enqueue_after_commit=True)
+ @frappe.whitelist()
+ def force_continue(self):
+ for step in frappe.get_all(
+ "Press Job Step",
+ {"job": self.name, "status": ("in", ("Failure", "Skipped"))},
+ pluck="name",
+ ):
+ frappe.db.set_value("Press Job Step", step, "status", "Pending")
+ self.next()
+
+ @frappe.whitelist()
+ def force_fail(self):
+ for step in frappe.get_all(
+ "Press Job Step",
+ {"job": self.name, "status": "Pending"},
+ pluck="name",
+ ):
+ frappe.db.set_value("Press Job Step", step, "status", "Failure")
+ frappe.db.set_value("Press Job", self.name, "status", "Failure")
+
@property
def next_step(self):
return frappe.db.get_value(
@@ -102,4 +168,93 @@ def detail(self):
}
def publish_update(self):
- frappe.publish_realtime("press_job_update", self.detail())
+ frappe.publish_realtime(
+ "press_job_update", doctype=self.doctype, docname=self.name, message=self.detail()
+ )
+
+ @frappe.whitelist()
+ def mark_callback_failure_issue_resolved(self):
+ self.callback_failure_issue_resolved = True
+ self.save()
+
+ def process_callback(self, save: bool = False): # noqa: C901
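+		# Run the job type's callback script once the job is terminal, tracking failures and scheduling retries.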
+ if self.status not in ["Success", "Failure"]:
+ return
+
+ if self.callback_executed or self.callback_failure_issue_resolved:
+ return
+
+ job_type = frappe.db.get_value(
+ "Press Job Type", self.job_type, ["callback_script", "callback_max_retry"], as_dict=True
+ )
+ if not job_type.callback_script:
+ self.callback_executed = True
+ if save:
+ self.save()
+ # No callback script defined, so just mark as executed
+ return
+
+ if self.callback_failed and self.callback_failure_count >= (job_type.callback_max_retry or 0):
+ self.callback_retry_limit_reached = True
+ self.next_callback_retry_at = None
+ if save:
+ self.save()
+ return
+
+ local = {"arguments": frappe._dict(json.loads(self.arguments)), "doc": self}
+ try:
+ safe_exec(job_type.callback_script, _locals=local)
+ self.callback_failed = False
+ self.callback_executed = True
+ self.next_callback_retry_at = None
+ self.callback_failure_issue_resolved = False
+ except Exception:
+ frappe.log_error(f"Error executing callback script for {self.name}")
+ self.callback_failed = True
+ self.callback_failure_count += 1
+ self.next_callback_retry_at = add_to_date(None, minutes=5)
+
+ if save:
+ self.save()
+
+ def on_trash(self):
+ frappe.db.delete("Press Job Step", {"job": self.name})
+
+
+def fail_stuck_press_jobs():
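+	# Force-fail jobs stuck in Pending/Running for more than a day, at most 100 per run.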
+ jobs = frappe.get_all(
+ "Press Job",
+ filters={
+ "status": ("in", ["Running", "Pending"]),
+ "creation": ("<", add_days(None, -1)),
+ },
+ pluck="name",
+ limit=100,
+ )
+ for job_name in jobs:
+ job = PressJob("Press Job", job_name)
+ job.force_fail()
+ frappe.db.commit()
+
+
+def process_failed_callbacks():
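+	# Re-enqueue callbacks that failed earlier once their scheduled retry time has passed.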
+ jobs = frappe.get_all(
+ "Press Job",
+ filters={
+ "status": ("in", ["Success", "Failure"]),
+ "callback_failed": True,
+ "callback_executed": False,
+ "callback_failure_issue_resolved": False,
+ "callback_retry_limit_reached": False,
+ "next_callback_retry_at": ("<", frappe.utils.now_datetime()),
+ },
+ pluck="name",
+ )
+ for job_name in jobs:
+ frappe.enqueue_doc(
+ "Press Job",
+ job_name,
+ "process_callback",
+ enqueue_after_commit=True,
+ save=True,
+ )
diff --git a/press/press/doctype/press_job_step/press_job_step.json b/press/press/doctype/press_job_step/press_job_step.json
index 7ddb9d8c433..fff57d6a985 100644
--- a/press/press/doctype/press_job_step/press_job_step.json
+++ b/press/press/doctype/press_job_step/press_job_step.json
@@ -67,8 +67,9 @@
"read_only": 1
},
{
+ "default": "0",
"fieldname": "duration",
- "fieldtype": "Time",
+ "fieldtype": "Duration",
"in_list_view": 1,
"label": "Duration",
"read_only": 1
@@ -99,6 +100,7 @@
{
"fieldname": "attempts",
"fieldtype": "Int",
+ "in_list_view": 1,
"label": "Attempts",
"read_only": 1
},
@@ -113,7 +115,7 @@
"in_create": 1,
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2022-11-01 18:36:52.022633",
+ "modified": "2024-08-05 16:56:46.525168",
"modified_by": "Administrator",
"module": "Press",
"name": "Press Job Step",
diff --git a/press/press/doctype/press_job_step/press_job_step.py b/press/press/doctype/press_job_step/press_job_step.py
index b3f6fa3ee5d..d8aba5d195c 100644
--- a/press/press/doctype/press_job_step/press_job_step.py
+++ b/press/press/doctype/press_job_step/press_job_step.py
@@ -1,15 +1,42 @@
# Copyright (c) 2022, Frappe and contributors
# For license information, please see license.txt
+import json
+from typing import TYPE_CHECKING
+
import frappe
from frappe.model.document import Document
from frappe.utils.safe_exec import safe_exec
-import json
+
+if TYPE_CHECKING:
+ from press.press.doctype.press_job.press_job import PressJob
class PressJobStep(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ attempts: DF.Int
+ duration: DF.Duration | None
+ end: DF.Datetime | None
+ job: DF.Link
+ job_type: DF.Link
+ name: DF.Int | None
+ result: DF.Code | None
+ start: DF.Datetime | None
+ status: DF.Literal["Pending", "Running", "Skipped", "Success", "Failure"]
+ step_name: DF.Data
+ traceback: DF.Code | None
+ wait_until_true: DF.Check
+ # end: auto-generated types
+
@frappe.whitelist()
- def execute(self):
+ def execute(self): # noqa: C901
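+		# Run the step's script with the job's arguments; the script reports back through the "result" local.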
if not self.start:
self.start = frappe.utils.now_datetime()
self.status = "Running"
@@ -18,7 +45,7 @@ def execute(self):
{"parent": self.job_type, "step_name": self.step_name},
"script",
)
- job = frappe.get_doc("Press Job", self.job)
+ job: PressJob = frappe.get_doc("Press Job", self.job)
arguments = json.loads(job.arguments)
try:
local = {"arguments": frappe._dict(arguments), "result": None, "doc": job}
@@ -27,7 +54,9 @@ def execute(self):
if self.wait_until_true:
self.attempts = self.attempts + 1
- if result[0]:
+ if result is None:
+ self.status = "Skipped"
+ elif result[0]:
self.status = "Success"
elif result[1]:
self.status = "Failure"
@@ -37,14 +66,21 @@ def execute(self):
time.sleep(1)
else:
- self.status = "Success"
+			if isinstance(result, (list, tuple)) and len(result) == 2:
+ self.status = "Success" if result[0] else "Failure"
+ if not result[0] and not result[1]:
+ self.status = "Skipped"
+ else:
+ self.status = "Success"
self.result = str(result)
except Exception:
self.status = "Failure"
self.traceback = frappe.get_traceback(with_context=True)
+ if frappe.flags.in_test:
+ raise
self.end = frappe.utils.now_datetime()
- self.duration = self.end - self.start
+ self.duration = (self.end - self.start).total_seconds()
self.save()
if self.status == "Failure":
diff --git a/press/press/doctype/press_job_type/press_job_type.json b/press/press/doctype/press_job_type/press_job_type.json
index 1e6d37cff31..0e350db1652 100644
--- a/press/press/doctype/press_job_type/press_job_type.json
+++ b/press/press/doctype/press_job_type/press_job_type.json
@@ -7,7 +7,9 @@
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
- "steps"
+ "steps",
+ "callback_script",
+ "callback_max_retry"
],
"fields": [
{
@@ -16,11 +18,26 @@
"label": "Steps",
"options": "Press Job Type Step",
"reqd": 1
+ },
+ {
+			"description": "The callback script runs once the Press Job reaches a terminal state (Success or Failure).",
+ "fieldname": "callback_script",
+ "fieldtype": "Code",
+ "label": "Callback Script",
+ "options": "Python"
+ },
+ {
+ "default": "1",
+ "fieldname": "callback_max_retry",
+ "fieldtype": "Int",
+ "in_list_view": 1,
+ "label": "Callback Max Retry"
}
],
+ "grid_page_length": 50,
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2022-09-28 17:51:52.412510",
+ "modified": "2025-07-31 13:52:28.892322",
"modified_by": "Administrator",
"module": "Press",
"name": "Press Job Type",
@@ -40,7 +57,8 @@
"write": 1
}
],
+ "row_format": "Dynamic",
"sort_field": "modified",
"sort_order": "DESC",
"states": []
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/press_job_type/press_job_type.py b/press/press/doctype/press_job_type/press_job_type.py
index 834ec4dd7b7..aef0ee11eae 100644
--- a/press/press/doctype/press_job_type/press_job_type.py
+++ b/press/press/doctype/press_job_type/press_job_type.py
@@ -6,4 +6,19 @@
class PressJobType(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.press_job_type_step.press_job_type_step import PressJobTypeStep
+
+ callback_max_retry: DF.Int
+ callback_script: DF.Code | None
+ steps: DF.Table[PressJobTypeStep]
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/press_job_type_step/press_job_type_step.py b/press/press/doctype/press_job_type_step/press_job_type_step.py
index aa01e3756ae..f6256d76fc4 100644
--- a/press/press/doctype/press_job_type_step/press_job_type_step.py
+++ b/press/press/doctype/press_job_type_step/press_job_type_step.py
@@ -6,4 +6,20 @@
class PressJobTypeStep(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ script: DF.Code
+ step_name: DF.Data
+ wait_until_true: DF.Check
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/press_method_permission/__init__.py b/press/press/doctype/press_method_permission/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/press_method_permission/press_method_permission.js b/press/press/doctype/press_method_permission/press_method_permission.js
new file mode 100644
index 00000000000..5a699f816a2
--- /dev/null
+++ b/press/press/doctype/press_method_permission/press_method_permission.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2023, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Press Method Permission", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/press_method_permission/press_method_permission.json b/press/press/doctype/press_method_permission/press_method_permission.json
new file mode 100644
index 00000000000..a85e09b08f2
--- /dev/null
+++ b/press/press/doctype/press_method_permission/press_method_permission.json
@@ -0,0 +1,67 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2023-08-09 21:22:15.511701",
+ "default_view": "List",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "document_type",
+ "checkbox_label",
+ "method"
+ ],
+ "fields": [
+ {
+ "fieldname": "document_type",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Document Type",
+ "options": "DocType",
+ "reqd": 1
+ },
+ {
+ "fieldname": "method",
+ "fieldtype": "Data",
+ "in_filter": 1,
+ "in_list_view": 1,
+ "in_preview": 1,
+ "in_standard_filter": 1,
+ "label": "Method",
+ "reqd": 1
+ },
+ {
+ "fieldname": "checkbox_label",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "in_preview": 1,
+ "in_standard_filter": 1,
+ "label": "Checkbox Label",
+ "reqd": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2023-09-25 21:33:32.729651",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Press Method Permission",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/press_method_permission/press_method_permission.py b/press/press/doctype/press_method_permission/press_method_permission.py
new file mode 100644
index 00000000000..344c07cebc3
--- /dev/null
+++ b/press/press/doctype/press_method_permission/press_method_permission.py
@@ -0,0 +1,39 @@
+# Copyright (c) 2023, Frappe and contributors
+# For license information, please see license.txt
+
+import frappe
+from frappe.model.document import Document
+
+
+class PressMethodPermission(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ checkbox_label: DF.Data
+ document_type: DF.Link
+ method: DF.Data
+ # end: auto-generated types
+
+ pass
+
+
+def available_actions():
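+	# Map each doctype to its {checkbox_label: method} options defined in Press Method Permission.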
+ result = {}
+ doctypes = frappe.get_all(
+ "Press Method Permission", pluck="document_type", distinct=True
+ )
+
+ for doctype in doctypes:
+ result[doctype] = {
+ perm["checkbox_label"]: perm["method"]
+ for perm in frappe.get_all(
+ "Press Method Permission", {"document_type": doctype}, ["checkbox_label", "method"]
+ )
+ }
+
+ return result
diff --git a/press/press/doctype/press_method_permission/test_press_method_permission.py b/press/press/doctype/press_method_permission/test_press_method_permission.py
new file mode 100644
index 00000000000..e3cc7411ab5
--- /dev/null
+++ b/press/press/doctype/press_method_permission/test_press_method_permission.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2023, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class TestPressMethodPermission(FrappeTestCase):
+ pass
diff --git a/press/press/doctype/press_notification/__init__.py b/press/press/doctype/press_notification/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/press_notification/patches/link_reference_doctype_to_notifications.py b/press/press/doctype/press_notification/patches/link_reference_doctype_to_notifications.py
new file mode 100644
index 00000000000..fd131318335
--- /dev/null
+++ b/press/press/doctype/press_notification/patches/link_reference_doctype_to_notifications.py
@@ -0,0 +1,33 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+import frappe
+from tqdm import tqdm
+
+
+def execute():
+ notifications = frappe.db.get_all(
+ "Press Notification", ["name", "document_type", "document_name"]
+ )
+ for notification in tqdm(notifications):
+ if notification.document_type == "Agent Job":
+ reference_doctype = "Site"
+ reference_doc = frappe.db.get_value("Agent Job", notification.document_name, "site")
+ if not reference_doc:
+ reference_doctype = "Server"
+ reference_doc = frappe.db.get_value(
+ "Agent Job", notification.document_name, "server"
+ )
+
+ elif notification.document_type == "Deploy Candidate":
+ reference_doctype = "Release Group"
+ reference_doc = frappe.db.get_value(
+ "Deploy Candidate", notification.document_name, "group"
+			)
+		else:
+			# Other document types have no reference mapping; skip them.
+			continue
+
+ frappe.db.set_value(
+ "Press Notification",
+ notification.name,
+ {"reference_doctype": reference_doctype, "reference_name": reference_doc},
+ update_modified=False,
+ )
diff --git a/press/press/doctype/press_notification/press_notification.js b/press/press/doctype/press_notification/press_notification.js
new file mode 100644
index 00000000000..3b9fb9cdddf
--- /dev/null
+++ b/press/press/doctype/press_notification/press_notification.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2023, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Press Notification", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/press_notification/press_notification.json b/press/press/doctype/press_notification/press_notification.json
new file mode 100644
index 00000000000..fba80da452a
--- /dev/null
+++ b/press/press/doctype/press_notification/press_notification.json
@@ -0,0 +1,209 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2024-07-05 10:54:14.431684",
+ "default_view": "List",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "team",
+ "document_type",
+ "reference_doctype",
+ "column_break_brxc",
+ "type",
+ "document_name",
+ "reference_name",
+ "interaction_section",
+ "is_actionable",
+ "read",
+ "column_break_rada",
+ "is_addressed",
+ "message_section",
+ "title",
+ "message",
+ "traceback",
+ "assistance_url",
+ "class"
+ ],
+ "fields": [
+ {
+ "default": "0",
+ "fieldname": "read",
+ "fieldtype": "Check",
+ "label": "Read"
+ },
+ {
+ "fieldname": "type",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Type",
+ "options": "Site Update\nSite Migrate\nVersion Upgrade\nBench Deploy\nSite Recovery\nAgent Job Failure\nDowntime/Performance\nSupport Access\nAuto Scale",
+ "read_only": 1,
+ "reqd": 1
+ },
+ {
+ "fieldname": "message",
+ "fieldtype": "Long Text",
+ "label": "Message",
+ "read_only": 1
+ },
+ {
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "To Team",
+ "options": "Team",
+ "read_only": 1,
+ "reqd": 1
+ },
+ {
+ "fieldname": "document_type",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Document Type",
+ "options": "DocType",
+ "read_only": 1,
+ "reqd": 1
+ },
+ {
+ "fieldname": "document_name",
+ "fieldtype": "Dynamic Link",
+ "in_list_view": 1,
+ "label": "Document Name",
+ "options": "document_type",
+ "read_only": 1,
+ "reqd": 1
+ },
+ {
+ "depends_on": "eval:doc.traceback",
+ "fieldname": "traceback",
+ "fieldtype": "Code",
+ "label": "Traceback",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_brxc",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "interaction_section",
+ "fieldtype": "Section Break",
+ "label": "User Interaction"
+ },
+ {
+ "default": "0",
+ "depends_on": "eval:doc.is_actionable",
+			"description": "Actionable notifications can be rectified by the user, for example when a deploy breaks due to an incompatible app version.",
+ "fieldname": "is_actionable",
+ "fieldtype": "Check",
+ "label": "Is Actionable",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_rada",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "message_section",
+ "fieldtype": "Section Break",
+ "label": "Message"
+ },
+ {
+ "depends_on": "eval:doc.title",
+ "fieldname": "title",
+ "fieldtype": "Small Text",
+ "label": "Title",
+ "read_only": 1
+ },
+ {
+ "depends_on": "eval:doc.assistance_url",
+ "description": "Meant to be used if the notification is actionable. The URL can point to documentation on how to resolve the issue.",
+ "fieldname": "assistance_url",
+ "fieldtype": "Data",
+			"label": "Assistance URL",
+ "read_only": 1
+ },
+ {
+ "default": "Info",
+ "fieldname": "class",
+ "fieldtype": "Select",
+ "label": "Class",
+ "options": "Info\nSuccess\nWarning\nError",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "depends_on": "eval:doc.is_actionable",
+			"description": "Actionable notifications can be addressed by the user.\n\nUsed to block an action until the issue has been addressed, for example preventing further deploys until an incompatible app version has been fixed.",
+ "fieldname": "is_addressed",
+ "fieldtype": "Check",
+ "label": "Is Addressed",
+ "read_only": 1
+ },
+ {
+ "description": "For filtering notifications in the dashboard",
+ "fieldname": "reference_doctype",
+ "fieldtype": "Link",
+ "hidden": 1,
+ "label": "Reference DocType",
+ "options": "DocType"
+ },
+ {
+ "description": "For filtering notifications in the dashboard",
+ "fieldname": "reference_name",
+ "fieldtype": "Dynamic Link",
+ "hidden": 1,
+ "label": "Reference Name",
+ "options": "reference_doctype"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2025-10-22 11:00:39.063349",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Press Notification",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/press/doctype/press_notification/press_notification.py b/press/press/doctype/press_notification/press_notification.py
new file mode 100644
index 00000000000..259f7b968e2
--- /dev/null
+++ b/press/press/doctype/press_notification/press_notification.py
@@ -0,0 +1,126 @@
+# Copyright (c) 2023, Frappe and contributors
+# For license information, please see license.txt
+from __future__ import annotations
+
+import frappe
+from frappe.model.document import Document
+
+from press.api.client import dashboard_whitelist
+from press.press.doctype.communication_info.communication_info import get_communication_info
+
+
+class PressNotification(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ assistance_url: DF.Data | None
+ document_name: DF.DynamicLink
+ document_type: DF.Link
+ is_actionable: DF.Check
+ is_addressed: DF.Check
+ message: DF.LongText | None
+ read: DF.Check
+ reference_doctype: DF.Link | None
+ reference_name: DF.DynamicLink | None
+ team: DF.Link
+ title: DF.SmallText | None
+ traceback: DF.Code | None
+ type: DF.Literal[
+ "Site Update",
+ "Site Migrate",
+ "Version Upgrade",
+ "Bench Deploy",
+ "Site Recovery",
+ "Agent Job Failure",
+ "Downtime/Performance",
+ "Support Access",
+ "Auto Scale",
+ ]
+ # end: auto-generated types
+
+ dashboard_fields = (
+ "team",
+ "document_type",
+ "class",
+ "type",
+ "document_name",
+ "is_actionable",
+ "read",
+ "is_addressed",
+ "title",
+ "message",
+ "traceback",
+ "assistance_url",
+ )
+
+ def after_insert(self):
+ if frappe.local.dev_server:
+ return
+
+ user = frappe.db.get_value("Team", self.team, "user")
+ if user == "Administrator":
+ return
+
+ if self.type == "Bench Deploy":
+ self.send_bench_deploy_failed(get_communication_info("Email", "General", "Team", self.team))
+
+ def send_bench_deploy_failed(self, mails: list[str]):
+ """Skip emails in case of warning"""
+ if getattr(self, "class", None) == "Warning":
+ return
+
+ group_name = frappe.db.get_value("Deploy Candidate Build", self.document_name, "group")
+ rg_title = frappe.db.get_value("Release Group", group_name, "title")
+
+ frappe.sendmail(
+ recipients=mails,
+ subject=f"Bench Deploy Failed - {rg_title}",
+ template="bench_deploy_failure",
+ args={
+ "message": self.title,
+ "link": f"dashboard/groups/{group_name}/deploys/{self.document_name}",
+ },
+ )
+
+ @dashboard_whitelist()
+ def mark_as_addressed(self):
+ self.read = True
+ self.is_addressed = True
+ self.save()
+ frappe.db.commit()
+
+ @dashboard_whitelist()
+ def mark_as_read(self):
+ self.db_set("read", True)
+
+
+def create_new_notification(team, type, document_type, document_name, message):
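+	# Create at most one notification per document and derive the reference fields used for dashboard filtering.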
+	if not frappe.db.exists("Press Notification", {"document_name": document_name}):
+		# Default to no reference so document types without a mapping below do not fail.
+		reference_doctype = reference_doc = None
+		if document_type == "Agent Job":
+ reference_doctype = "Site"
+ reference_doc = frappe.db.get_value("Agent Job", document_name, "site")
+ if not reference_doc:
+ reference_doctype = "Server"
+ reference_doc = frappe.db.get_value("Agent Job", document_name, "server")
+ elif document_type == "Deploy Candidate":
+ reference_doctype = "Release Group"
+ reference_doc = frappe.db.get_value("Deploy Candidate", document_name, "group")
+
+ frappe.get_doc(
+ {
+ "doctype": "Press Notification",
+ "team": team,
+ "type": type,
+ "document_type": document_type,
+ "document_name": document_name or 0,
+ "message": message,
+ "reference_doctype": reference_doctype,
+ "reference_name": reference_doc,
+ }
+ ).insert()
+ frappe.publish_realtime("press_notification", doctype="Press Notification", message={"team": team})
diff --git a/press/press/doctype/press_notification/test_press_notification.py b/press/press/doctype/press_notification/test_press_notification.py
new file mode 100644
index 00000000000..81cc8593c93
--- /dev/null
+++ b/press/press/doctype/press_notification/test_press_notification.py
@@ -0,0 +1,59 @@
+# Copyright (c) 2023, Frappe and Contributors
+# See license.txt
+
+import frappe
+from frappe.tests.utils import FrappeTestCase
+
+from press.api.notifications import get_unread_count
+from press.press.doctype.agent_job.agent_job import poll_pending_jobs
+from press.press.doctype.agent_job.test_agent_job import fake_agent_job
+from press.press.doctype.app.test_app import create_test_app
+from press.press.doctype.deploy_candidate_difference.test_deploy_candidate_difference import (
+ create_test_deploy_candidate_differences,
+)
+from press.press.doctype.release_group.test_release_group import (
+ create_test_release_group,
+)
+from press.press.doctype.site.test_site import create_test_bench, create_test_site
+
+
+class TestPressNotification(FrappeTestCase):
+ def setUp(self):
+ super().setUp()
+
+ app1 = create_test_app() # frappe
+ app2 = create_test_app("app2", "App 2")
+ app3 = create_test_app("app3", "App 3")
+ self.apps = [app1, app2, app3]
+
+ def tearDown(self):
+ frappe.db.rollback()
+
+ def test_notification_is_created_when_agent_job_fails(self):
+ group = create_test_release_group(self.apps)
+ bench1 = create_test_bench(group=group)
+ bench2 = create_test_bench(group=group, server=bench1.server)
+
+ create_test_deploy_candidate_differences(bench2.candidate) # for site update to be available
+
+ site = create_test_site(bench=bench1.name)
+
+ self.assertEqual(frappe.db.count("Press Notification"), 0)
+ with (
+ fake_agent_job(
+ "Update Site Pull",
+ "Failure",
+ ),
+ fake_agent_job(
+ "Recover Failed Site Update",
+ "Success",
+ ),
+ ):
+ site.schedule_update()
+ poll_pending_jobs()
+
+ notification = frappe.get_last_doc("Press Notification")
+ self.assertEqual(notification.type, "Site Update")
+ # api test is added here since it's trivial
+ # move to separate file if it gets more complex
+ self.assertEqual(get_unread_count(), 1)
diff --git a/press/press/doctype/press_permission_group/__init__.py b/press/press/doctype/press_permission_group/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/press_permission_group/press_permission_group.js b/press/press/doctype/press_permission_group/press_permission_group.js
new file mode 100644
index 00000000000..0e891accca4
--- /dev/null
+++ b/press/press/doctype/press_permission_group/press_permission_group.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2023, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Press Permission Group", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/press_permission_group/press_permission_group.json b/press/press/doctype/press_permission_group/press_permission_group.json
new file mode 100644
index 00000000000..7719f87ae90
--- /dev/null
+++ b/press/press/doctype/press_permission_group/press_permission_group.json
@@ -0,0 +1,103 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2023-08-07 12:47:43.330395",
+ "default_view": "List",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "team",
+ "column_break_kbnh",
+ "title",
+ "section_break_rknu",
+ "users",
+ "permissions"
+ ],
+ "fields": [
+ {
+ "fieldname": "title",
+ "fieldtype": "Data",
+ "label": "Title",
+ "reqd": 1
+ },
+ {
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Team",
+ "options": "Team",
+ "reqd": 1
+ },
+ {
+ "fieldname": "users",
+ "fieldtype": "Table",
+ "label": "Users",
+ "options": "Press Permission Group User"
+ },
+ {
+ "fieldname": "column_break_kbnh",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "section_break_rknu",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "permissions",
+ "fieldtype": "JSON",
+ "label": "Permissions"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2024-03-27 06:03:36.540752",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Press Permission Group",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "show_title_field_in_link": 1,
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": [],
+ "title_field": "title",
+ "track_changes": 1
+}
\ No newline at end of file
diff --git a/press/press/doctype/press_permission_group/press_permission_group.py b/press/press/doctype/press_permission_group/press_permission_group.py
new file mode 100644
index 00000000000..dd5c15d72ad
--- /dev/null
+++ b/press/press/doctype/press_permission_group/press_permission_group.py
@@ -0,0 +1,351 @@
+# Copyright (c) 2023, Frappe and contributors
+# For license information, please see license.txt
+
+from typing import ClassVar
+
+import frappe
+from frappe.model.document import Document
+
+from press.api.client import dashboard_whitelist
+
+DEFAULT_PERMISSIONS = {
+ "*": {"*": {"*": True}} # all doctypes # all documents # all methods
+}
+
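+# For illustration (hypothetical document name), a more restrictive value could be:
+# {
+#     "Site": {
+#         "site1.example.com": {"archive": False, "*": True},
+#         "*": {"*": True},
+#     }
+# }
+# i.e. doctype -> document name (or "*") -> method (or "*") -> permitted flag.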
+
+class PressPermissionGroup(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.press_permission_group_user.press_permission_group_user import (
+ PressPermissionGroupUser,
+ )
+
+ permissions: DF.JSON | None
+ team: DF.Link
+ title: DF.Data
+ users: DF.Table[PressPermissionGroupUser]
+ # end: auto-generated types
+
+ dashboard_fields: ClassVar = ["title", "users"]
+
+ def get_doc(self, doc):
+ if doc.users:
+ values = {
+ d.name: d
+ for d in frappe.db.get_all(
+ "User",
+ filters={"name": ["in", [user.user for user in doc.users]]},
+ fields=["name", "full_name", "user_image"],
+ )
+ }
+ doc.users = [d.as_dict() for d in doc.users]
+ for user in doc.users:
+ user.full_name = values.get(user.user, {}).get("full_name")
+ user.user_image = values.get(user.user, {}).get("user_image")
+ return doc
+
+ def validate(self):
+ self.validate_permissions()
+ self.validate_users()
+
+ def validate_permissions(self): # noqa: C901
+ permissions = frappe.parse_json(self.permissions)
+ if not permissions:
+ self.permissions = DEFAULT_PERMISSIONS
+ return
+
+ for doctype, doctype_perms in permissions.items():
+ if doctype not in get_all_restrictable_doctypes() and doctype != "*":
+ frappe.throw(f"{doctype} is not a valid doctype.")
+
+ if not isinstance(doctype_perms, dict):
+ frappe.throw(
+ f"Invalid perms for {doctype}. Rule must be key-value pairs of document name and document perms."
+ )
+
+ for doc_name, doc_perms in doctype_perms.items():
+ if not isinstance(doc_perms, dict):
+ frappe.throw(
+ f"Invalid perms for {doctype} {doc_name}. Rule must be key-value pairs of method and permission."
+ )
+
+ if doctype == "*":
+ continue
+
+ restrictable_methods = get_all_restrictable_methods(doctype)
+ if not restrictable_methods:
+ frappe.throw(f"{doctype} does not have any restrictable methods.")
+
+ for method, _permitted in doc_perms.items():
+ if method != "*" and method not in restrictable_methods:
+ frappe.throw(f"{method} is not a restrictable method of {doctype}")
+
+ def validate_users(self):
+ for user in self.users:
+ if user.user == "Administrator":
+ continue
+ user_belongs_to_team = frappe.db.exists("Team Member", {"parent": self.team, "user": user.user})
+ if not user_belongs_to_team:
+ frappe.throw(f"{user.user} does not belong to {self.team}")
+
+ @dashboard_whitelist()
+ def delete(self):
+ super().delete()
+
+ @dashboard_whitelist()
+ def get_users(self):
+ user_names = [user.user for user in self.users]
+ if not user_names:
+ return []
+
+ return frappe.db.get_all(
+ "User",
+ filters={"name": ["in", user_names], "enabled": 1},
+ fields=[
+ "name",
+ "first_name",
+ "last_name",
+ "full_name",
+ "user_image",
+ "name as email",
+ ],
+ )
+
+ @dashboard_whitelist()
+ def add_user(self, user):
+ user_belongs_to_group = self.get("users", {"user": user})
+ if user_belongs_to_group:
+ frappe.throw(f"{user} already belongs to {self.title}")
+
+ user_is_team_owner = frappe.db.exists("Team", {"name": self.team, "user": user})
+ if user_is_team_owner:
+ frappe.throw(f"{user} cannot be added to {self.title} because they are the owner of {self.team}")
+
+ self.append("users", {"user": user})
+ self.save()
+
+ @dashboard_whitelist()
+ def remove_user(self, user):
+ user_belongs_to_group = self.get("users", {"user": user})
+ if not user_belongs_to_group:
+ frappe.throw(f"{user} does not belong to {self.name}")
+
+ for row in self.users:
+ if row.user == user:
+ self.remove(row)
+ break
+ self.save()
+
+ @dashboard_whitelist()
+ def get_all_document_permissions(self, doctype: str) -> list:
+ """
+ Get this group's permissions for every document of the specified restrictable doctype.
+
+ :param doctype: The doctype for which permissions are to be retrieved.
+ :return: A list of dictionaries containing the document type, document name, and permissions for each document.
+ """
+ from press.api.client import get_list
+
+ user = frappe.session.user
+ user_belongs_to_group = self.get("users", {"user": user})
+ user_is_team_owner = frappe.db.exists("Team", {"name": self.team, "user": user})
+ if not (frappe.local.system_user() or user_belongs_to_group or user_is_team_owner):
+ frappe.throw(f"{user} does not belong to {self.name}")
+
+ if doctype not in get_all_restrictable_doctypes():
+ frappe.throw(f"{doctype} is not a valid restrictable doctype.")
+
+ restrictable_methods = get_all_restrictable_methods(doctype)
+ if not restrictable_methods:
+ frappe.throw(f"{doctype} does not have any restrictable methods.")
+
+ options = []
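+ # Site is titled by its name; other restrictable doctypes carry a separate title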
+ fields = ["name", "title"] if doctype != "Site" else ["name"]
+ docs = get_list(doctype=doctype, fields=fields, limit=9999)
+
+ for doc in docs:
+ permitted_methods = get_permitted_methods(doctype, doc.name, group_names=[self.name])
+ doc_perms = []
+ for method, label in restrictable_methods.items():
+ is_permitted = method in permitted_methods
+ doc_perms.append(
+ {
+ "label": label,
+ "method": method,
+ "permitted": is_permitted,
+ }
+ )
+ options.append(
+ {
+ "document_type": doctype,
+ "document_name": doc.title or doc.name,
+ "permissions": doc_perms,
+ }
+ )
+
+ return options
+
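+ # Merge semantics (illustrative, hypothetical input): passing
+ # {"Site": {"*": {"reinstall": False}}} replaces all per-document Site rules
+ # with the single wildcard rule, whereas a named document key is merged into
+ # the existing rules method by method.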
+ @dashboard_whitelist()
+ def update_permissions(self, updated_permissions):
+ cur_permissions = frappe.parse_json(self.permissions)
+ for updated_doctype, updated_doctype_perms in updated_permissions.items():
+ if updated_doctype not in cur_permissions:
+ cur_permissions[updated_doctype] = {}
+
+ for updated_docname, updated_docperms in updated_doctype_perms.items():
+ if updated_docname == "*":
+ cur_permissions[updated_doctype] = {"*": updated_docperms}
+ continue
+ if updated_docname not in cur_permissions[updated_doctype]:
+ cur_permissions[updated_doctype][updated_docname] = {}
+
+ for method, permitted in updated_docperms.items():
+ cur_permissions[updated_doctype][updated_docname][method] = permitted
+
+ self.permissions = cur_permissions
+ self.save()
+
+
+def has_method_permission(doctype: str, name: str, method: str, group_names: list | None = None):
+ if frappe.local.system_user():
+ return True
+
+ user = frappe.session.user
+
+ if doctype not in get_all_restrictable_doctypes():
+ return True
+
+ if method not in get_all_restrictable_methods(doctype):
+ return True
+
+ if not group_names:
+ group_names = get_permission_groups(user)
+
+ if not group_names:
+ # user does not have any restricted permissions set in any group
+ return True
+
+ if method in get_permitted_methods(doctype, name, group_names):
+ return True
+
+ return False
+
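+# Typical call (hypothetical site name):
+#   has_method_permission("Site", "site1.example.com", "archive")
+# returns True for system users, non-restrictable doctypes or methods, and
+# users with no permission groups; otherwise only when the resolved group
+# permissions permit the method.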
+
+def get_permitted_methods(doctype: str, name: str, group_names: list | None = None) -> list:
+ user = frappe.session.user
+
+ if doctype not in get_all_restrictable_doctypes():
+ frappe.throw(f"{doctype} is not a valid restrictable doctype.")
+
+ permissions_by_group = {}
+ permission_groups = group_names or get_permission_groups(user)
+ for group_name in set(permission_groups):
+ permissions_by_group[group_name] = get_method_perms_for_group(doctype, name, group_name)
+
+ method_perms = resolve_doc_permissions(doctype, permissions_by_group)
+ permitted_methods = [method for method, permitted in method_perms.items() if permitted]
+ return list(set(permitted_methods))
+
+
+def get_method_perms_for_group(doctype: str, name: str, group_name: str) -> dict:
+ permissions = frappe.db.get_value("Press Permission Group", group_name, "permissions")
+
+ if not permissions:
+ # this group allows all methods of all documents
+ return {"*": True}
+
+ permissions = frappe.parse_json(permissions)
+ doctype_perms = permissions.get(doctype) or permissions.get("*")
+ if not doctype_perms:
+ # this group allows all methods of all documents
+ return {"*": True}
+
+ doc_perms = doctype_perms.get(name) or doctype_perms.get("*")
+ if not doc_perms:
+ # this group allows all methods of this document
+ return {"*": True}
+
+ return doc_perms
+
+
+def resolve_doc_permissions(doctype, permissions_by_group: dict) -> dict: # noqa: C901
+ """
+ Permission Resolution Logic:
+ - if a group has *: True and another group has *: False, then all the methods are allowed
+ - if a group has *: True and another group has 'method': False, then that method is restricted
+ - if a group has 'method': True and another group has 'method': False, then that method is allowed
+ """
+ method_perms = {}
+
+ all_methods = get_all_restrictable_methods(doctype)
+ all_restricted = {method: False for method in all_methods}
+ all_allowed = {method: True for method in all_methods}
+
+ # first we parse the wildcard permissions
+ # if any group has *: True, then all methods are allowed
+ for _group_name, permissions in permissions_by_group.items():
+ wildcard = permissions.get("*")
+ if wildcard is None:
+ continue
+ if wildcard is True:
+ method_perms = all_allowed
+ break
+ if wildcard is False:
+ method_perms = all_restricted
+
+ # now we restrict all the methods that are explicitly restricted
+ # so that we can allow all the methods that are explicitly allowed later
+ for _group_name, permissions in permissions_by_group.items():
+ for method, permitted in permissions.items():
+ if not permitted and method != "*":
+ method_perms[method] = False
+
+ # now we allow all the methods that are explicitly allowed
+ for _group_name, permissions in permissions_by_group.items():
+ for method, permitted in permissions.items():
+ if permitted and method != "*":
+ method_perms[method] = True
+
+ return method_perms
+
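+# Worked example (hypothetical groups): for
+#   {"g1": {"*": True}, "g2": {"reinstall": False}}
+# the wildcard pass allows every method, the restrict pass then disables
+# "reinstall", and since no group explicitly re-allows it, only "reinstall"
+# stays restricted.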
+
+def get_all_restrictable_doctypes() -> list:
+ return ["Site", "Release Group"]
+
+
+def get_all_restrictable_methods(doctype: str) -> dict:
+ methods = {
+ "Site": {
+ # method: label,
+ "get_doc": " View", # so that this comes up first in sort order
+ "archive": "Drop",
+ "migrate": "Migrate",
+ "activate": "Activate",
+ "reinstall": "Reinstall",
+ "deactivate": "Deactivate",
+ "enable_database_access": "Database",
+ "restore_site_from_files": "Restore",
+ },
+ "Release Group": {
+ "get_doc": " View",
+ "restart": "Restart",
+ },
+ }
+ return methods.get(doctype, {})
+
+
+def get_permission_groups(user: str | None = None) -> list:
+ if not user:
+ user = frappe.session.user
+
+ return frappe.get_all(
+ "Press Permission Group User",
+ filters={"user": user},
+ pluck="parent",
+ distinct=True,
+ )
diff --git a/press/press/doctype/press_permission_group/test_press_permission_group.py b/press/press/doctype/press_permission_group/test_press_permission_group.py
new file mode 100644
index 00000000000..cecbd54ca46
--- /dev/null
+++ b/press/press/doctype/press_permission_group/test_press_permission_group.py
@@ -0,0 +1,156 @@
+# Copyright (c) 2023, Frappe and Contributors
+# See license.txt
+
+import frappe
+from frappe.tests.utils import FrappeTestCase
+
+from press.press.doctype.press_permission_group.press_permission_group import (
+ get_all_restrictable_methods,
+ has_method_permission,
+)
+from press.press.doctype.team.test_team import create_test_team
+
+
+class TestPressPermissionGroup(FrappeTestCase):
+ def setUp(self):
+ super().setUp()
+
+ frappe.set_user("Administrator")
+ frappe.db.delete("Press Permission Group")
+ self.team_user = create_user("team@example.com")
+ self.team = create_test_team(self.team_user.email)
+ self.team_member = create_user("user123@example.com")
+ self.team.append("team_members", {"user": self.team_member.name})
+ self.team.save()
+ self.perm_group = create_permission_group(self.team.name)
+ self.perm_group2 = create_permission_group(self.team.name)
+
+ def tearDown(self):
+ frappe.set_user("Administrator")
+ frappe.delete_doc("Press Permission Group", self.perm_group.name, force=True)
+ frappe.delete_doc("Press Permission Group", self.perm_group2.name, force=True)
+ frappe.delete_doc("Team", self.team.name, force=True)
+ frappe.delete_doc("User", self.team_member.name, force=True)
+ frappe.delete_doc("User", self.team_user.name, force=True)
+ frappe.local._current_team = None
+
+ def test_add_user(self):
+ self.perm_group.add_user(self.team_member.name)
+ perm_group_users = self.perm_group.get_users()
+ perm_group_user_exists = any(self.team_member.name == pg_user.name for pg_user in perm_group_users)
+ self.assertTrue(perm_group_user_exists)
+ self.assertRaises(frappe.ValidationError, self.perm_group.add_user, self.team_member.name)
+
+ def test_remove_user(self):
+ self.perm_group.add_user(self.team_member.name)
+ self.perm_group.remove_user(self.team_member.name)
+ perm_group_users = self.perm_group.get_users()
+ perm_group_user_exists = any(self.team_member.name == pg_user.name for pg_user in perm_group_users)
+ self.assertFalse(perm_group_user_exists)
+ self.assertRaises(frappe.ValidationError, self.perm_group.remove_user, self.team_member.name)
+
+ def test_update_permissions(self):
+ frappe.set_user("Administrator")
+ self.perm_group.add_user(self.team_member.name)
+ self.perm_group.update_permissions({"Site": {"*": {"*": True}}})
+ frappe.set_user(self.team_member.name)
+ self.assertEqual(has_method_permission("Site", "site1.test", "reinstall"), True)
+
+ frappe.set_user("Administrator")
+ self.perm_group.update_permissions({"Site": {"site1.test": {"*": True, "reinstall": False}}})
+ frappe.set_user(self.team_member.name)
+ self.assertEqual(has_method_permission("Site", "site1.test", "reinstall"), False)
+
+ def test_update_permissions_with_invalid_doctype(self):
+ frappe.set_user("Administrator")
+ self.assertRaises(
+ frappe.ValidationError,
+ self.perm_group.update_permissions,
+ {"Invalid Doctype": {"*": {"*": True}}},
+ )
+
+ def test_update_permissions_with_invalid_method(self):
+ frappe.set_user("Administrator")
+ self.assertRaises(
+ frappe.ValidationError,
+ self.perm_group.update_permissions,
+ {"Site": {"*": {"invalid_method": True}}},
+ )
+
+ def test_unrestricted_method_should_be_allowed(self):
+ frappe.set_user("Administrator")
+ self.perm_group.add_user(self.team_member.name)
+ frappe.set_user(self.team_member.name)
+ self.assertEqual(has_method_permission("Site", "site1.test", "create"), True)
+
+ def test_most_permissive_permission_should_be_allowed(self):
+ frappe.set_user("Administrator")
+ self.perm_group2.add_user(self.team_member.name)
+ self.perm_group2.update_permissions({"Site": {"*": {"*": False}}})
+ self.perm_group.add_user(self.team_member.name)
+ self.perm_group.update_permissions({"Site": {"*": {"*": True}}})
+ frappe.set_user(self.team_member.name)
+ self.assertEqual(has_method_permission("Site", "site1.test", "reinstall"), True)
+
+ def test_specific_permission_should_be_allowed(self):
+ frappe.set_user("Administrator")
+ self.perm_group2.add_user(self.team_member.name)
+ self.perm_group2.update_permissions({"Site": {"*": {"*": False}}})
+ self.perm_group.add_user(self.team_member.name)
+ self.perm_group.update_permissions({"Site": {"site1.test": {"reinstall": True}}})
+ frappe.set_user(self.team_member.name)
+ self.assertEqual(has_method_permission("Site", "site1.test", "reinstall"), True)
+
+ def test_get_all_document_permissions(self):
+ # Test case 1: User belongs to the permission group
+ frappe.set_user("Administrator")
+ self.perm_group.add_user(self.team_member.name)
+ self.perm_group.update_permissions({"Site": {"*": {"*": True}}})
+
+ site = frappe.new_doc("Site")
+ site.name = "site1.test"
+ site.team = self.team.name
+ site.db_insert()
+
+ frappe.set_user(self.team_member.name)
+ frappe.local._current_team = self.team
+ permissions = self.perm_group.get_all_document_permissions("Site")
+ self.assertEqual(len(permissions), 1)
+ self.assertEqual(permissions[0]["document_type"], "Site")
+ self.assertEqual(permissions[0]["document_name"], "site1.test")
+ site_restrictable_methods = get_all_restrictable_methods("Site")
+ self.assertEqual(len(permissions[0]["permissions"]), len(site_restrictable_methods))
+
+ # Test case 2: User does not belong to the permission group
+ frappe.set_user("user@example.com")
+ self.assertRaises(frappe.ValidationError, self.perm_group.get_all_document_permissions, "Site")
+
+ # Test case 3: Invalid restrictable doctype
+ frappe.set_user("Administrator")
+ self.assertRaises(
+ frappe.ValidationError,
+ self.perm_group.get_all_document_permissions,
+ "InvalidDoctype",
+ )
+
+ # Test case 4: "DocType2" is not a restrictable doctype, so fetching its permissions raises
+ self.assertRaises(frappe.ValidationError, self.perm_group.get_all_document_permissions, "DocType2")
+
+
+# utils
+def create_permission_group(team):
+ doc = frappe.new_doc("Press Permission Group")
+ doc.title = "Test Group"
+ doc.team = team
+ doc.save()
+ return doc
+
+
+def create_user(email):
+ if frappe.db.exists("User", email):
+ return frappe.get_doc("User", email)
+ user = frappe.new_doc("User")
+ user.email = email
+ user.first_name = email.split("@")[0]
+ user.save()
+ return user
diff --git a/press/press/doctype/press_permission_group_user/__init__.py b/press/press/doctype/press_permission_group_user/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/press_permission_group_user/press_permission_group_user.json b/press/press/doctype/press_permission_group_user/press_permission_group_user.json
new file mode 100644
index 00000000000..fc40d7dec3b
--- /dev/null
+++ b/press/press/doctype/press_permission_group_user/press_permission_group_user.json
@@ -0,0 +1,35 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2023-08-07 13:20:37.357116",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "user"
+ ],
+ "fields": [
+ {
+ "fieldname": "user",
+ "fieldtype": "Link",
+ "in_filter": 1,
+ "in_list_view": 1,
+ "in_preview": 1,
+ "in_standard_filter": 1,
+ "label": "User",
+ "options": "User"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2023-08-07 13:21:16.669052",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Press Permission Group User",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/press_permission_group_user/press_permission_group_user.py b/press/press/doctype/press_permission_group_user/press_permission_group_user.py
new file mode 100644
index 00000000000..566b8c175c8
--- /dev/null
+++ b/press/press/doctype/press_permission_group_user/press_permission_group_user.py
@@ -0,0 +1,23 @@
+# Copyright (c) 2023, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class PressPermissionGroupUser(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ user: DF.Link | None
+ # end: auto-generated types
+
+ dashboard_fields = ["user"]
diff --git a/press/press/doctype/press_role/__init__.py b/press/press/doctype/press_role/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/press_role/patches/change_fields_from_enable_to_allow.py b/press/press/doctype/press_role/patches/change_fields_from_enable_to_allow.py
new file mode 100644
index 00000000000..a026e9c8c37
--- /dev/null
+++ b/press/press/doctype/press_role/patches/change_fields_from_enable_to_allow.py
@@ -0,0 +1,10 @@
+# Copyright (c) 2024, Frappe Technologies Pvt. Ltd. and Contributors
+# For license information, please see license.txt
+
+import frappe
+
+
+def execute():
+ frappe.db.sql(
+ "UPDATE `tabPress Role` SET allow_billing = enable_billing, allow_apps = enable_apps"
+ )
diff --git a/press/press/doctype/press_role/patches/migrate_permissions.py b/press/press/doctype/press_role/patches/migrate_permissions.py
new file mode 100644
index 00000000000..4d332f534f7
--- /dev/null
+++ b/press/press/doctype/press_role/patches/migrate_permissions.py
@@ -0,0 +1,62 @@
+# Copyright (c) 2024, Frappe Technologies Pvt. Ltd. and Contributors
+# For license information, please see license.txt
+
+import frappe
+
+from press.utils import _system_user
+
+
+def execute():
+ frappe.local.system_user = _system_user
+
+ teams = frappe.get_all(
+ "Team",
+ filters={"enabled": 1},
+ pluck="name",
+ )
+ for team in teams:
+ migrate_group_permissions(team)
+
+
+def migrate_group_permissions(team):
+ groups = frappe.qb.get_query(
+ "Press Permission Group",
+ fields=["name", "title", "team", {"users": ["user"]}],
+ filters={"team": team},
+ ).run(as_dict=1)
+
+ for group in groups:
+ old_group_permissions = frappe.get_all(
+ "Press User Permission",
+ filters={"group": group.name, "type": "Group"},
+ fields=["document_type", "document_name"],
+ distinct=True,
+ )
+
+ if not old_group_permissions:
+ continue
+
+ if frappe.db.exists("Press Role", {"title": group.title, "team": group.team}):
+ continue
+
+ role = frappe.new_doc("Press Role")
+ role.title = group.title
+ role.team = team
+ role.enable_billing = 1
+ role.enable_apps = 1
+ for row in group.users:
+ role.append("users", {"user": row.user})
+ role.insert()
+
+ for perm in old_group_permissions:
+ if perm.document_type not in ["Site", "Release Group", "Server"]:
+ continue
+ fieldname = perm.document_type.lower().replace(" ", "_")
+ frappe.get_doc(
+ {
+ "doctype": "Press Role Permission",
+ "role": role.name,
+ "team": team,
+ fieldname: perm.document_name,
+ }
+ ).insert()
diff --git a/press/press/doctype/press_role/patches/to_resources.py b/press/press/doctype/press_role/patches/to_resources.py
new file mode 100644
index 00000000000..4456f6271c2
--- /dev/null
+++ b/press/press/doctype/press_role/patches/to_resources.py
@@ -0,0 +1,28 @@
+import frappe
+
+
+def execute():
+ maps = {
+ "site": "Site",
+ "server": "Server",
+ "release_group": "Release Group",
+ }
+
+ for permission in frappe.get_all("Press Role Permission", fields=["role", *maps.keys()]):
+ role = frappe.get_doc("Press Role", permission.role)
+ to_append = [
+ {"document_type": maps[key], "document_name": permission[key]}
+ for key in maps
+ if permission.get(key)
+ ]
+
+ for item in to_append:
+ role.append("resources", item)
+
+ role.save()
+ frappe.db.commit()
diff --git a/press/press/doctype/press_role/press_role.js b/press/press/doctype/press_role/press_role.js
new file mode 100644
index 00000000000..67680cd8d00
--- /dev/null
+++ b/press/press/doctype/press_role/press_role.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2024, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Press Role", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/press_role/press_role.json b/press/press/doctype/press_role/press_role.json
new file mode 100644
index 00000000000..d75daf6f385
--- /dev/null
+++ b/press/press/doctype/press_role/press_role.json
@@ -0,0 +1,216 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "autoname": "hash",
+ "creation": "2024-05-13 11:44:03.637522",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "title",
+ "column_break_qnnn",
+ "team",
+ "section_break_yvqq",
+ "admin_access",
+ "allow_billing",
+ "allow_apps",
+ "allow_partner",
+ "column_break_todb",
+ "allow_site_creation",
+ "allow_bench_creation",
+ "allow_server_creation",
+ "allow_webhook_configuration",
+ "partner_permissions_section",
+ "allow_dashboard",
+ "allow_leads",
+ "column_break_hocg",
+ "allow_customer",
+ "allow_contribution",
+ "section_break_zdiv",
+ "users",
+ "section_break_wcgd",
+ "resources"
+ ],
+ "fields": [
+ {
+ "fieldname": "title",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Title",
+ "reqd": 1
+ },
+ {
+ "fieldname": "column_break_qnnn",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "label": "Team",
+ "options": "Team",
+ "reqd": 1
+ },
+ {
+ "fieldname": "section_break_zdiv",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "users",
+ "fieldtype": "Table",
+ "label": "Users",
+ "options": "Press Role User"
+ },
+ {
+ "fieldname": "section_break_yvqq",
+ "fieldtype": "Section Break",
+ "label": "Additional Permissions"
+ },
+ {
+ "fieldname": "column_break_todb",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "allow_billing",
+ "fieldtype": "Check",
+ "label": "Allow Billing"
+ },
+ {
+ "default": "0",
+ "fieldname": "allow_apps",
+ "fieldtype": "Check",
+ "label": "Allow Apps"
+ },
+ {
+ "default": "0",
+ "fieldname": "allow_site_creation",
+ "fieldtype": "Check",
+ "label": "Allow Site Creation"
+ },
+ {
+ "default": "0",
+ "fieldname": "allow_bench_creation",
+ "fieldtype": "Check",
+ "label": "Allow Bench Creation"
+ },
+ {
+ "default": "0",
+ "fieldname": "allow_server_creation",
+ "fieldtype": "Check",
+ "label": "Allow Server Creation"
+ },
+ {
+ "default": "0",
+ "fieldname": "allow_partner",
+ "fieldtype": "Check",
+ "label": "Allow Partner"
+ },
+ {
+ "default": "0",
+ "fieldname": "admin_access",
+ "fieldtype": "Check",
+ "label": "Admin Access"
+ },
+ {
+ "default": "0",
+ "fieldname": "allow_webhook_configuration",
+ "fieldtype": "Check",
+ "label": "Allow Webhook Configuration"
+ },
+ {
+ "depends_on": "allow_partner",
+ "fieldname": "partner_permissions_section",
+ "fieldtype": "Section Break",
+ "label": "Partner Permissions"
+ },
+ {
+ "default": "0",
+ "fieldname": "allow_dashboard",
+ "fieldtype": "Check",
+ "label": "Allow Dashboard"
+ },
+ {
+ "default": "0",
+ "fieldname": "allow_leads",
+ "fieldtype": "Check",
+ "label": "Allow Leads"
+ },
+ {
+ "fieldname": "column_break_hocg",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "allow_customer",
+ "fieldtype": "Check",
+ "label": "Allow Customer"
+ },
+ {
+ "default": "0",
+ "fieldname": "allow_contribution",
+ "fieldtype": "Check",
+ "label": "Allow Contribution"
+ },
+ {
+ "fieldname": "section_break_wcgd",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "resources",
+ "fieldtype": "Table",
+ "label": "Resources",
+ "options": "Press Role Resource"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2026-01-23 12:41:33.964051",
+ "modified_by": "hello@ssiyad.com",
+ "module": "Press",
+ "name": "Press Role",
+ "naming_rule": "Random",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "show_title_field_in_link": 1,
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": [],
+ "title_field": "title"
+}
diff --git a/press/press/doctype/press_role/press_role.py b/press/press/doctype/press_role/press_role.py
new file mode 100644
index 00000000000..85ea5d81d71
--- /dev/null
+++ b/press/press/doctype/press_role/press_role.py
@@ -0,0 +1,223 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import frappe
+from frappe import _
+from frappe.model.document import Document
+from frappe.query_builder.functions import Count
+
+from press.api.client import dashboard_whitelist
+from press.guards import team_guard
+from press.utils import get_current_team
+
+if TYPE_CHECKING:
+ from press.press.doctype.team.team import Team
+
+
+class PressRole(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.press_role_resource.press_role_resource import PressRoleResource
+ from press.press.doctype.press_role_user.press_role_user import PressRoleUser
+
+ admin_access: DF.Check
+ allow_apps: DF.Check
+ allow_bench_creation: DF.Check
+ allow_billing: DF.Check
+ allow_contribution: DF.Check
+ allow_customer: DF.Check
+ allow_dashboard: DF.Check
+ allow_leads: DF.Check
+ allow_partner: DF.Check
+ allow_server_creation: DF.Check
+ allow_site_creation: DF.Check
+ allow_webhook_configuration: DF.Check
+ resources: DF.Table[PressRoleResource]
+ team: DF.Link
+ title: DF.Data
+ users: DF.Table[PressRoleUser]
+ # end: auto-generated types
+
+ dashboard_fields = (
+ "admin_access",
+ "allow_apps",
+ "allow_bench_creation",
+ "allow_billing",
+ "allow_contribution",
+ "allow_customer",
+ "allow_dashboard",
+ "allow_leads",
+ "allow_partner",
+ "allow_server_creation",
+ "allow_site_creation",
+ "allow_webhook_configuration",
+ "resources",
+ "team",
+ "title",
+ "users",
+ )
+
+ @team_guard.only_admin()
+ def validate(self):
+ self.validate_duplicate_title()
+
+ def validate_duplicate_title(self):
+ exists = frappe.db.exists({"doctype": "Press Role", "title": self.title, "team": self.team})
+ if self.is_new() and exists:
+ message = _("Role with title {0} already exists in this team").format(self.title)
+ frappe.throw(message, frappe.DuplicateEntryError)
+
+ def add_press_admin_role(self, user):
+ user = frappe.get_doc("User", user)
+ user.append_roles("Press Admin")
+ user.save(ignore_permissions=True)
+
+ def remove_press_admin_role(self, user):
+ if frappe.db.exists("Team", {"enabled": 1, "user": user}):
+ return
+ user = frappe.get_doc("User", user)
+ existing_roles = {d.role: d for d in user.get("roles")}
+ if "Press Admin" in existing_roles:
+ user.get("roles").remove(existing_roles["Press Admin"])
+ user.save(ignore_permissions=True)
+
+ @dashboard_whitelist()
+ @team_guard.only_admin(skip=lambda _, args: args.get("skip_validations", False))
+ @team_guard.only_member(
+ user=lambda _, args: str(args.get("user")),
+ error_message=_("User is not a member of the team"),
+ )
+ def add_user(self, user, skip_validations=False):
+ user_dict = {"user": user}
+ if self.get("users", user_dict):
+ message = _("{0} already belongs to {1}").format(user, self.title)
+ frappe.throw(message, frappe.ValidationError)
+ self.append("users", user_dict)
+ self.save()
+ if self.admin_access or self.allow_billing:
+ self.add_press_admin_role(user)
+
+ @dashboard_whitelist()
+ @team_guard.only_admin()
+ def remove_user(self, user):
+ users = self.get("users", {"user": user})
+ if not users:
+ message = _("User {0} does not belong to {1}").format(user, self.title)
+ frappe.throw(message, frappe.ValidationError)
+ self.remove(users.pop())
+ self.save()
+ if self.admin_access or self.allow_billing:
+ self.remove_press_admin_role(user)
+
+ @dashboard_whitelist()
+ @team_guard.only_admin(skip=lambda _, args: args.get("skip_validations", False))
+ def add_resource(self, resources: list[dict[str, str]]):
+ for resource in resources:
+ document_type = resource["document_type"]
+ document_name = resource["document_name"]
+ resource_dict = {"document_type": document_type, "document_name": document_name}
+ if self.get("resources", resource_dict):
+ message = _("{0} already belongs to {1}").format(document_name, self.title)
+ frappe.throw(message, frappe.ValidationError)
+ self.append("resources", resource_dict)
+ self.save()
+
+ @dashboard_whitelist()
+ @team_guard.only_admin()
+ def remove_resource(self, document_type: str, document_name: str):
+ resources = self.get("resources", {"document_type": document_type, "document_name": document_name})
+ if not resources:
+ message = _("Resource {0} does not belong to {1}").format(document_name, self.title)
+ frappe.throw(message, frappe.ValidationError)
+ self.remove(resources.pop())
+ self.save()
+
+ @dashboard_whitelist()
+ @team_guard.only_owner()
+ def delete(self, *_args, **_kwargs):
+ return super().delete()
+
+ def on_trash(self) -> None:
+ frappe.db.delete("Account Request Press Role", {"press_role": self.name})
+
+
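+# Hook-style helper (the (doc, method) signature suggests a doc-event hook):
+# creates a personal "user / document" role for a role-restricted user (in at
+# least one Press Role, but neither team owner nor admin) so a resource they
+# create stays visible to them.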
+def create_user_resource(document: Document, _):
+ user = frappe.session.user
+ team: Team = get_current_team(get_doc=True)
+
+ roles_enabled = bool(
+ frappe.db.exists(
+ {
+ "doctype": "Press Role",
+ "team": team.name,
+ }
+ )
+ )
+
+ if (
+ (not user)
+ or (not roles_enabled)
+ or (not user_has_roles())
+ or team.is_team_owner()
+ or team.is_admin_user()
+ ):
+ return
+
+ title = user + " / " + document.name
+
+ role_exists = bool(
+ frappe.db.exists(
+ {
+ "doctype": "Press Role",
+ "team": team.name,
+ "title": title,
+ }
+ )
+ )
+
+ if role_exists:
+ return
+
+ frappe.get_doc(
+ {
+ "doctype": "Press Role",
+ "title": title,
+ "team": team.name,
+ "users": [
+ {
+ "user": user,
+ }
+ ],
+ "resources": [
+ {
+ "document_type": document.doctype,
+ "document_name": document.name,
+ }
+ ],
+ }
+ ).save(ignore_permissions=True)
+
+
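+# user_has_roles() builds, roughly, the following SQL (sketch for reference):
+#   SELECT COUNT(`tabPress Role`.name) AS role_count
+#   FROM `tabPress Role`
+#   INNER JOIN `tabPress Role User`
+#     ON `tabPress Role`.name = `tabPress Role User`.parent
+#   WHERE `tabPress Role`.team = %(current_team)s
+#     AND `tabPress Role User`.user = %(session_user)s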
+def user_has_roles() -> bool:
+ PressRole = frappe.qb.DocType("Press Role")
+ PressRoleUser = frappe.qb.DocType("Press Role User")
+ return (
+ frappe.qb.from_(PressRole)
+ .inner_join(PressRoleUser)
+ .on(PressRole.name == PressRoleUser.parent)
+ .where(PressRole.team == get_current_team())
+ .where(PressRoleUser.user == frappe.session.user)
+ .select(Count(PressRole.name).as_("role_count"))
+ .run(as_dict=True)
+ .pop()
+ .get("role_count", 0)
+ ) > 0
diff --git a/press/press/doctype/press_role/test_press_role.py b/press/press/doctype/press_role/test_press_role.py
new file mode 100644
index 00000000000..2dbb10cff83
--- /dev/null
+++ b/press/press/doctype/press_role/test_press_role.py
@@ -0,0 +1,81 @@
+# Copyright (c) 2024, Frappe and Contributors
+# See license.txt
+
+import frappe
+from frappe.model.naming import make_autoname
+from frappe.tests.utils import FrappeTestCase
+
+from press.press.doctype.team.test_team import create_test_team
+
+
+class TestPressRole(FrappeTestCase):
+ def setUp(self):
+ frappe.set_user("Administrator")
+ frappe.db.delete("Press Role")
+ self.team_user = create_user("team@example.com")
+ self.team = create_test_team(self.team_user.email)
+ self.team.user = "Administrator"
+ self.team_member = create_user("user123@example.com")
+ self.team.append("team_members", {"user": self.team_member.name})
+ self.team.save()
+ self.external_team_member = create_user("external@example.com")
+ self.admin_perm_role = create_permission_role(self.team.name)
+ self.perm_role = create_permission_role(self.team.name)
+ self.perm_role2 = create_permission_role(self.team.name)
+
+ def tearDown(self):
+ frappe.set_user("Administrator")
+ frappe.delete_doc("Press Role", self.perm_role.name, force=True)
+ frappe.delete_doc("Press Role", self.perm_role2.name, force=True)
+ frappe.delete_doc("Team", self.team.name, force=True)
+ frappe.delete_doc("User", self.team_member.name, force=True)
+ frappe.delete_doc("User", self.team_user.name, force=True)
+ frappe.local._current_team = None
+
+ @property
+ def team_doc(self):
+ return frappe.get_doc("Team", self.team.name)
+
+ def test_add_user(self):
+ self.perm_role.add_user(self.team_member.name)
+ perm_role_users = get_users(self.perm_role)
+ perm_role_user_exists = any(
+ self.team_member.name == perm_role_user.user for perm_role_user in perm_role_users
+ )
+ self.assertTrue(perm_role_user_exists)
+ self.assertRaises(frappe.ValidationError, self.perm_role.add_user, self.team_member.name)
+
+ def test_remove_user(self):
+ self.perm_role.add_user(self.team_member.name)
+ self.perm_role.remove_user(self.team_member.name)
+ perm_role_users = get_users(self.perm_role)
+ perm_role_user_exists = any(
+ self.team_member.name == perm_role_user.user for perm_role_user in perm_role_users
+ )
+ self.assertFalse(perm_role_user_exists)
+ self.assertRaises(frappe.ValidationError, self.perm_role.remove_user, self.team_member.name)
+
+
+# utils
+def create_permission_role(team, allow_site_creation=0):
+ doc = frappe.new_doc("Press Role")
+ doc.title = make_autoname("Test-Role-.###")
+ doc.team = team
+ doc.allow_site_creation = allow_site_creation
+ doc.save()
+
+ return doc
+
+
+def create_user(email):
+ if frappe.db.exists("User", email):
+ return frappe.get_doc("User", email)
+ user = frappe.new_doc("User")
+ user.email = email
+ user.first_name = email.split("@")[0]
+ user.save()
+ return user
+
+
+def get_users(role):
+ return role.users
diff --git a/press/press/doctype/press_role_permission/__init__.py b/press/press/doctype/press_role_permission/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/press_role_permission/press_role_permission.js b/press/press/doctype/press_role_permission/press_role_permission.js
new file mode 100644
index 00000000000..56744a5d0e8
--- /dev/null
+++ b/press/press/doctype/press_role_permission/press_role_permission.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2024, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Press Role Permission", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/press_role_permission/press_role_permission.json b/press/press/doctype/press_role_permission/press_role_permission.json
new file mode 100644
index 00000000000..d2294b9f41c
--- /dev/null
+++ b/press/press/doctype/press_role_permission/press_role_permission.json
@@ -0,0 +1,106 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2024-05-13 11:34:13.051627",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "team",
+ "role",
+ "column_break_ayow",
+ "site",
+ "release_group",
+ "server"
+ ],
+ "fields": [
+ {
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Team",
+ "options": "Team",
+ "reqd": 1
+ },
+ {
+ "fieldname": "column_break_ayow",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "role",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Role",
+ "options": "Press Role",
+ "reqd": 1
+ },
+ {
+ "fieldname": "site",
+ "fieldtype": "Link",
+ "label": "Site",
+ "options": "Site",
+ "search_index": 1
+ },
+ {
+ "fieldname": "release_group",
+ "fieldtype": "Link",
+ "label": "Release Group",
+ "options": "Release Group"
+ },
+ {
+ "fieldname": "server",
+ "fieldtype": "Link",
+ "label": "Server",
+ "options": "Server"
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2025-03-17 12:39:57.349710",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Press Role Permission",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/press/doctype/press_role_permission/press_role_permission.py b/press/press/doctype/press_role_permission/press_role_permission.py
new file mode 100644
index 00000000000..6b2960aa05a
--- /dev/null
+++ b/press/press/doctype/press_role_permission/press_role_permission.py
@@ -0,0 +1,82 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+from __future__ import annotations
+
+import frappe
+from frappe.model.document import Document
+
+from press.api.client import dashboard_whitelist
+
+
+class PressRolePermission(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ release_group: DF.Link | None
+ role: DF.Link
+ server: DF.Link | None
+ site: DF.Link | None
+ team: DF.Link
+ # end: auto-generated types
+
+ dashboard_fields = ("site", "release_group", "server", "role")
+
+ def before_insert(self):
+ if (
+ not frappe.local.system_user()
+ and frappe.session.user != frappe.db.get_value("Team", self.team, "user")
+ and not is_user_part_of_admin_role()
+ ):
+ frappe.throw("Only the team owner or admin can create role permissions")
+
+ if frappe.db.exists(
+ "Press Role Permission",
+ {
+ "role": self.role,
+ "team": self.team,
+ "site": self.site,
+ "release_group": self.release_group,
+ "server": self.server,
+ },
+ ):
+ frappe.throw("Role Permission already exists")
+
+ @dashboard_whitelist()
+ def delete(self):
+ if (
+ not frappe.local.system_user()
+ and frappe.session.user != frappe.get_cached_value("Team", self.team, "user")
+ and not is_user_part_of_admin_role()
+ ):
+ frappe.throw("Only the team owner or admin can delete this role permission")
+
+ super().delete()
+
+
+def is_user_part_of_admin_role(user: str | None = None) -> bool:
+ """Check if the user is part of any admin role."""
+ from press.utils import get_current_team
+
+ if not user:
+ user = frappe.session.user
+
+ team = get_current_team()
+
+ admin_roles = frappe.get_all(
+ "Press Role",
+ filters={"team": team, "admin_access": 1},
+ fields=["name"],
+ )
+
+ users = frappe.get_all(
+ "Press Role User",
+ filters={"parent": ["in", [role.name for role in admin_roles]], "user": user},
+ fields=["name"],
+ )
+
+ return bool(users)
diff --git a/press/press/doctype/press_role_permission/test_press_role_permission.py b/press/press/doctype/press_role_permission/test_press_role_permission.py
new file mode 100644
index 00000000000..f0da26db243
--- /dev/null
+++ b/press/press/doctype/press_role_permission/test_press_role_permission.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2024, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class TestPressRolePermission(FrappeTestCase):
+ pass
diff --git a/press/press/doctype/press_role_resource/__init__.py b/press/press/doctype/press_role_resource/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/press_role_resource/press_role_resource.json b/press/press/doctype/press_role_resource/press_role_resource.json
new file mode 100644
index 00000000000..e3f0ef3fb9d
--- /dev/null
+++ b/press/press/doctype/press_role_resource/press_role_resource.json
@@ -0,0 +1,45 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-12-17 21:37:40.933463",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "document_type",
+ "document_name"
+ ],
+ "fields": [
+ {
+ "fieldname": "document_type",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Document Type",
+ "options": "DocType",
+ "reqd": 1
+ },
+ {
+ "fieldname": "document_name",
+ "fieldtype": "Dynamic Link",
+ "in_list_view": 1,
+ "label": "Document Name",
+ "options": "document_type",
+ "reqd": 1
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2025-12-17 21:40:04.499256",
+ "modified_by": "hello@ssiyad.com",
+ "module": "Press",
+ "name": "Press Role Resource",
+ "owner": "Administrator",
+ "permissions": [],
+ "row_format": "Dynamic",
+ "rows_threshold_for_grid_search": 20,
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/press/doctype/press_role_resource/press_role_resource.py b/press/press/doctype/press_role_resource/press_role_resource.py
new file mode 100644
index 00000000000..52ce6af2c08
--- /dev/null
+++ b/press/press/doctype/press_role_resource/press_role_resource.py
@@ -0,0 +1,24 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class PressRoleResource(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ document_name: DF.DynamicLink
+ document_type: DF.Link
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/press_role_user/__init__.py b/press/press/doctype/press_role_user/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/press_role_user/press_role_user.json b/press/press/doctype/press_role_user/press_role_user.json
new file mode 100644
index 00000000000..10496124a5d
--- /dev/null
+++ b/press/press/doctype/press_role_user/press_role_user.json
@@ -0,0 +1,49 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2024-05-13 11:45:15.610737",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "user",
+ "full_name",
+ "user_image"
+ ],
+ "fields": [
+ {
+ "fieldname": "user",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "User",
+ "options": "User",
+ "reqd": 1
+ },
+ {
+ "fetch_from": "user.full_name",
+ "fieldname": "full_name",
+ "fieldtype": "Data",
+ "label": "Full Name",
+ "read_only": 1
+ },
+ {
+ "fetch_from": "user.user_image",
+ "fieldname": "user_image",
+ "fieldtype": "Attach Image",
+ "label": "User Image",
+ "read_only": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2024-05-13 12:25:17.802189",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Press Role User",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/press_role_user/press_role_user.py b/press/press/doctype/press_role_user/press_role_user.py
new file mode 100644
index 00000000000..72a606ea88a
--- /dev/null
+++ b/press/press/doctype/press_role_user/press_role_user.py
@@ -0,0 +1,25 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class PressRoleUser(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ full_name: DF.Data | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ user: DF.Link
+ user_image: DF.AttachImage | None
+ # end: auto-generated types
+
+ dashboard_fields = ["user", "full_name", "user_image"]
diff --git a/press/press/doctype/press_settings/patches/set_minimum_rebuild_memory.py b/press/press/doctype/press_settings/patches/set_minimum_rebuild_memory.py
new file mode 100644
index 00000000000..4cb0b1fe77c
--- /dev/null
+++ b/press/press/doctype/press_settings/patches/set_minimum_rebuild_memory.py
@@ -0,0 +1,19 @@
+# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors
+# For license information, please see license.txt
+
+
+import typing
+
+import frappe
+
+if typing.TYPE_CHECKING:
+ from press.press.doctype.press_settings.press_settings import PressSettings
+
+
+def execute():
+ frappe.reload_doctype("Press Settings")
+ settings: PressSettings = frappe.get_single("Press Settings")
+
+ if not settings.minimum_rebuild_memory:
+ settings.minimum_rebuild_memory = 2
+ settings.save()
diff --git a/press/press/doctype/press_settings/patches/set_redis_cache_size.py b/press/press/doctype/press_settings/patches/set_redis_cache_size.py
new file mode 100644
index 00000000000..0684ded4c3e
--- /dev/null
+++ b/press/press/doctype/press_settings/patches/set_redis_cache_size.py
@@ -0,0 +1,11 @@
+import frappe
+from frappe.core.utils import find
+
+
+def execute():
+ frappe.reload_doctype("Press Settings")
+ settings = frappe.get_single("Press Settings")
+ if not settings.redis_cache_size:
+ redis_cache_size_field = find(settings.meta.fields, lambda x: x.fieldname == "redis_cache_size")
+ settings.redis_cache_size = redis_cache_size_field.default
+ settings.save()
diff --git a/press/press/doctype/press_settings/press_settings.json b/press/press/doctype/press_settings/press_settings.json
index d45c290236f..50d9e9688ca 100644
--- a/press/press/doctype/press_settings/press_settings.json
+++ b/press/press/doctype/press_settings/press_settings.json
@@ -8,26 +8,43 @@
"domain",
"cluster",
"trial_sites_count",
+ "press_trial_plan",
"column_break_2",
"bench_configuration",
"billing_tab",
+ "free_credits_usd",
+ "free_credits_inr",
+ "column_break_cpry",
+ "micro_debit_charge_usd",
+ "micro_debit_charge_inr",
+ "column_break_wrqp",
+ "usage_record_creation_batch_size",
+ "default_server_plan_type",
+ "invoicing_section",
+ "invoicing_column",
+ "gst_percentage",
+ "npo_discount",
+ "autoscale_discount",
+ "column_break_qfwx",
+ "print_format",
+ "ic_key",
"stripe_settings_section",
- "stripe_inr_plan_id",
"stripe_publishable_key",
+ "stripe_secret_key",
+ "column_break_26",
"create_stripe_plans",
"stripe_product_id",
"stripe_usd_plan_id",
+ "stripe_inr_plan_id",
+ "column_break_yhwz",
"create_stripe_webhook",
"stripe_webhook_endpoint_id",
"stripe_webhook_secret",
- "column_break_26",
- "stripe_secret_key",
- "free_credits_inr",
- "free_credits_usd",
"ngrok_auth_token",
"razorpay_settings_section",
"razorpay_key_id",
"razorpay_webhook_secret",
+ "paypal_enabled",
"column_break_123",
"razorpay_key_secret",
"erpnext_authentication",
@@ -36,6 +53,7 @@
"erpnext_api_secret",
"column_break_38",
"frappeio_authentication_section",
+ "disable_frappe_auth",
"frappe_url",
"frappeio_api_key",
"column_break_39",
@@ -56,19 +74,39 @@
"backup_offset",
"column_break_48",
"backup_limit",
+ "max_failed_backup_attempts_in_a_day",
+ "physical_backups_section",
+ "disable_physical_backup",
+ "max_concurrent_physical_restorations",
"docker_tab",
"section_break_59",
"docker_registry_url",
"docker_registry_namespace",
+ "docker_s3_access_key",
"column_break_64",
"docker_registry_username",
"docker_registry_password",
+ "docker_s3_secret_key",
+ "asset_store_section",
+ "asset_store_access_key",
+ "asset_store_secret_access_key",
+ "asset_store_endpoint",
+ "asset_store_region",
+ "asset_store_bucket_name",
+ "use_asset_store",
"docker_build_section",
+ "suspend_builds",
"clone_directory",
"build_directory",
+ "build_server",
+ "minimum_rebuild_memory",
"column_break_66",
"code_server",
"code_server_password",
+ "use_app_cache",
+ "compress_app_cache",
+ "use_delta_builds",
+ "use_agent_job_callbacks",
"auto_update_section",
"auto_update_queue_size",
"remote_files_section",
@@ -118,16 +156,35 @@
"github_app_public_link",
"github_webhook_secret",
"github_access_token",
+ "deploy_marker",
"section_break_41",
+ "column_break_tcmy",
+ "column_break_edst",
"github_app_private_key",
+ "aws_section",
+ "aws_access_key_id",
+ "column_break_agig",
+ "aws_secret_access_key",
+ "twilio_section",
+ "twilio_account_sid",
+ "twilio_api_key_sid",
+ "twilio_api_key_secret",
+ "column_break_kxuj",
+ "twilio_phone_number",
+ "spamd_section",
+ "enable_spam_check",
+ "spamd_endpoint",
+ "column_break_xhfy",
+ "spamd_api_key",
+ "spamd_api_secret",
"marketplace_tab",
"marketplace_settings_section",
"max_allowed_screenshots",
- "allow_developer_account",
"threshold",
"commission",
"usd_rate",
"app_include_script",
+ "github_pat_token",
"plausible_column",
"plausible_url",
"plausible_site_id",
@@ -135,8 +192,10 @@
"infrastructure_tab",
"agent_section",
"agent_repository_owner",
+ "agent_sentry_dsn",
"column_break_105",
"agent_github_access_token",
+ "branch",
"lets_encrypt_section",
"certbot_directory",
"webroot_directory",
@@ -146,17 +205,60 @@
"use_staging_ca",
"ssh_section",
"ssh_certificate_authority",
+ "bench_section",
+ "redis_cache_size",
+ "set_redis_password",
"monitoring_section",
"monitor_server",
"monitor_token",
"press_monitoring_password",
- "column_break_100",
+ "send_telegram_notifications",
+ "column_break_jlzi",
"log_server",
"telegram_alert_chat_id",
"telegram_alerts_chat_group",
+ "send_email_notifications",
+ "email_recipients",
+ "section_break_nloq",
+ "servers_using_alternative_http_port_for_communication",
+ "auto_scale_section",
+ "shared_directory",
+ "cool_off_period",
"feature_flags_tab",
"verify_cards_with_micro_charge",
- "enable_google_oauth"
+ "enable_google_oauth",
+ "realtime_job_updates",
+ "disable_agent_job_deduplication",
+ "disable_binlog_indexer_service",
+ "column_break_rdlr",
+ "disable_auto_retry",
+ "disallow_disposable_emails",
+ "enable_email_pre_verification",
+ "execute_incident_action",
+ "enable_server_snapshot_recovery",
+ "section_break_jstu",
+ "enable_app_grouping",
+ "default_apps",
+ "partner_tab",
+ "partnership_fees_section",
+ "partnership_fee_usd",
+ "column_break_yxrj",
+ "partnership_fee_inr",
+ "section_break_dhzi",
+ "drive_resource_link",
+ "frappe_school_authentication_section",
+ "school_url",
+ "school_api_key",
+ "column_break_uxxz",
+ "school_api_secret",
+ "hybrid_server_tab",
+ "hybrid_cluster",
+ "hybrid_domain",
+ "tls_renewal_queue_size",
+ "code_spaces_tab",
+ "spaces_domain",
+ "security_tab",
+ "wazuh_server"
],
"fields": [
{
@@ -196,6 +298,7 @@
"label": "Billing"
},
{
+ "collapsible": 1,
"fieldname": "stripe_settings_section",
"fieldtype": "Section Break",
"label": "Stripe Settings"
@@ -590,7 +693,7 @@
"fieldname": "erpnext_plan",
"fieldtype": "Link",
"label": "ERPNext Plan",
- "options": "Plan"
+ "options": "Site Plan"
},
{
"fieldname": "erpnext_group",
@@ -624,7 +727,7 @@
"fieldname": "staging_plan",
"fieldtype": "Link",
"label": "Staging Plan",
- "options": "Plan"
+ "options": "Site Plan"
},
{
"default": "24",
@@ -745,6 +848,7 @@
"read_only": 1
},
{
+ "default": "fc-deploy",
"fieldname": "column_break_36",
"fieldtype": "Column Break"
},
@@ -795,12 +899,6 @@
"label": "Max number of Allowed Screenshots",
"non_negative": 1
},
- {
- "default": "0",
- "fieldname": "allow_developer_account",
- "fieldtype": "Check",
- "label": "Allow Developer Account from Dashboard"
- },
{
"fieldname": "infrastructure_tab",
"fieldtype": "Tab Break",
@@ -895,10 +993,6 @@
"label": "Monitor Server",
"options": "Monitor Server"
},
- {
- "fieldname": "column_break_100",
- "fieldtype": "Column Break"
- },
{
"fieldname": "monitor_token",
"fieldtype": "Data",
@@ -935,7 +1029,7 @@
{
"fieldname": "usd_rate",
"fieldtype": "Float",
- "label": "Commission USD Rate"
+ "label": "USD Rate"
},
{
"fieldname": "press_monitoring_password",
@@ -979,11 +1073,571 @@
"fieldname": "plausible_site_id",
"fieldtype": "Data",
"label": "Plausible site id"
+ },
+ {
+ "fieldname": "code_spaces_tab",
+ "fieldtype": "Tab Break",
+ "label": "Code Spaces"
+ },
+ {
+ "fieldname": "spaces_domain",
+ "fieldtype": "Link",
+ "label": "Spaces Domain",
+ "options": "Root Domain"
+ },
+ {
+ "default": "0",
+ "fieldname": "suspend_builds",
+ "fieldtype": "Check",
+ "label": "Suspend Builds",
+ "read_only": 1
+ },
+ {
+ "fieldname": "aws_section",
+ "fieldtype": "Section Break",
+ "label": "AWS"
+ },
+ {
+ "fieldname": "aws_access_key_id",
+ "fieldtype": "Data",
+ "label": "AWS Access Key ID"
+ },
+ {
+ "fieldname": "column_break_agig",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "aws_secret_access_key",
+ "fieldtype": "Password",
+ "label": "AWS Secret Access Key"
+ },
+ {
+ "fieldname": "twilio_section",
+ "fieldtype": "Section Break",
+ "label": "Twilio"
+ },
+ {
+ "fieldname": "twilio_account_sid",
+ "fieldtype": "Data",
+ "label": "Twilio Account SID"
+ },
+ {
+ "fieldname": "column_break_kxuj",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "twilio_phone_number",
+ "fieldtype": "Phone",
+ "label": "Twilio Phone Number"
+ },
+ {
+ "fieldname": "invoicing_column",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "gst_percentage",
+ "fieldtype": "Float",
+ "label": "GST Percentage"
+ },
+ {
+ "fieldname": "column_break_tcmy",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "column_break_edst",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "twilio_api_key_sid",
+ "fieldtype": "Data",
+ "label": "Twilio API Key SID"
+ },
+ {
+ "fieldname": "twilio_api_key_secret",
+ "fieldtype": "Password",
+ "label": "Twilio API Key Secret"
+ },
+ {
+ "fieldname": "invoicing_section",
+ "fieldtype": "Section Break",
+ "label": "Invoicing"
+ },
+ {
+ "fieldname": "column_break_qfwx",
+ "fieldtype": "Column Break"
+ },
+ {
+ "description": "Fetched from frappe.io",
+ "fieldname": "print_format",
+ "fieldtype": "Data",
+ "label": "Print Format"
+ },
+ {
+ "default": "0",
+ "description": "Uses Bench get-app cache for faster image builds. Will be set only if Bench version is 5.22.1 or later.",
+ "fieldname": "use_app_cache",
+ "fieldtype": "Check",
+ "label": "Use App Cache"
+ },
+ {
+ "default": "0",
+ "depends_on": "eval: doc.use_app_cache",
+ "description": "Use Gzip to compress bench get-app artifacts before caching.",
+ "fieldname": "compress_app_cache",
+ "fieldtype": "Check",
+ "label": "Compress App Cache"
+ },
+ {
+ "fieldname": "column_break_rdlr",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "realtime_job_updates",
+ "fieldtype": "Check",
+ "label": "Realtime Job Updates"
+ },
+ {
+ "default": "0",
+ "description": "Quickens builds by fetching app changes without rebuilding app if app rebuild is not required.",
+ "fieldname": "use_delta_builds",
+ "fieldtype": "Check",
+ "label": "Use Delta Builds"
+ },
+ {
+ "fieldname": "hybrid_server_tab",
+ "fieldtype": "Tab Break",
+ "label": "Hybrid Server"
+ },
+ {
+ "fieldname": "hybrid_cluster",
+ "fieldtype": "Link",
+ "label": "Hybrid Cluster",
+ "options": "Cluster"
+ },
+ {
+ "fieldname": "hybrid_domain",
+ "fieldtype": "Link",
+ "label": "Hybrid Domain",
+ "options": "Root Domain"
+ },
+ {
+ "default": "0",
+ "fieldname": "disable_auto_retry",
+ "fieldtype": "Check",
+ "label": "Disable Auto Retry"
+ },
+ {
+ "default": "1",
+ "fieldname": "disable_agent_job_deduplication",
+ "fieldtype": "Check",
+ "label": "Disable Agent Job Deduplication"
+ },
+ {
+ "fieldname": "agent_sentry_dsn",
+ "fieldtype": "Data",
+ "label": "Agent Sentry DSN"
+ },
+ {
+ "fieldname": "build_server",
+ "fieldtype": "Link",
+ "label": "Build Server",
+ "options": "Server"
+ },
+ {
+ "default": "10",
+ "fieldname": "tls_renewal_queue_size",
+ "fieldtype": "Int",
+ "label": "TLS Renewal Queue Size"
+ },
+ {
+ "default": "80",
+ "fieldname": "micro_debit_charge_inr",
+ "fieldtype": "Currency",
+ "label": "Micro Debit Charge (INR)",
+ "precision": "0"
+ },
+ {
+ "default": "1",
+ "fieldname": "micro_debit_charge_usd",
+ "fieldtype": "Currency",
+ "label": "Micro Debit Charge (USD)",
+ "precision": "0"
+ },
+ {
+ "default": "master",
+ "fieldname": "branch",
+ "fieldtype": "Data",
+ "label": "Branch"
+ },
+ {
+ "fieldname": "column_break_yhwz",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "column_break_cpry",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "column_break_wrqp",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "500",
+ "fieldname": "usage_record_creation_batch_size",
+ "fieldtype": "Int",
+ "label": "Usage Record Creation Batch Size"
+ },
+ {
+ "fieldname": "press_trial_plan",
+ "fieldtype": "Link",
+ "label": "Press Trial Plan",
+ "options": "Site Plan"
+ },
+ {
+ "fieldname": "section_break_jstu",
+ "fieldtype": "Section Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "enable_app_grouping",
+ "fieldtype": "Check",
+ "label": "Enable App Grouping"
+ },
+ {
+ "fieldname": "default_apps",
+ "fieldtype": "Table",
+ "label": "Default Apps",
+ "options": "App Group"
+ },
+ {
+ "default": "0",
+ "fieldname": "enable_email_pre_verification",
+ "fieldtype": "Check",
+ "label": "Enable Email Pre-Verification"
+ },
+ {
+ "fieldname": "bench_section",
+ "fieldtype": "Section Break",
+ "label": "Bench"
+ },
+ {
+ "default": "512",
+ "fieldname": "redis_cache_size",
+ "fieldtype": "Int",
+ "label": "Redis Cache Size (MB)"
+ },
+ {
+ "fieldname": "partner_tab",
+ "fieldtype": "Tab Break",
+ "label": "Partner"
+ },
+ {
+ "fieldname": "partnership_fees_section",
+ "fieldtype": "Section Break",
+ "label": "Partnership Fees"
+ },
+ {
+ "fieldname": "partnership_fee_usd",
+ "fieldtype": "Int",
+ "label": "Partnership Fee USD"
+ },
+ {
+ "fieldname": "column_break_yxrj",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "partnership_fee_inr",
+ "fieldtype": "Int",
+ "label": "Partnership Fee INR"
+ },
+ {
+ "fieldname": "github_pat_token",
+ "fieldtype": "Data",
+ "label": "Github PAT Token"
+ },
+ {
+ "default": "1",
+ "fieldname": "disable_physical_backup",
+ "fieldtype": "Check",
+ "label": "Disable Physical Backup"
+ },
+ {
+ "fieldname": "physical_backups_section",
+ "fieldtype": "Section Break",
+ "label": "Physical Backups"
+ },
+ {
+ "fieldname": "spamd_section",
+ "fieldtype": "Section Break",
+ "label": "Spamd"
+ },
+ {
+ "default": "0",
+ "fieldname": "enable_spam_check",
+ "fieldtype": "Check",
+ "label": "Enable Spam Check"
+ },
+ {
+ "fieldname": "spamd_endpoint",
+ "fieldtype": "Data",
+ "label": "Spamd Endpoint"
+ },
+ {
+ "fieldname": "column_break_xhfy",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "spamd_api_key",
+ "fieldtype": "Data",
+ "label": "Spamd API Key"
+ },
+ {
+ "fieldname": "spamd_api_secret",
+ "fieldtype": "Password",
+ "label": "Spamd API Secret"
+ },
+ {
+ "default": "2",
+ "fieldname": "max_concurrent_physical_restorations",
+ "fieldtype": "Int",
+ "label": "Max Concurrent Physical Restorations"
+ },
+ {
+ "default": "1",
+ "fieldname": "send_telegram_notifications",
+ "fieldtype": "Check",
+ "label": "Send Telegram Notifications"
+ },
+ {
+ "fieldname": "column_break_jlzi",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "send_email_notifications",
+ "fieldtype": "Check",
+ "label": "Send Email Notifications"
+ },
+ {
+ "depends_on": "eval: doc.send_email_notifications== true",
+ "fieldname": "email_recipients",
+ "fieldtype": "Small Text",
+ "label": "Email Recipients"
+ },
+ {
+ "default": "1",
+ "fieldname": "use_agent_job_callbacks",
+ "fieldtype": "Check",
+ "label": "Use Agent Job Callbacks"
+ },
+ {
+ "default": "2",
+ "fieldname": "minimum_rebuild_memory",
+ "fieldtype": "Int",
+ "label": "Minimum Rebuild Memory (GB)",
+ "non_negative": 1
+ },
+ {
+ "description": "Max attempts we will do on a site with failed attempts",
+ "fieldname": "max_failed_backup_attempts_in_a_day",
+ "fieldtype": "Int",
+ "label": "Max Failed Backup Attempts In A Day"
+ },
+ {
+ "default": "0",
+ "fieldname": "disable_frappe_auth",
+ "fieldtype": "Check",
+ "label": "Disable Frappe Auth"
+ },
+ {
+ "fieldname": "section_break_nloq",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "servers_using_alternative_http_port_for_communication",
+ "fieldtype": "Small Text",
+ "label": "Servers Using Alternative HTTP Port For Communication"
+ },
+ {
+ "description": "Add value in percent (%). e.g. 10",
+ "fieldname": "npo_discount",
+ "fieldtype": "Float",
+ "label": "NPO Discount"
+ },
+ {
+ "default": "0",
+ "fieldname": "execute_incident_action",
+ "fieldtype": "Check",
+ "label": "Execute Incident Action"
+ },
+ {
+ "default": "0",
+ "fieldname": "enable_server_snapshot_recovery",
+ "fieldtype": "Check",
+ "label": "Enable Server Snapshot Recovery"
+ },
+ {
+ "fieldname": "docker_s3_access_key",
+ "fieldtype": "Data",
+ "label": "Docker S3 Access Key"
+ },
+ {
+ "fieldname": "docker_s3_secret_key",
+ "fieldtype": "Password",
+ "label": "Docker S3 Secret Key"
+ },
+ {
+ "default": "0",
+ "description": "Enable via RazorPay dashboard",
+ "fieldname": "paypal_enabled",
+ "fieldtype": "Check",
+ "label": "PayPal Enabled"
+ },
+ {
+ "default": "1",
+ "description": "Set redis password common site config and redis configs at release group level",
+ "fieldname": "set_redis_password",
+ "fieldtype": "Check",
+ "label": "Set Redis Password"
+ },
+ {
+ "default": "1",
+ "fieldname": "disallow_disposable_emails",
+ "fieldtype": "Check",
+ "label": "Disallow disposable emails"
+ },
+ {
+ "fieldname": "drive_resource_link",
+ "fieldtype": "Data",
+ "label": "Drive Resource Link"
+ },
+ {
+ "fieldname": "ic_key",
+ "fieldtype": "Password",
+ "label": "IC Key"
+ },
+ {
+ "fieldname": "section_break_dhzi",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "auto_scale_section",
+ "fieldtype": "Section Break",
+ "label": "Auto Scale"
+ },
+ {
+ "default": "/home/frappe/shared",
+ "fieldname": "shared_directory",
+ "fieldtype": "Data",
+ "label": "Shared Directory"
+ },
+ {
+ "default": "600",
+ "description": "Time between two autoscale events (up scale or down scale)",
+ "fieldname": "cool_off_period",
+ "fieldtype": "Int",
+ "label": "Cool off period"
+ },
+ {
+ "fieldname": "deploy_marker",
+ "fieldtype": "Data",
+ "label": "Deploy Marker",
+ "options": "If found in commit message deploy will be triggered"
+ },
+ {
+ "fieldname": "security_tab",
+ "fieldtype": "Tab Break",
+ "label": "Security"
+ },
+ {
+ "fieldname": "wazuh_server",
+ "fieldtype": "Data",
+ "label": "Wazuh Server"
+ },
+ {
+ "fieldname": "autoscale_discount",
+ "fieldtype": "Float",
+ "label": "Autoscale Discount"
+ },
+ {
+ "default": "1",
+ "fieldname": "disable_binlog_indexer_service",
+ "fieldtype": "Check",
+ "label": "Disable Binlog Indexer Service"
+ },
+ {
+ "fieldname": "default_server_plan_type",
+ "fieldtype": "Link",
+ "label": "Default Server Plan Type",
+ "options": "Server Plan Type"
+ },
+ {
+ "fieldname": "frappe_school_authentication_section",
+ "fieldtype": "Section Break",
+ "label": "Frappe School Authentication"
+ },
+ {
+ "fieldname": "school_url",
+ "fieldtype": "Data",
+ "label": "School URL"
+ },
+ {
+ "fieldname": "school_api_key",
+ "fieldtype": "Data",
+ "label": "School API Key"
+ },
+ {
+ "fieldname": "column_break_uxxz",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "school_api_secret",
+ "fieldtype": "Password",
+ "label": "School API Secret"
+ },
+ {
+ "collapsible": 1,
+ "fieldname": "asset_store_section",
+ "fieldtype": "Section Break",
+ "label": "Asset Store"
+ },
+ {
+ "default": "0",
+ "description": "Will only use this asset storage backend if checked.",
+ "fieldname": "use_asset_store",
+ "fieldtype": "Check",
+ "label": "Use Asset Store"
+ },
+ {
+ "fieldname": "asset_store_access_key",
+ "fieldtype": "Data",
+ "label": "Asset Store Access Key"
+ },
+ {
+ "fieldname": "asset_store_secret_access_key",
+ "fieldtype": "Password",
+ "label": "Asset Store Secret Access Key"
+ },
+ {
+ "fieldname": "asset_store_endpoint",
+ "fieldtype": "Data",
+ "label": "Asset Store Endpoint"
+ },
+ {
+ "fieldname": "asset_store_bucket_name",
+ "fieldtype": "Data",
+ "label": "Asset Store Bucket Name "
+ },
+ {
+ "fieldname": "asset_store_region",
+ "fieldtype": "Data",
+ "label": "Asset Store Region"
}
],
"issingle": 1,
"links": [],
- "modified": "2023-07-01 13:06:23.438512",
+ "modified": "2026-01-17 11:49:01.799269",
"modified_by": "Administrator",
"module": "Press",
"name": "Press Settings",
@@ -1001,8 +1655,9 @@
}
],
"quick_entry": 1,
+ "row_format": "Dynamic",
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"track_changes": 1
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/press_settings/press_settings.py b/press/press/doctype/press_settings/press_settings.py
index 4b42d60390a..c2a116b56b7 100644
--- a/press/press/doctype/press_settings/press_settings.py
+++ b/press/press/doctype/press_settings/press_settings.py
@@ -1,18 +1,225 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
-
+import boto3
import frappe
from boto3.session import Session
from frappe.model.document import Document
-from frappe.utils import get_url
+from frappe.utils import get_url, validate_email_address
+from twilio.rest import Client
from press.api.billing import get_stripe
+from press.press.doctype.telegram_message.telegram_message import TelegramMessage
from press.telegram_utils import Telegram
class PressSettings(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.app_group.app_group import AppGroup
+ from press.press.doctype.erpnext_app.erpnext_app import ERPNextApp
+
+ agent_github_access_token: DF.Data | None
+ agent_repository_owner: DF.Data | None
+ agent_sentry_dsn: DF.Data | None
+ app_include_script: DF.Data | None
+ asset_store_access_key: DF.Data | None
+ asset_store_bucket_name: DF.Data | None
+ asset_store_endpoint: DF.Data | None
+ asset_store_region: DF.Data | None
+ asset_store_secret_access_key: DF.Password | None
+ auto_update_queue_size: DF.Int
+ autoscale_discount: DF.Float
+ aws_access_key_id: DF.Data | None
+ aws_s3_bucket: DF.Data | None
+ aws_secret_access_key: DF.Password | None
+ backup_interval: DF.Int
+ backup_limit: DF.Int
+ backup_offset: DF.Int
+ backup_region: DF.Data | None
+ backup_rotation_scheme: DF.Literal["FIFO", "Grandfather-father-son"]
+ bench_configuration: DF.Code
+ branch: DF.Data | None
+ build_directory: DF.Data | None
+ build_server: DF.Link | None
+ central_migration_server: DF.Link | None
+ certbot_directory: DF.Data
+ clone_directory: DF.Data | None
+ cluster: DF.Link | None
+ code_server: DF.Data | None
+ code_server_password: DF.Data | None
+ commission: DF.Float
+ compress_app_cache: DF.Check
+ cool_off_period: DF.Int
+ data_40: DF.Data | None
+ default_apps: DF.Table[AppGroup]
+ default_outgoing_id: DF.Data | None
+ default_outgoing_pass: DF.Data | None
+ default_server_plan_type: DF.Link | None
+ deploy_marker: DF.Data | None
+ disable_agent_job_deduplication: DF.Check
+ disable_auto_retry: DF.Check
+ disable_binlog_indexer_service: DF.Check
+ disable_frappe_auth: DF.Check
+ disable_physical_backup: DF.Check
+ disallow_disposable_emails: DF.Check
+ docker_registry_namespace: DF.Data | None
+ docker_registry_password: DF.Data | None
+ docker_registry_url: DF.Data | None
+ docker_registry_username: DF.Data | None
+ docker_s3_access_key: DF.Data | None
+ docker_s3_secret_key: DF.Password | None
+ domain: DF.Link | None
+ drive_resource_link: DF.Data | None
+ eff_registration_email: DF.Data
+ email_recipients: DF.SmallText | None
+ enable_app_grouping: DF.Check
+ enable_email_pre_verification: DF.Check
+ enable_google_oauth: DF.Check
+ enable_server_snapshot_recovery: DF.Check
+ enable_site_pooling: DF.Check
+ enable_spam_check: DF.Check
+ enforce_storage_limits: DF.Check
+ erpnext_api_key: DF.Data | None
+ erpnext_api_secret: DF.Password | None
+ erpnext_apps: DF.Table[ERPNextApp]
+ erpnext_cluster: DF.Link | None
+ erpnext_domain: DF.Link | None
+ erpnext_group: DF.Link | None
+ erpnext_plan: DF.Link | None
+ erpnext_url: DF.Data | None
+ execute_incident_action: DF.Check
+ frappe_url: DF.Data | None
+ frappeio_api_key: DF.Data | None
+ frappeio_api_secret: DF.Password | None
+ free_credits_inr: DF.Currency
+ free_credits_usd: DF.Currency
+ github_access_token: DF.Data | None
+ github_app_client_id: DF.Data | None
+ github_app_client_secret: DF.Data | None
+ github_app_id: DF.Data | None
+ github_app_private_key: DF.Code | None
+ github_app_public_link: DF.Data | None
+ github_pat_token: DF.Data | None
+ github_webhook_secret: DF.Data | None
+ gst_percentage: DF.Float
+ hybrid_cluster: DF.Link | None
+ hybrid_domain: DF.Link | None
+ ic_key: DF.Password | None
+ log_server: DF.Link | None
+ mailgun_api_key: DF.Data | None
+ max_allowed_screenshots: DF.Int
+ max_concurrent_physical_restorations: DF.Int
+ max_failed_backup_attempts_in_a_day: DF.Int
+ micro_debit_charge_inr: DF.Currency
+ micro_debit_charge_usd: DF.Currency
+ minimum_rebuild_memory: DF.Int
+ monitor_server: DF.Link | None
+ monitor_token: DF.Data | None
+ ngrok_auth_token: DF.Data | None
+ npo_discount: DF.Float
+ offsite_backups_access_key_id: DF.Data | None
+ offsite_backups_count: DF.Int
+ offsite_backups_provider: DF.Literal["AWS S3"]
+ offsite_backups_secret_access_key: DF.Password | None
+ partnership_fee_inr: DF.Int
+ partnership_fee_usd: DF.Int
+ paypal_enabled: DF.Check
+ plausible_api_key: DF.Password | None
+ plausible_site_id: DF.Data | None
+ plausible_url: DF.Data | None
+ press_monitoring_password: DF.Password | None
+ press_trial_plan: DF.Link | None
+ print_format: DF.Data | None
+ publish_docs: DF.Check
+ razorpay_key_id: DF.Data | None
+ razorpay_key_secret: DF.Password | None
+ razorpay_webhook_secret: DF.Data | None
+ realtime_job_updates: DF.Check
+ redis_cache_size: DF.Int
+ remote_access_key_id: DF.Data | None
+ remote_link_expiry: DF.Int
+ remote_secret_access_key: DF.Password | None
+ remote_uploads_bucket: DF.Data | None
+ root_domain: DF.Data | None
+ rsa_key_size: DF.Literal["2048", "3072", "4096"]
+ school_api_key: DF.Data | None
+ school_api_secret: DF.Password | None
+ school_url: DF.Data | None
+ send_email_notifications: DF.Check
+ send_telegram_notifications: DF.Check
+ servers_using_alternative_http_port_for_communication: DF.SmallText | None
+ set_redis_password: DF.Check
+ shared_directory: DF.Data | None
+ spaces_domain: DF.Link | None
+ spamd_api_key: DF.Data | None
+ spamd_api_secret: DF.Password | None
+ spamd_endpoint: DF.Data | None
+ ssh_certificate_authority: DF.Link | None
+ staging_expiry: DF.Int
+ staging_plan: DF.Link | None
+ standby_pool_size: DF.Int
+ standby_queue_size: DF.Int
+ stripe_inr_plan_id: DF.Data | None
+ stripe_product_id: DF.Data | None
+ stripe_publishable_key: DF.Data | None
+ stripe_secret_key: DF.Password | None
+ stripe_usd_plan_id: DF.Data | None
+ stripe_webhook_endpoint_id: DF.Data | None
+ stripe_webhook_secret: DF.Data | None
+ suspend_builds: DF.Check
+ telegram_alert_chat_id: DF.Data | None
+ telegram_alerts_chat_group: DF.Link | None
+ telegram_bot_token: DF.Data | None
+ telegram_chat_id: DF.Data | None
+ threshold: DF.Float
+ tls_renewal_queue_size: DF.Int
+ trial_sites_count: DF.Int
+ twilio_account_sid: DF.Data | None
+ twilio_api_key_secret: DF.Password | None
+ twilio_api_key_sid: DF.Data | None
+ twilio_phone_number: DF.Phone | None
+ usage_record_creation_batch_size: DF.Int
+ usd_rate: DF.Float
+ use_agent_job_callbacks: DF.Check
+ use_app_cache: DF.Check
+ use_asset_store: DF.Check
+ use_delta_builds: DF.Check
+ use_staging_ca: DF.Check
+ verify_cards_with_micro_charge: DF.Literal["No", "Only INR", "Only USD", "Both INR and USD"]
+ wazuh_server: DF.Data | None
+ webroot_directory: DF.Data | None
+ # end: auto-generated types
+
+ dashboard_fields = (
+ "partnership_fee_inr",
+ "partnership_fee_usd",
+ )
+
+ def validate(self):
+ if self.max_concurrent_physical_restorations > 5:
+ frappe.throw("Max Concurrent Physical Restorations should be less than 5")
+
+ if self.send_email_notifications:
+ if self.email_recipients:
+ # Split the comma-separated emails into a list
+ email_list = [email.strip() for email in self.email_recipients.split(",")]
+ for email in email_list:
+ if not validate_email_address(email):
+ frappe.throw(f"Invalid email address: {email}")
+ else:
+ frappe.throw("Email Recipients List can not be empty")
+
+ if self.minimum_rebuild_memory < 2:
+ frappe.throw("Minimum rebuild memory needs to be 2 GB or more.")
+
@frappe.whitelist()
def create_stripe_webhook(self):
stripe = get_stripe()
@@ -30,6 +237,8 @@ def create_stripe_webhook(self):
"invoice.payment_succeeded",
"invoice.payment_failed",
"invoice.finalized",
+ "mandate.updated",
+ "setup_intent.succeeded",
],
)
self.stripe_webhook_endpoint_id = webhook["id"]
@@ -43,7 +252,7 @@ def get_github_app_manifest(self):
app_name = f"Frappe Cloud {frappe.generate_hash(length=6).upper()}"
else:
app_name = "Frappe Cloud"
- manifest = {
+ return {
"name": app_name,
"url": "https://frappe.cloud",
"hook_attributes": {"url": get_url("api/method/press.api.github.hook")},
@@ -61,11 +270,10 @@ def get_github_app_manifest(self):
"request_oauth_on_install": True,
"setup_on_update": True,
}
- return manifest
@property
def boto3_offsite_backup_session(self) -> Session:
- """Get new preconfigured boto3 session for offisite backup provider."""
+ """Get new preconfigured boto3 session for offsite backup provider."""
return Session(
aws_access_key_id=self.offsite_backups_access_key_id,
aws_secret_access_key=self.get_password(
@@ -74,6 +282,14 @@ def boto3_offsite_backup_session(self) -> Session:
region_name="ap-south-1",
)
+ @property
+ def boto3_iam_client(self):
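+ # IAM client built from the AWS Access Key ID and Secret configured
+ # in the AWS section above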
+ return boto3.client(
+ "iam",
+ aws_access_key_id=self.aws_access_key_id,
+ aws_secret_access_key=self.get_password("aws_secret_access_key"),
+ )
+
@classmethod
def is_offsite_setup(cls):
return any(
@@ -87,3 +303,20 @@ def is_offsite_setup(cls):
@property
def telegram(self):
return Telegram
+
+ @property
+ def telegram_message(self):
+ return TelegramMessage
+
+ @property
+ def twilio_client(self) -> Client:
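+ # Twilio API-key auth: Client(api_key_sid, api_key_secret, account_sid)
+ # authenticates with an API key pair instead of the account auth token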
+ account_sid = self.twilio_account_sid
+ api_key_sid = self.twilio_api_key_sid
+ api_key_secret = self.get_password("twilio_api_key_secret")
+ return Client(api_key_sid, api_key_secret, account_sid)
+
+ def get_default_apps(self):
+ if hasattr(self, "enable_app_grouping") and hasattr(self, "default_apps"): # noqa
+ if self.enable_app_grouping:
+ return [app.app for app in self.default_apps]
+ return []
diff --git a/press/press/doctype/press_settings/test_press_settings.py b/press/press/doctype/press_settings/test_press_settings.py
index d4f8fadf46f..7a6d1b765e8 100644
--- a/press/press/doctype/press_settings/test_press_settings.py
+++ b/press/press/doctype/press_settings/test_press_settings.py
@@ -1,10 +1,9 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
from press.press.doctype.cluster.test_cluster import create_test_cluster
@@ -35,18 +34,17 @@ def create_test_press_settings():
}
).insert(ignore_if_duplicate=True)
- settings = frappe.get_doc(
- {
- "doctype": "Press Settings",
- "domain": "fc.dev",
- "bench_configuration": "{}",
- "rsa_key_size": "2048",
- "certbot_directory": ".certbot",
- "eff_registration_email": frappe.mock("email"),
- }
- ).insert()
+ settings = frappe.get_single("Press Settings")
+ settings.domain = "fc.dev"
+ settings.bench_configuration = "{}"
+ settings.rsa_key_size = 2048
+ settings.certbot_directory = ".certbot"
+ settings.eff_registration_email = frappe.mock("email")
+ settings.max_concurrent_physical_restorations = 2
+ settings.minimum_rebuild_memory = 2
+ settings.save()
return settings
-class TestPressSettings(unittest.TestCase):
+class TestPressSettings(FrappeTestCase):
pass
diff --git a/press/press/doctype/press_tag/press_tag.json b/press/press/doctype/press_tag/press_tag.json
index 6c17e4a3e1a..c6484409be1 100644
--- a/press/press/doctype/press_tag/press_tag.json
+++ b/press/press/doctype/press_tag/press_tag.json
@@ -34,7 +34,7 @@
],
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2023-07-02 14:47:42.775981",
+ "modified": "2024-05-30 13:40:56.625943",
"modified_by": "Administrator",
"module": "Press",
"name": "Press Tag",
@@ -51,9 +51,35 @@
"role": "System Manager",
"share": 1,
"write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1,
+ "write": 1
}
],
+ "show_title_field_in_link": 1,
"sort_field": "modified",
"sort_order": "DESC",
- "states": []
+ "states": [],
+ "title_field": "tag"
}
\ No newline at end of file
diff --git a/press/press/doctype/press_tag/press_tag.py b/press/press/doctype/press_tag/press_tag.py
index 98fb98927a4..70472f80c0a 100644
--- a/press/press/doctype/press_tag/press_tag.py
+++ b/press/press/doctype/press_tag/press_tag.py
@@ -6,4 +6,17 @@
class PressTag(Document):
- pass
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ doctype_name: DF.Link | None
+ tag: DF.Data | None
+ team: DF.Link | None
+ # end: auto-generated types
+
+ dashboard_fields = ["tag", "doctype_name", "team"]
diff --git a/press/press/doctype/press_tag/test_press_tag.py b/press/press/doctype/press_tag/test_press_tag.py
index 6c26b6ab253..30422a60813 100644
--- a/press/press/doctype/press_tag/test_press_tag.py
+++ b/press/press/doctype/press_tag/test_press_tag.py
@@ -12,11 +12,11 @@ def create_and_add_test_tag(name: str, doctype: str, tag: str = "test_tag"):
{
"doctype": "Press Tag",
"doctype_name": doctype,
- "team": create_test_team(),
+ "team": create_test_team().name,
"tag": tag,
}
).insert(ignore_permissions=True)
- doc = frappe.get_doc(doctype, name).append("tags", {"tag": test_tag})
+ doc = frappe.get_doc(doctype, name).append("tags", {"tag": test_tag.name})
doc.save()
return test_tag
diff --git a/press/press/doctype/press_user_permission/__init__.py b/press/press/doctype/press_user_permission/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/press_user_permission/press_user_permission.js b/press/press/doctype/press_user_permission/press_user_permission.js
new file mode 100644
index 00000000000..08ca9060ba5
--- /dev/null
+++ b/press/press/doctype/press_user_permission/press_user_permission.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2023, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Press User Permission", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/press_user_permission/press_user_permission.json b/press/press/doctype/press_user_permission/press_user_permission.json
new file mode 100644
index 00000000000..1f30c0209c2
--- /dev/null
+++ b/press/press/doctype/press_user_permission/press_user_permission.json
@@ -0,0 +1,108 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2023-08-04 15:08:48.970377",
+ "default_view": "List",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "type",
+ "user",
+ "group",
+ "document_type",
+ "document_name",
+ "action",
+ "config"
+ ],
+ "fields": [
+ {
+ "depends_on": "eval: doc.type == \"User\" || doc.type == \"Config\";",
+ "fieldname": "user",
+ "fieldtype": "Link",
+ "in_filter": 1,
+ "in_list_view": 1,
+ "in_preview": 1,
+ "in_standard_filter": 1,
+ "label": "User",
+ "options": "User",
+ "search_index": 1
+ },
+ {
+ "depends_on": "eval: doc.type != 'Config';",
+ "fieldname": "document_type",
+ "fieldtype": "Link",
+ "in_filter": 1,
+ "in_list_view": 1,
+ "in_preview": 1,
+ "in_standard_filter": 1,
+ "label": "Document Type",
+ "options": "DocType"
+ },
+ {
+ "depends_on": "eval: doc.type != 'Config';",
+ "fieldname": "document_name",
+ "fieldtype": "Dynamic Link",
+ "in_filter": 1,
+ "in_list_view": 1,
+ "in_preview": 1,
+ "in_standard_filter": 1,
+ "label": "Document Name",
+ "options": "document_type"
+ },
+ {
+ "depends_on": "eval: doc.type != 'Config';",
+ "fieldname": "action",
+ "fieldtype": "Data",
+ "in_filter": 1,
+ "in_list_view": 1,
+ "in_preview": 1,
+ "in_standard_filter": 1,
+ "label": "Action"
+ },
+ {
+ "default": "User",
+ "fieldname": "type",
+ "fieldtype": "Select",
+ "label": "Type",
+ "options": "User\nGroup\nConfig"
+ },
+ {
+ "depends_on": "eval: doc.type == \"Group\";",
+ "fieldname": "group",
+ "fieldtype": "Link",
+ "label": "Group",
+ "options": "Press Permission Group"
+ },
+ {
+ "depends_on": "eval: doc.type == 'Config';",
+ "fieldname": "config",
+ "fieldtype": "JSON",
+ "label": "config"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2023-12-15 15:48:28.325993",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Press User Permission",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/press_user_permission/press_user_permission.py b/press/press/doctype/press_user_permission/press_user_permission.py
new file mode 100644
index 00000000000..38e7d8a4dac
--- /dev/null
+++ b/press/press/doctype/press_user_permission/press_user_permission.py
@@ -0,0 +1,111 @@
+# Copyright (c) 2023, Frappe and contributors
+# For license information, please see license.txt
+
+
+import frappe
+from frappe.model.document import Document
+
+ALLOWED_CONFIG_PERMS = ["global", "restricted"]
+
+
+class PressUserPermission(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ action: DF.Data | None
+ config: DF.JSON | None
+ document_name: DF.DynamicLink | None
+ document_type: DF.Link | None
+ group: DF.Link | None
+ type: DF.Literal["User", "Group", "Config"]
+ user: DF.Link | None
+ # end: auto-generated types
+
+ def validate(self):
+ if self.type == "Config":
+ self.validate_config()
+
+ def validate_config(self):
+ config = frappe.parse_json(self.config)
+ if not set(config.keys()).issubset(set(ALLOWED_CONFIG_PERMS)):
+ frappe.throw(f"Invalid config key. Allowed keys are: {format(ALLOWED_CONFIG_PERMS)}")
+
+
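+# Resolution order: group grants first, then per-user grants, then the user's
+# "Config" permission document, whose JSON can both grant ("global") and
+# revoke ("restricted") access.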
+def has_user_permission(doc: str, name: str, action: str, groups: list | None = None):
+ groups = groups or []
+ user = frappe.session.user
+ allowed = False
+
+ if not groups:
+ groups = frappe.get_all("Press Permission Group User", {"user": user}, pluck="parent")
+
+ # part of a group with access
+ if frappe.db.exists(
+ "Press User Permission",
+ {
+ "type": "Group",
+ "group": ("in", groups),
+ "document_type": doc,
+ "document_name": name,
+ "action": action,
+ },
+ ):
+ allowed = True
+
+ # user has granular perm access
+ if frappe.db.exists(
+ "Press User Permission",
+ {
+ "type": "User",
+ "user": user,
+ "document_type": doc,
+ "document_name": name,
+ "action": action,
+ },
+ ):
+ allowed = True
+
+ # has config perm access
+ config = frappe.db.get_value(
+ "Press User Permission", {"user": user, "type": "Config"}, "config", as_dict=True
+ )
+ if config:
+ allowed = check_config_perm(frappe.parse_json(config["config"]), doc, name, action, allowed)
+
+ return allowed
+
+
+def check_config_perm(config: dict, doctype: str, name: str, action: str, allowed: bool):
+ perm_types = config.keys()
+
+ if "global" in perm_types:
+ allowed = has_config_perm(config["global"], doctype, name, action, allowed, "global")
+
+ if "restricted" in perm_types:
+ allowed = has_config_perm(config["restricted"], doctype, name, action, allowed, "restricted")
+
+ return allowed
+
+
+def has_config_perm(config: dict, doctype: str, name: str, action: str, allowed: bool, ptype: str):
+ if doctype in config:
+ docnames = config[doctype].keys()
+ if name not in docnames:
+ if "*" in docnames:
+ name = "*"
+ else:
+ return allowed
+
+ if action in config[doctype][name] or "*" in config[doctype][name]:
+ if ptype == "restricted":
+ allowed = False
+ elif ptype == "global":
+ allowed = True
+
+ return allowed
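+
+
+# Example config (mirrors test_press_user_permission.py): grant
+# press.api.site.login on every Site, but revoke press.api.site.migrate
+# on test.frappe.dev:
+#   {"global": {"Site": {"*": "press.api.site.login"}},
+#    "restricted": {"Site": {"test.frappe.dev": "press.api.site.migrate"}}}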
diff --git a/press/press/doctype/press_user_permission/test_press_user_permission.py b/press/press/doctype/press_user_permission/test_press_user_permission.py
new file mode 100644
index 00000000000..0c7ce2a2c9d
--- /dev/null
+++ b/press/press/doctype/press_user_permission/test_press_user_permission.py
@@ -0,0 +1,78 @@
+# Copyright (c) 2023, Frappe and Contributors
+# See license.txt
+
+import frappe
+from frappe.tests.utils import FrappeTestCase
+
+from press.press.doctype.press_user_permission.press_user_permission import (
+ has_user_permission,
+)
+from press.press.doctype.site.test_site import create_test_site
+from press.press.doctype.team.test_team import create_test_team
+
+
+class TestPressUserPermission(FrappeTestCase):
+ def setUp(self):
+ super().setUp()
+
+ self.team = create_test_team()
+ self.site = create_test_site(subdomain="testpermsite")
+
+ def tearDown(self):
+ frappe.set_user("Administrator")
+ frappe.db.rollback()
+
+ def test_press_user_permission(self):
+ self.assertFalse(has_user_permission("Site", self.site.name, "press.api.site.login"))
+
+ frappe.get_doc(
+ doctype="Press User Permission",
+ type="User",
+ user=frappe.session.user,
+ document_type="Site",
+ document_name=self.site.name,
+ action="press.api.site.login",
+ ).insert(ignore_permissions=True)
+
+ self.assertTrue(has_user_permission("Site", self.site.name, "press.api.site.login"))
+ self.assertFalse(has_user_permission("Site", self.site.name, "press.api.site.migrate"))
+
+ def test_press_group_permission(self):
+ group = frappe.get_doc(doctype="Press Permission Group", team=self.team.name, title="Test Group")
+ group.append("users", {"user": frappe.session.user})
+ group.insert(ignore_permissions=True)
+
+ frappe.get_doc(
+ doctype="Press User Permission",
+ type="Group",
+ group=group.name,
+ document_type="Site",
+ document_name=self.site.name,
+ action="press.api.site.overview",
+ ).insert(ignore_permissions=True)
+
+ self.assertTrue(
+ has_user_permission("Site", self.site.name, "press.api.site.overview", groups=[group.name])
+ )
+ self.assertFalse(
+ has_user_permission("Site", self.site.name, "press.api.site.migrate", groups=[group.name])
+ )
+
+ def test_press_config_permission(self):
+ perms = {
+ "global": {
+ "Site": {"*": "press.api.site.login"},
+ },
+ "restricted": {"Site": {"test.frappe.dev": "press.api.site.migrate"}},
+ }
+ frappe.get_doc(
+ doctype="Press User Permission",
+ type="Config",
+ config=frappe.as_json(perms),
+ user=frappe.session.user,
+ ).insert(ignore_permissions=True)
+
+ self.assertTrue(has_user_permission("Site", self.site.name, "press.api.site.login"))
+ self.assertFalse(has_user_permission("Site", "sometest.frappe.dev", "press.api.site.restore"))
+ self.assertFalse(has_user_permission("Site", "test.frappe.dev", "press.api.site.migrate"))
+ self.assertTrue(has_user_permission("Site", "test.frappe.dev", "press.api.site.login"))
diff --git a/press/press/doctype/press_webhook/__init__.py b/press/press/doctype/press_webhook/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/press_webhook/press_webhook.js b/press/press/doctype/press_webhook/press_webhook.js
new file mode 100644
index 00000000000..335323b5f0e
--- /dev/null
+++ b/press/press/doctype/press_webhook/press_webhook.js
@@ -0,0 +1,46 @@
+// Copyright (c) 2024, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('Press Webhook', {
+ refresh(frm) {
+ let webhook = frm.get_doc();
+
+ if (!webhook.enabled) {
+ frm.add_custom_button(
+ __('Activate'),
+ () => {
+ frm.call('activate').then((r) => {
+ if (r.message) {
+ frappe.msgprint(r.message);
+ }
+ });
+ },
+ __('Actions'),
+ );
+ } else {
+ frm.add_custom_button(
+ __('Disable'),
+ () => {
+ frm.call('disable').then((r) => {
+ if (r.message) {
+ frappe.msgprint(r.message);
+ }
+ });
+ },
+ __('Actions'),
+ );
+
+ frm.add_custom_button(
+ __('Disable and Notify'),
+ () => {
+ frm.call('disable_and_notify').then((r) => {
+ if (r.message) {
+ frappe.msgprint(r.message);
+ }
+ });
+ },
+ __('Actions'),
+ );
+ }
+ },
+});
diff --git a/press/press/doctype/press_webhook/press_webhook.json b/press/press/doctype/press_webhook/press_webhook.json
new file mode 100644
index 00000000000..f4daeadfd4c
--- /dev/null
+++ b/press/press/doctype/press_webhook/press_webhook.json
@@ -0,0 +1,94 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2024-09-18 14:32:26.332089",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "enabled",
+ "team",
+ "endpoint",
+ "secret",
+ "section_break_xbfh",
+ "events"
+ ],
+ "fields": [
+ {
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Team",
+ "options": "Team",
+ "reqd": 1
+ },
+ {
+ "description": "Will be added in X-Webhook-Secret header of webhook request",
+ "fieldname": "secret",
+ "fieldtype": "Data",
+ "label": "Secret",
+ "not_nullable": 1
+ },
+ {
+ "fieldname": "section_break_xbfh",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "events",
+ "fieldtype": "Table",
+ "label": "Events",
+ "options": "Press Webhook Selected Event",
+ "reqd": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "enabled",
+ "fieldtype": "Check",
+ "label": "Enabled",
+ "read_only_depends_on": "eval: !doc.enabled"
+ },
+ {
+ "fieldname": "endpoint",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Endpoint",
+ "reqd": 1,
+ "search_index": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2024-09-23 15:06:57.848414",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Press Webhook",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/press_webhook/press_webhook.py b/press/press/doctype/press_webhook/press_webhook.py
new file mode 100644
index 00000000000..ebdd528ead6
--- /dev/null
+++ b/press/press/doctype/press_webhook/press_webhook.py
@@ -0,0 +1,190 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+from __future__ import annotations
+
+import contextlib
+import ipaddress
+import json
+from urllib.parse import urlparse
+
+import frappe
+import frappe.query_builder
+import frappe.query_builder.functions
+import requests
+from frappe.model.document import Document
+
+from press.api.client import dashboard_whitelist
+from press.guards import role_guard
+from press.overrides import get_permission_query_conditions_for_doctype
+from press.utils import is_valid_hostname
+
+
+class PressWebhook(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.press_webhook_selected_event.press_webhook_selected_event import (
+ PressWebhookSelectedEvent,
+ )
+
+ enabled: DF.Check
+ endpoint: DF.Data
+ events: DF.Table[PressWebhookSelectedEvent]
+ secret: DF.Data
+ team: DF.Link
+ # end: auto-generated types
+
+ DOCTYPE = "Press Webhook"
+ dashboard_fields = ("enabled", "endpoint", "events")
+
+ @role_guard.action()
+ def validate(self):
+ # maximum 5 webhooks per team
+ if self.is_new() and frappe.db.count("Press Webhook", {"team": self.team}) >= 5:
+ frappe.throw("You have reached the maximum number of webhooks per team")
+
+ if self.has_value_changed("endpoint"):
+ self.enabled = 0
+ # should have at least one event selected
+ if not self.events:
+ frappe.throw("At least one event should be selected")
+ # validate endpoint url format
+ self.validate_endpoint_url_format()
+ # check for duplicate webhooks
+ webhooks = frappe.get_all(
+ "Press Webhook",
+ filters={"team": self.team, "endpoint": self.endpoint, "name": ("!=", self.name)},
+ pluck="name",
+ )
+ if len(webhooks) != 0:
+ frappe.throw("You have already added webhook for this endpoint")
+
+ def validate_endpoint_url_format(self):
+ url = urlparse(self.endpoint)
+ if not url.netloc:
+ frappe.throw("Endpoint should be a valid url")
+
+ # protocol should be http or https
+ if url.scheme not in ["http", "https"]:
+ frappe.throw("Endpoint should start with http:// or https://")
+
+ # don't allow query params
+ if url.query:
+ frappe.throw("Endpoint should not have query params")
+
+ isIPAddress = False
+ # If the endpoint target is an IP address, it must be a public address
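+ # (guards against SSRF into private or internal networks)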
+ with contextlib.suppress(ValueError):
+ ip = ipaddress.ip_address(url.hostname)
+ isIPAddress = True
+ if not ip.is_global:
+ frappe.throw("Endpoint address should be a public ip or domain")
+
+ if not isIPAddress:
+ # domain should be an FQDN
+ if not is_valid_hostname(url.hostname):
+ frappe.throw("Endpoint address should be a valid domain")
+
+ # Endpoint can't be any local domain
+ if not frappe.conf.developer_mode and ("localhost" in url.hostname or ".local" in url.hostname):
+ frappe.throw("Endpoint can't be localhost or local domain")
+
+ @dashboard_whitelist()
+ def validate_endpoint(self) -> dict:
+ response = ""
+ response_status_code = 0
+ payload = {"event": "Webhook Validate", "data": {}}
+ try:
+ req = requests.post(
+ self.endpoint,
+ timeout=5,
+ json=payload,
+ headers={"X-Webhook-Secret": self.secret},
+ )
+ response = req.text or ""
+ response_status_code = req.status_code
+ except requests.exceptions.ConnectionError:
+ response = "Failed to connect to the webhook endpoint"
+ except requests.exceptions.SSLError:
+ response = "SSL Error. Please check if SSL the certificate of the webhook is valid."
+ except (requests.exceptions.Timeout, requests.exceptions.ConnectTimeout):
+ response = "Request Timeout. Please check if the webhook is reachable."
+ except Exception as e:
+ response = str(e)
+
+ return frappe._dict(
+ {
+ "success": response_status_code >= 200 and response_status_code < 300,
+ "request": json.dumps(payload, indent=2),
+ "response": response,
+ "response_status_code": response_status_code,
+ }
+ )
+
+ @dashboard_whitelist()
+ def activate(self):
+ result = self.validate_endpoint()
+ if result.get("success"):
+ self.enabled = 1
+ self.save()
+ frappe.msgprint("Webhook activated successfully")
+ else:
+ message = f"
Status Code - {result.response_status_code}
Response - {result.response}"
+ frappe.throw(title="Webhook endpoint is invalid", msg=message)
+
+ @dashboard_whitelist()
+ def disable(self):
+ self.enabled = False
+ self.save()
+
+ @dashboard_whitelist()
+ def disable_and_notify(self):
+ self.disable()
+ email = frappe.db.get_value("Team", self.team, "user")
+ if not email:
+ return
+ if frappe.conf.developer_mode:
+ print(f"Emailing {email}")
+ print(f"{self.name} webhook has been disabled")
+ return
+
+ frappe.sendmail(
+ recipients=email,
+ subject="Important: Your Configured Webhook on Frappe Cloud is disabled",
+ template="press_webhook_disabled",
+ args={"endpoint": self.endpoint},
+ now=True,
+ )
+
+ @dashboard_whitelist()
+ def delete(self):
+ frappe.db.sql("delete from `tabPress Webhook Attempt` where webhook = %s", (self.name,))
+ frappe.delete_doc("Press Webhook", self.name)
+
+
+get_permission_query_conditions = get_permission_query_conditions_for_doctype("Site")
+
+
+def auto_disable_high_delivery_failure_webhooks():
+ # If more than 70% of a webhook's deliveries failed in the past hour, disable it and notify the user
+ data = frappe.db.sql(
+ """
+SELECT `endpoint`
+FROM `tabPress Webhook Attempt`
+WHERE `creation` >= NOW() - INTERVAL 1 HOUR
+GROUP BY `endpoint`
+HAVING (COUNT(CASE WHEN `status` = 'Failed' THEN 1 END) / COUNT(*)) * 100 > 70;
+""",
+ as_dict=True,
+ )
+ endpoints = [row.endpoint for row in data]
+ doc_names = frappe.get_all("Press Webhook", filters={"endpoint": ("in", endpoints)}, pluck="name")
+ for doc_name in doc_names:
+ doc = frappe.get_doc("Press Webhook", doc_name)
+ doc.disable_and_notify()
diff --git a/press/press/doctype/press_webhook/test_press_webhook.py b/press/press/doctype/press_webhook/test_press_webhook.py
new file mode 100644
index 00000000000..bc999001520
--- /dev/null
+++ b/press/press/doctype/press_webhook/test_press_webhook.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2024, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class TestPressWebhook(FrappeTestCase):
+ pass
diff --git a/press/press/doctype/press_webhook_attempt/__init__.py b/press/press/doctype/press_webhook_attempt/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/press_webhook_attempt/press_webhook_attempt.js b/press/press/doctype/press_webhook_attempt/press_webhook_attempt.js
new file mode 100644
index 00000000000..667b4d86293
--- /dev/null
+++ b/press/press/doctype/press_webhook_attempt/press_webhook_attempt.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2024, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Press Webhook Attempt", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/press_webhook_attempt/press_webhook_attempt.json b/press/press/doctype/press_webhook_attempt/press_webhook_attempt.json
new file mode 100644
index 00000000000..78983931315
--- /dev/null
+++ b/press/press/doctype/press_webhook_attempt/press_webhook_attempt.json
@@ -0,0 +1,95 @@
+{
+ "actions": [],
+ "creation": "2024-09-19 09:33:43.941516",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "status",
+ "column_break_aqtt",
+ "timestamp",
+ "section_break_kvpa",
+ "column_break_uhnt",
+ "webhook",
+ "column_break_wpgi",
+ "endpoint",
+ "section_break_nnsg",
+ "response_status_code",
+ "response_body"
+ ],
+ "fields": [
+ {
+ "fieldname": "webhook",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Webhook",
+ "options": "Press Webhook",
+ "reqd": 1
+ },
+ {
+ "fieldname": "column_break_uhnt",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "column_break_aqtt",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Status",
+ "options": "Sent\nFailed",
+ "reqd": 1
+ },
+ {
+ "fieldname": "section_break_nnsg",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "response_status_code",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Response Status Code"
+ },
+ {
+ "fieldname": "response_body",
+ "fieldtype": "Small Text",
+ "label": "Response Body",
+ "read_only": 1
+ },
+ {
+ "fieldname": "endpoint",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Endpoint",
+ "reqd": 1
+ },
+ {
+ "fieldname": "section_break_kvpa",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "column_break_wpgi",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "timestamp",
+ "fieldtype": "Datetime",
+ "in_list_view": 1,
+ "label": "Timestamp",
+ "reqd": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2024-09-25 10:59:44.304591",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Press Webhook Attempt",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/press_webhook_attempt/press_webhook_attempt.py b/press/press/doctype/press_webhook_attempt/press_webhook_attempt.py
new file mode 100644
index 00000000000..fc5965605b6
--- /dev/null
+++ b/press/press/doctype/press_webhook_attempt/press_webhook_attempt.py
@@ -0,0 +1,34 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+from __future__ import annotations
+
+import frappe
+from frappe.model.document import Document
+
+
+class PressWebhookAttempt(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ endpoint: DF.Data
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ response_body: DF.SmallText | None
+ response_status_code: DF.Data | None
+ status: DF.Literal["Sent", "Failed"]
+ timestamp: DF.Datetime
+ webhook: DF.Link
+ # end: auto-generated types
+
+
+def has_permission(doc, ptype, user):
+ if ptype != "read":
+ return False
+ return frappe.get_doc("Press Webhook", doc.webhook).has_permission("read", user)
diff --git a/press/press/doctype/press_webhook_attempt/test_press_webhook_attempt.py b/press/press/doctype/press_webhook_attempt/test_press_webhook_attempt.py
new file mode 100644
index 00000000000..574fd4bf0ba
--- /dev/null
+++ b/press/press/doctype/press_webhook_attempt/test_press_webhook_attempt.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2024, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class TestPressWebhookAttempt(FrappeTestCase):
+ pass
diff --git a/press/press/doctype/press_webhook_event/__init__.py b/press/press/doctype/press_webhook_event/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/press_webhook_event/press_webhook_event.js b/press/press/doctype/press_webhook_event/press_webhook_event.js
new file mode 100644
index 00000000000..6383b1452fa
--- /dev/null
+++ b/press/press/doctype/press_webhook_event/press_webhook_event.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2024, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Press Webhook Event", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/press_webhook_event/press_webhook_event.json b/press/press/doctype/press_webhook_event/press_webhook_event.json
new file mode 100644
index 00000000000..6b01b96d716
--- /dev/null
+++ b/press/press/doctype/press_webhook_event/press_webhook_event.json
@@ -0,0 +1,61 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "autoname": "field:title",
+ "creation": "2024-09-18 14:35:06.487107",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "enabled",
+ "title",
+ "description"
+ ],
+ "fields": [
+ {
+ "default": "0",
+ "fieldname": "enabled",
+ "fieldtype": "Check",
+ "label": "Enabled"
+ },
+ {
+ "fieldname": "description",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Description",
+ "reqd": 1
+ },
+ {
+ "fieldname": "title",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Title",
+ "reqd": 1,
+ "unique": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2024-09-18 15:21:38.898742",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Press Webhook Event",
+ "naming_rule": "By fieldname",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/press_webhook_event/press_webhook_event.py b/press/press/doctype/press_webhook_event/press_webhook_event.py
new file mode 100644
index 00000000000..cce42b56e35
--- /dev/null
+++ b/press/press/doctype/press_webhook_event/press_webhook_event.py
@@ -0,0 +1,23 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class PressWebhookEvent(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ description: DF.Data
+ enabled: DF.Check
+ title: DF.Data
+ # end: auto-generated types
+
+ DOCTYPE = "Press Webhook Event"
+ dashboard_fields = ("name", "description")
diff --git a/press/press/doctype/press_webhook_event/test_press_webhook_event.py b/press/press/doctype/press_webhook_event/test_press_webhook_event.py
new file mode 100644
index 00000000000..431e74a71cb
--- /dev/null
+++ b/press/press/doctype/press_webhook_event/test_press_webhook_event.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2024, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class TestPressWebhookEvent(FrappeTestCase):
+ pass
diff --git a/press/press/doctype/press_webhook_log/__init__.py b/press/press/doctype/press_webhook_log/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/press_webhook_log/press_webhook_log.js b/press/press/doctype/press_webhook_log/press_webhook_log.js
new file mode 100644
index 00000000000..64bc7d54175
--- /dev/null
+++ b/press/press/doctype/press_webhook_log/press_webhook_log.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2024, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Press Webhook Log", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/press_webhook_log/press_webhook_log.json b/press/press/doctype/press_webhook_log/press_webhook_log.json
new file mode 100644
index 00000000000..6564c1ebe00
--- /dev/null
+++ b/press/press/doctype/press_webhook_log/press_webhook_log.json
@@ -0,0 +1,114 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2024-09-18 16:34:27.110549",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "status",
+ "event",
+ "column_break_qqjj",
+ "team",
+ "section_break_vyzs",
+ "retries",
+ "column_break_btzb",
+ "next_retry_at",
+ "section_break_wqvc",
+ "attempts",
+ "section_break_bkbk",
+ "request_payload"
+ ],
+ "fields": [
+ {
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Status",
+ "options": "Pending\nQueued\nSent\nPartially Sent\nFailed",
+ "reqd": 1
+ },
+ {
+ "fieldname": "event",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Event",
+ "options": "Press Webhook Event",
+ "reqd": 1
+ },
+ {
+ "fieldname": "column_break_qqjj",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Team",
+ "options": "Team",
+ "reqd": 1
+ },
+ {
+ "fieldname": "section_break_bkbk",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "section_break_wqvc",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "section_break_vyzs",
+ "fieldtype": "Section Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "retries",
+ "fieldtype": "Int",
+ "label": "Retries"
+ },
+ {
+ "fieldname": "column_break_btzb",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "next_retry_at",
+ "fieldtype": "Datetime",
+ "label": "Next Retry At"
+ },
+ {
+ "fieldname": "request_payload",
+ "fieldtype": "JSON",
+ "label": "Request Payload",
+ "reqd": 1
+ },
+ {
+ "fieldname": "attempts",
+ "fieldtype": "Table",
+ "label": "Attempts",
+ "options": "Press Webhook Attempt"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2024-09-25 16:11:01.102311",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Press Webhook Log",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/press_webhook_log/press_webhook_log.py b/press/press/doctype/press_webhook_log/press_webhook_log.py
new file mode 100644
index 00000000000..7de8288e550
--- /dev/null
+++ b/press/press/doctype/press_webhook_log/press_webhook_log.py
@@ -0,0 +1,210 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+from __future__ import annotations
+
+import json
+
+import frappe
+import requests
+from frappe.model.document import Document
+from frappe.utils import add_to_date, now
+
+from press.overrides import get_permission_query_conditions_for_doctype
+
+
+class PressWebhookLog(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.press_webhook_attempt.press_webhook_attempt import PressWebhookAttempt
+
+ attempts: DF.Table[PressWebhookAttempt]
+ event: DF.Link
+ next_retry_at: DF.Datetime | None
+ request_payload: DF.JSON
+ retries: DF.Int
+ status: DF.Literal["Pending", "Queued", "Sent", "Partially Sent", "Failed"]
+ team: DF.Link
+ # end: auto-generated types
+
+ def validate(self):
+ if not self.next_retry_at:
+ self.next_retry_at = frappe.utils.now()
+
+ def _send_webhook_call(self, webhook_name, payload, url, secret, save: bool = True) -> bool:
+ response = ""
+ response_status_code = 0
+ try:
+ req = requests.post(
+ url,
+ json=payload,
+ headers={"X-Webhook-Secret": secret},
+ timeout=5,
+ )
+ response = req.text or ""
+ response_status_code = req.status_code
+ except requests.exceptions.ConnectionError:
+ response = "Failed to connect to the webhook endpoint"
+ except requests.exceptions.SSLError:
+ response = "SSL Error. Please check if SSL the certificate of the webhook is valid."
+ except (requests.exceptions.Timeout, requests.exceptions.ConnectTimeout):
+ response = "Request Timeout. Please check if the webhook is reachable."
+ except Exception as e:
+ response = str(e)
+
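+ # Treat any 2xx response as a successful delivery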
+ sent = 200 <= response_status_code < 300
+
+ self.append(
+ "attempts",
+ {
+ "endpoint": url,
+ "webhook": webhook_name,
+ "status": "Sent" if sent else "Failed",
+ "response_body": response,
+ "response_status_code": response_status_code,
+ "timestamp": frappe.utils.now(),
+ },
+ )
+ if save:
+ self.save()
+
+ return sent
+
+ def schedule_retry(self, save: bool = True):
+ self.retries = self.retries + 1
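+ # Exponential backoff: 2, 4, 8, ... minutes between successive retries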
+ self.next_retry_at = add_to_date(now(), minutes=2**self.retries)
+ if save:
+ self.save()
+
+ def send(self):
+ if len(self.attempts) == 0:
+ self._process_webhook_call()
+ return
+
+ # Try failed attempts
+ self._retry_failed_attempts()
+
+ def _process_webhook_call(self):
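+ # Deliver the payload to every enabled webhook of this team that subscribes to the event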
+ try:
+ PressWebhookSelectedEvent = frappe.qb.DocType("Press Webhook Selected Event")
+ PressWebhook = frappe.qb.DocType("Press Webhook")
+ query = (
+ frappe.qb.from_(PressWebhookSelectedEvent)
+ .select(PressWebhook.name, PressWebhook.endpoint, PressWebhook.secret)
+ .left_join(PressWebhook)
+ .on(PressWebhookSelectedEvent.parent == PressWebhook.name)
+ .where(PressWebhookSelectedEvent.event == self.event)
+ .where(PressWebhook.team == self.team)
+ .where(PressWebhook.enabled == 1)
+ )
+ webhooks = query.run(as_dict=True)
+ payload = json.loads(self.request_payload)
+ total = len(webhooks)
+ sent = 0
+ for webhook in webhooks:
+ is_sent = self._send_webhook_call(
+ webhook.name,
+ payload,
+ webhook.endpoint,
+ webhook.secret,
+ save=False,
+ )
+ if is_sent:
+ sent += 1
+
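+ # sent == total also covers the case of zero configured webhooks (0 == 0)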
+ if sent == total:
+ self.status = "Sent"
+ elif sent > 0:
+ self.status = "Partially Sent"
+ else:
+ self.status = "Failed"
+ self.schedule_retry(save=False)
+ except Exception:
+ self.status = "Failed"
+ self.schedule_retry(save=False)
+
+ self.save()
+
+ def _retry_failed_attempts(self):
+ webhook_call_status = frappe._dict()
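+ # Latest delivery status per webhook: any successful attempt marks it Sent; failures never override a success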
+ for record in self.attempts:
+ if record.status == "Failed" and webhook_call_status.get(record.webhook, "") != "Sent":
+ webhook_call_status[record.webhook] = "Failed"
+ if record.status == "Sent":
+ webhook_call_status[record.webhook] = "Sent"
+
+ # filter out webhooks that need to be retried
+ webhooks_to_retry = [
+ webhook for webhook in webhook_call_status if webhook_call_status[webhook] == "Failed"
+ ]
+
+ sent = 0
+ payload = json.loads(self.request_payload)
+
+ for webhook in webhooks_to_retry:
+ webhook_data = frappe.get_value(
+ "Press Webhook", record.webhook, ["endpoint", "secret"], as_dict=True
+ )
+ is_sent = self._send_webhook_call(
+ webhook,
+ payload,
+ webhook_data.endpoint,
+ webhook_data.secret,
+ )
+ if is_sent:
+ sent += 1
+
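+ # Sent if every pending endpoint was delivered; Partially Sent if at least one endpoint has ever succeeded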
+ if len(webhooks_to_retry) == 0 or sent == len(webhooks_to_retry):
+ self.status = "Sent"
+ elif (len(webhook_call_status) - len(webhooks_to_retry) > 0) or sent > 0:
+ self.status = "Partially Sent"
+ self.schedule_retry(save=False)
+ else:
+ self.status = "Failed"
+ self.schedule_retry(save=False)
+
+ self.save()
+
+
+get_permission_query_conditions = get_permission_query_conditions_for_doctype("Press Webhook Log")
+
+
+def process():
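+ # Pick up to 100 due logs (retries exhausted at 3) and queue them for delivery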
+ records = frappe.get_all(
+ "Press Webhook Log",
+ filters={
+ "status": ["in", ["Pending", "Failed", "Partially Sent"]],
+ "retries": ["<=", 3],
+ "next_retry_at": ["<=", frappe.utils.now()],
+ },
+ pluck="name",
+ limit=100,
+ )
+ # set status of these records to Queued
+ frappe.db.set_value("Press Webhook Log", {"name": ("in", records)}, "status", "Queued")
+ # enqueue these records
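+ # job_id + deduplicate prevent queuing the same log twice concurrently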
+ for record in records:
+ frappe.enqueue_doc(
+ "Press Webhook Log",
+ record,
+ method="send",
+ queue="default",
+ job_id=f"press_webhook_log:{record}",
+ deduplicate=True,
+ )
+
+
+def clean_logs_older_than_24_hours():
+ names = frappe.get_all(
+ "Press Webhook Log", filters={"creation": ["<", frappe.utils.add_days(None, -1)]}, pluck="name"
+ )
+ frappe.delete_doc("Press Webhook Log", names)
diff --git a/press/press/doctype/press_webhook_log/test_press_webhook_log.py b/press/press/doctype/press_webhook_log/test_press_webhook_log.py
new file mode 100644
index 00000000000..632d40f67fd
--- /dev/null
+++ b/press/press/doctype/press_webhook_log/test_press_webhook_log.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2024, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class TestPressWebhookLog(FrappeTestCase):
+ pass
diff --git a/press/press/doctype/press_webhook_selected_event/__init__.py b/press/press/doctype/press_webhook_selected_event/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/press_webhook_selected_event/press_webhook_selected_event.json b/press/press/doctype/press_webhook_selected_event/press_webhook_selected_event.json
new file mode 100644
index 00000000000..321cf625334
--- /dev/null
+++ b/press/press/doctype/press_webhook_selected_event/press_webhook_selected_event.json
@@ -0,0 +1,33 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2024-09-18 15:16:58.232474",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "event"
+ ],
+ "fields": [
+ {
+ "fieldname": "event",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Event",
+ "options": "Press Webhook Event",
+ "reqd": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2024-09-18 15:22:11.876264",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Press Webhook Selected Event",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/press_webhook_selected_event/press_webhook_selected_event.py b/press/press/doctype/press_webhook_selected_event/press_webhook_selected_event.py
new file mode 100644
index 00000000000..df94b558e61
--- /dev/null
+++ b/press/press/doctype/press_webhook_selected_event/press_webhook_selected_event.py
@@ -0,0 +1,23 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class PressWebhookSelectedEvent(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ event: DF.Link
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/process_snapshot/__init__.py b/press/press/doctype/process_snapshot/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/process_snapshot/process_snapshot.js b/press/press/doctype/process_snapshot/process_snapshot.js
new file mode 100644
index 00000000000..b03e4598ebd
--- /dev/null
+++ b/press/press/doctype/process_snapshot/process_snapshot.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Process Snapshot", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/process_snapshot/process_snapshot.json b/press/press/doctype/process_snapshot/process_snapshot.json
new file mode 100644
index 00000000000..7ce49c9800d
--- /dev/null
+++ b/press/press/doctype/process_snapshot/process_snapshot.json
@@ -0,0 +1,62 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-04-01 15:41:25.224836",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "server",
+ "bench",
+ "dump"
+ ],
+ "fields": [
+ {
+ "fieldname": "server",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Server",
+ "options": "Server",
+ "reqd": 1
+ },
+ {
+ "fieldname": "bench",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Bench",
+ "options": "Bench",
+ "reqd": 1
+ },
+ {
+ "fieldname": "dump",
+ "fieldtype": "JSON",
+ "label": "Dump",
+ "read_only": 1
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2025-04-08 16:43:12.154769",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Process Snapshot",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/process_snapshot/process_snapshot.py b/press/press/doctype/process_snapshot/process_snapshot.py
new file mode 100644
index 00000000000..651933b9fdc
--- /dev/null
+++ b/press/press/doctype/process_snapshot/process_snapshot.py
@@ -0,0 +1,44 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+from __future__ import annotations
+
+import json
+import typing
+
+import frappe
+import requests
+from frappe.model.document import Document
+
+if typing.TYPE_CHECKING:
+ from press.press.doctype.server.server import Server
+
+
+class ProcessSnapshot(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ bench: DF.Link
+ dump: DF.JSON | None
+ server: DF.Link
+ # end: auto-generated types
+
+ def check_bench_on_server(self):
+ if not frappe.get_value("Bench", {"name": self.bench, "server": self.server}):
+ frappe.throw(f"{self.bench} does not exist on server {self.server}")
+
+ def validate(self):
+ self.check_bench_on_server()
+
+ def after_insert(self):
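+ # Fetch the process dump synchronously from the agent; record HTTP errors in the dump itself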
+ server: Server = frappe.get_doc("Server", self.server)
+ try:
+ self.dump = json.dumps(server.agent.get_snapshot(self.bench), indent=2)
+ except requests.exceptions.HTTPError as e:
+ self.dump = json.dumps({"error": str(e)}, indent=2)
+ self.save()
diff --git a/press/press/doctype/process_snapshot/test_process_snapshot.py b/press/press/doctype/process_snapshot/test_process_snapshot.py
new file mode 100644
index 00000000000..5703f7430ed
--- /dev/null
+++ b/press/press/doctype/process_snapshot/test_process_snapshot.py
@@ -0,0 +1,14 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class IntegrationTestProcessSnapshot(FrappeTestCase):
+ """
+ Integration tests for ProcessSnapshot.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/prometheus_alert_rule/prometheus_alert_rule.json b/press/press/doctype/prometheus_alert_rule/prometheus_alert_rule.json
index 4480bd39ebd..463036a63af 100644
--- a/press/press/doctype/prometheus_alert_rule/prometheus_alert_rule.json
+++ b/press/press/doctype/prometheus_alert_rule/prometheus_alert_rule.json
@@ -8,6 +8,7 @@
"field_order": [
"severity",
"for",
+ "enabled",
"column_break_2",
"description",
"section_break_4",
@@ -24,7 +25,13 @@
"preview_section",
"alert_preview",
"column_break_21",
- "route_preview"
+ "route_preview",
+ "reaction_tab",
+ "press_job_type",
+ "ignore_on_clusters",
+ "column_break_oetk",
+ "only_on_shared",
+ "silent"
],
"fields": [
{
@@ -38,8 +45,7 @@
"fieldname": "expression",
"fieldtype": "Code",
"in_list_view": 1,
- "label": "Expression",
- "reqd": 1
+ "label": "Expression"
},
{
"default": "{}",
@@ -140,6 +146,47 @@
"fieldtype": "Code",
"label": "Route Preview",
"read_only": 1
+ },
+ {
+ "default": "1",
+ "fieldname": "enabled",
+ "fieldtype": "Check",
+ "in_list_view": 1,
+ "label": "Enabled"
+ },
+ {
+ "fieldname": "reaction_tab",
+ "fieldtype": "Tab Break",
+ "label": "Reaction"
+ },
+ {
+ "fieldname": "press_job_type",
+ "fieldtype": "Link",
+ "label": "Press Job Type",
+ "options": "Press Job Type"
+ },
+ {
+ "fieldname": "column_break_oetk",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "only_on_shared",
+ "fieldtype": "Check",
+ "label": "Only on Shared"
+ },
+ {
+ "fieldname": "ignore_on_clusters",
+ "fieldtype": "Table MultiSelect",
+ "label": "Ignore on Clusters",
+ "options": "Prometheus Alert Rule Cluster"
+ },
+ {
+ "default": "0",
+ "description": "Don't forward to telegram (Only for reactions)",
+ "fieldname": "silent",
+ "fieldtype": "Check",
+ "label": "Silent"
}
],
"index_web_pages_for_search": 1,
@@ -149,10 +196,11 @@
"link_fieldname": "alert"
}
],
- "modified": "2021-06-09 13:47:54.283546",
+ "modified": "2026-01-16 14:24:38.200768",
"modified_by": "Administrator",
"module": "Press",
"name": "Prometheus Alert Rule",
+ "naming_rule": "Set by user",
"owner": "Administrator",
"permissions": [
{
@@ -166,8 +214,34 @@
"role": "System Manager",
"share": 1,
"write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1,
+ "write": 1
}
],
+ "row_format": "Dynamic",
+ "rows_threshold_for_grid_search": 20,
"sort_field": "modified",
- "sort_order": "DESC"
+ "sort_order": "DESC",
+ "states": []
}
\ No newline at end of file
diff --git a/press/press/doctype/prometheus_alert_rule/prometheus_alert_rule.py b/press/press/doctype/prometheus_alert_rule/prometheus_alert_rule.py
index 0ac4cd1b391..2a38247d222 100644
--- a/press/press/doctype/prometheus_alert_rule/prometheus_alert_rule.py
+++ b/press/press/doctype/prometheus_alert_rule/prometheus_alert_rule.py
@@ -1,17 +1,58 @@
# Copyright (c) 2021, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
+
+import json
+from typing import TYPE_CHECKING
+
import frappe
import yaml
-import json
+from frappe.core.utils import find
from frappe.model.document import Document
+
from press.agent import Agent
+if TYPE_CHECKING:
+ from press.press.doctype.server.server import Server
+
class PrometheusAlertRule(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.prometheus_alert_rule_cluster.prometheus_alert_rule_cluster import (
+ PrometheusAlertRuleCluster,
+ )
+
+ alert_preview: DF.Code | None
+ annotations: DF.Code
+ description: DF.Data
+ enabled: DF.Check
+ expression: DF.Code | None
+ group_by: DF.Code
+ group_interval: DF.Data
+ group_wait: DF.Data
+ ignore_on_clusters: DF.TableMultiSelect[PrometheusAlertRuleCluster]
+ labels: DF.Code
+ only_on_shared: DF.Check
+ press_job_type: DF.Link | None
+ repeat_interval: DF.Data
+ route_preview: DF.Code | None
+ severity: DF.Literal["Critical", "Warning", "Information"]
+ silent: DF.Check
+ # end: auto-generated types
+
def validate(self):
self.alert_preview = yaml.dump(self.get_rule())
self.route_preview = yaml.dump(self.get_route())
+ if self.enabled and not self.expression:
+ frappe.throw("Enabled alert rules require an expression")
def get_rule(self):
labels = json.loads(self.labels)
@@ -20,24 +61,22 @@ def get_rule(self):
annotations = json.loads(self.annotations)
annotations.update({"description": self.description})
- rule = {
+ return {
"alert": self.name,
"expr": self.expression,
"for": self.get("for"),
"labels": labels,
"annotations": annotations,
}
- return rule
def get_route(self):
- route = {
+ return {
"group_by": json.loads(self.group_by),
"group_wait": self.group_wait,
"group_interval": self.group_interval,
"repeat_interval": self.repeat_interval,
"matchers": [f'alertname="{self.name}"'],
}
- return route
def on_update(self):
rules = yaml.dump(self.get_rules())
@@ -50,7 +89,7 @@ def on_update(self):
def get_rules(self):
rules_dict = {"groups": [{"name": "All", "rules": []}]}
- rules = frappe.get_all(self.doctype)
+ rules = frappe.get_all(self.doctype, {"enabled": True})
for rule in rules:
rule_doc = frappe.get_doc(self.doctype, rule.name)
rules_dict["groups"][0]["rules"].append(rule_doc.get_rule())
@@ -58,21 +97,69 @@ def get_rules(self):
return rules_dict
def get_routes(self):
+ webhook_token = frappe.db.get_value(
+ "Monitor Server", frappe.db.get_single_value("Press Settings", "monitor_server"), "webhook_token"
+ )
+
+ callback_url = frappe.utils.get_url("api/method/press.api.monitoring.alert")
+ if webhook_token:
+ callback_url = f"{callback_url}?webhook_token={webhook_token}"
+
routes_dict = {
"route": {"receiver": "web.hook", "routes": []},
"receivers": [
{
"name": "web.hook",
- "webhook_configs": [
- {"url": frappe.utils.get_url("api/method/press.api.monitoring.alert")}
- ],
+ "webhook_configs": [{"url": callback_url}],
}
],
}
- rules = frappe.get_all(self.doctype)
+ rules = frappe.get_all(self.doctype, {"enabled": True})
for rule in rules:
rule_doc = frappe.get_doc(self.doctype, rule.name)
routes_dict["route"]["routes"].append(rule_doc.get_route())
return routes_dict
+
+ def react(self, instance_type: str, instance: str, labels: dict | None = None):
+ return self.run_press_job(self.press_job_type, instance_type, instance, labels)
+
+ def run_press_job(
+ self, job_name: str, server_type: str, server_name: str, labels: dict | None = None, arguments=None
+ ):
+ server: "Server" = frappe.get_doc(server_type, server_name)
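+ # Scope the reaction: skip servers that are not shared (if only_on_shared) or sit in ignored clusters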
+ if self.only_on_shared and not server.public:
+ return None
+ if find(self.ignore_on_clusters, lambda x: x.cluster == server.cluster):
+ return None
+
+ if arguments is None:
+ arguments = {}
+
+ if not labels:
+ labels = {}
+
+ arguments.update({"labels": labels})
+
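+ # Reuse an in-flight job on this server instead of stacking duplicate reactions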
+ if existing_jobs := frappe.get_all(
+ "Press Job",
+ {
+ "status": ("in", ["Pending", "Running"]),
+ "server_type": server_type,
+ "server": server_name,
+ },
+ pluck="name",
+ ):
+ return frappe.get_doc("Press Job", existing_jobs[0])
+
+ return frappe.get_doc(
+ {
+ "doctype": "Press Job",
+ "job_type": job_name,
+ "server_type": server_type,
+ "server": server_name,
+ "virtual_machine": server.virtual_machine,
+ "arguments": json.dumps(arguments, indent=2, sort_keys=True),
+ }
+ ).insert()
diff --git a/press/press/doctype/prometheus_alert_rule/test_prometheus_alert_rule.py b/press/press/doctype/prometheus_alert_rule/test_prometheus_alert_rule.py
index 9591372cdce..8e596d149b6 100644
--- a/press/press/doctype/prometheus_alert_rule/test_prometheus_alert_rule.py
+++ b/press/press/doctype/prometheus_alert_rule/test_prometheus_alert_rule.py
@@ -1,9 +1,33 @@
# Copyright (c) 2021, Frappe and Contributors
# See license.txt
-# import frappe
-import unittest
+from unittest.mock import Mock, patch
+import frappe
+from frappe.tests.utils import FrappeTestCase
-class TestPrometheusAlertRule(unittest.TestCase):
+from press.agent import Agent
+from press.press.doctype.prometheus_alert_rule.prometheus_alert_rule import PrometheusAlertRule
+
+
+@patch.object(Agent, "update_monitor_rules", new=Mock())
+def create_test_prometheus_alert_rule(name="Sites Down") -> PrometheusAlertRule:
+ return frappe.get_doc( # type: ignore
+ {
+ "doctype": "Prometheus Alert Rule",
+ "name": name,
+ "description": "Sites didn't respond with http 200",
+ "severity": "Critical",
+ "group_wait": "1m",
+ "group_interval": "1m",
+ "repeat_interval": "1h",
+ "group_by": '["alertname", "cluster", "server", "instance"]',
+ "expression": 'probe_success{job="site"} == 0 and probe_http_status_code != 429',
+ "for": "4m",
+ "enable_reactions": True,
+ },
+ ).insert(ignore_if_duplicate=True)
+
+
+class TestPrometheusAlertRule(FrappeTestCase):
pass
diff --git a/press/press/doctype/prometheus_alert_rule_cluster/__init__.py b/press/press/doctype/prometheus_alert_rule_cluster/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/prometheus_alert_rule_cluster/prometheus_alert_rule_cluster.json b/press/press/doctype/prometheus_alert_rule_cluster/prometheus_alert_rule_cluster.json
new file mode 100644
index 00000000000..9853c29c431
--- /dev/null
+++ b/press/press/doctype/prometheus_alert_rule_cluster/prometheus_alert_rule_cluster.json
@@ -0,0 +1,33 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2024-05-24 16:13:21.415426",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "cluster"
+ ],
+ "fields": [
+ {
+ "fieldname": "cluster",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Cluster",
+ "options": "Cluster",
+ "reqd": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2024-05-24 16:31:37.894227",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Prometheus Alert Rule Cluster",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/prometheus_alert_rule_cluster/prometheus_alert_rule_cluster.py b/press/press/doctype/prometheus_alert_rule_cluster/prometheus_alert_rule_cluster.py
new file mode 100644
index 00000000000..fe69605eb45
--- /dev/null
+++ b/press/press/doctype/prometheus_alert_rule_cluster/prometheus_alert_rule_cluster.py
@@ -0,0 +1,23 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class PrometheusAlertRuleCluster(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ cluster: DF.Link
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/proxy_server/proxy_server.js b/press/press/doctype/proxy_server/proxy_server.js
index bb369d25540..9fb91cbed56 100644
--- a/press/press/doctype/proxy_server/proxy_server.js
+++ b/press/press/doctype/proxy_server/proxy_server.js
@@ -8,8 +8,26 @@ frappe.ui.form.on('Proxy Server', {
[__('Ping Ansible'), 'ping_ansible', true],
[__('Ping Ansible Unprepared'), 'ping_ansible_unprepared', true],
[__('Update Agent'), 'update_agent', true, frm.doc.is_server_setup],
+ [
+ __('Update Agent Ansible'),
+ 'update_agent_ansible',
+ true,
+ frm.doc.is_server_setup,
+ ],
+ [
+ __('Install Filebeat'),
+ 'install_filebeat',
+ true,
+ frm.doc.is_server_setup,
+ ],
[__('Prepare Server'), 'prepare_server', true, !frm.doc.is_server_setup],
[__('Setup Server'), 'setup_server', true, !frm.doc.is_server_setup],
+ [
+ __('Get AWS Static IP'),
+ 'get_aws_static_ip',
+ false,
+ frm.doc.provider === 'AWS EC2',
+ ],
[
__('Setup SSH Proxy'),
'setup_ssh_proxy',
@@ -34,6 +52,12 @@ frappe.ui.form.on('Proxy Server', {
true,
frm.doc.is_server_setup,
],
+ [
+ __('Show Agent Password'),
+ 'show_agent_password',
+ false,
+ frm.doc.is_server_setup,
+ ],
[
__('Fetch Keys'),
'fetch_keys',
@@ -42,6 +66,8 @@ frappe.ui.form.on('Proxy Server', {
(!frm.doc.frappe_public_key || !frm.doc.root_public_key),
],
[__('Update TLS Certificate'), 'update_tls_certificate', true],
+ [__('Reload NGINX'), 'reload_nginx', true, frm.doc.is_server_setup],
+ [__('Create Image'), 'create_image', true, frm.doc.status === 'Active'],
[
__('Setup Replication'),
'setup_replication',
@@ -58,7 +84,17 @@ frappe.ui.form.on('Proxy Server', {
!frm.doc.is_primary &&
frm.doc.is_replication_setup,
],
+ [__('Archive'), 'archive', true, frm.doc.status !== 'Archived'],
[__('Setup Fail2ban'), 'setup_fail2ban', true, frm.doc.is_server_setup],
+ [__('Remove Fail2ban'), 'remove_fail2ban', true, frm.doc.is_server_setup],
+ [__('Setup Wireguard'), 'setup_wireguard', true],
+ [__('Reload Wireguard'), 'reload_wireguard', true],
+ [
+ __('Reboot with serial console'),
+ 'reboot_with_serial_console',
+ true,
+ frm.doc.virtual_machine,
+ ],
].forEach(([label, method, confirm, condition]) => {
if (typeof condition === 'undefined' || condition) {
frm.add_custom_button(
@@ -91,4 +127,8 @@ frappe.ui.form.on('Proxy Server', {
}
});
},
+
+ hostname: function (frm) {
+ press.set_hostname_abbreviation(frm);
+ },
});
diff --git a/press/press/doctype/proxy_server/proxy_server.json b/press/press/doctype/proxy_server/proxy_server.json
index 0ac00d12e8a..39cae8627fd 100644
--- a/press/press/doctype/proxy_server/proxy_server.json
+++ b/press/press/doctype/proxy_server/proxy_server.json
@@ -7,8 +7,11 @@
"field_order": [
"status",
"hostname",
+ "hostname_abbreviation",
"domain",
"self_hosted_server_domain",
+ "tls_certificate_renewal_failed",
+ "plan",
"column_break_3",
"cluster",
"provider",
@@ -16,14 +19,24 @@
"is_server_setup",
"is_self_hosted",
"team",
+ "public",
+ "halt_agent_jobs",
+ "storage_section",
+ "auto_add_storage_min",
+ "auto_add_storage_max",
+ "auto_increase_storage",
"section_break_8",
"ip",
+ "enabled_default_routing",
+ "is_static_ip",
"column_break_10",
"private_ip",
"private_mac_address",
"private_vlan_id",
"agent_section",
"agent_password",
+ "column_break_mznm",
+ "disable_agent_job_auto_retry",
"replica_section",
"is_primary",
"primary",
@@ -34,6 +47,7 @@
"frappe_user_password",
"frappe_public_key",
"column_break_18",
+ "bastion_server",
"root_public_key",
"section_break_21",
"domains",
@@ -44,29 +58,36 @@
"proxysql_section",
"proxysql_admin_password",
"proxysql_monitor_password",
- "is_proxysql_setup"
+ "is_proxysql_setup",
+ "vpn_tab",
+ "wireguard_network",
+ "wireguard_network_ip",
+ "wireguard_port",
+ "is_wireguard_setup",
+ "column_break_dapz",
+ "wireguard_private_key",
+ "wireguard_public_key",
+ "private_ip_interface_id",
+ "wireguard_interface_id"
],
"fields": [
{
- "fetch_from": "virtual_machine.public_ip_address",
"fieldname": "ip",
"fieldtype": "Data",
"in_list_view": 1,
- "label": "IP",
- "set_only_once": 1
+ "label": "IP"
},
{
"fetch_from": "virtual_machine.private_ip_address",
"fieldname": "private_ip",
"fieldtype": "Data",
- "label": "Private IP",
- "set_only_once": 1
+ "label": "Private IP"
},
{
"fieldname": "agent_password",
"fieldtype": "Password",
"label": "Agent Password",
- "set_only_once": 1
+ "read_only": 1
},
{
"default": "Pending",
@@ -137,7 +158,6 @@
"fieldtype": "Link",
"label": "Domain",
"options": "Root Domain",
- "read_only": 1,
"set_only_once": 1
},
{
@@ -145,7 +165,7 @@
"fieldname": "provider",
"fieldtype": "Select",
"label": "Provider",
- "options": "Generic\nScaleway\nAWS EC2",
+ "options": "Generic\nScaleway\nAWS EC2\nOCI\nHetzner\nVodacom\nDigitalOcean",
"set_only_once": 1
},
{
@@ -181,7 +201,8 @@
},
{
"fieldname": "section_break_21",
- "fieldtype": "Section Break"
+ "fieldtype": "Section Break",
+ "label": "Nginx"
},
{
"fieldname": "domains",
@@ -190,11 +211,11 @@
"options": "Proxy Server Domain"
},
{
- "depends_on": "eval: doc.provider === \"AWS EC2\"",
+ "depends_on": "eval:[\"AWS EC2\", \"OCI\", \"Hetzner\", \"DigitalOcean\"].includes(doc.provider)",
"fieldname": "virtual_machine",
"fieldtype": "Link",
"label": "Virtual Machine",
- "mandatory_depends_on": "eval: doc.provider === \"AWS EC2\"",
+ "mandatory_depends_on": "eval:[\"AWS EC2\", \"OCI\"].includes(doc.provider)",
"options": "Virtual Machine"
},
{
@@ -301,10 +322,151 @@
"fieldname": "self_hosted_server_domain",
"fieldtype": "Data",
"label": "Self Hosted Server Domain"
+ },
+ {
+ "fieldname": "vpn_tab",
+ "fieldtype": "Tab Break",
+ "label": "VPN"
+ },
+ {
+ "default": "51820",
+ "fieldname": "wireguard_port",
+ "fieldtype": "Int",
+ "label": "Wireguard Port"
+ },
+ {
+ "fieldname": "wireguard_private_key",
+ "fieldtype": "Password",
+ "label": "Wireguard Private Key"
+ },
+ {
+ "fieldname": "wireguard_public_key",
+ "fieldtype": "Password",
+ "label": "Wireguard Public Key"
+ },
+ {
+ "default": "wg0",
+ "fieldname": "wireguard_interface_id",
+ "fieldtype": "Data",
+ "label": "Wireguard Interface ID"
+ },
+ {
+ "default": "0",
+ "fieldname": "is_wireguard_setup",
+ "fieldtype": "Check",
+ "label": "Is Wireguard Setup"
+ },
+ {
+ "fieldname": "private_ip_interface_id",
+ "fieldtype": "Data",
+ "label": "Private IP Interface ID"
+ },
+ {
+ "fieldname": "wireguard_network",
+ "fieldtype": "Data",
+ "label": "Wireguard Network"
+ },
+ {
+ "fieldname": "wireguard_network_ip",
+ "fieldtype": "Data",
+ "label": "Wireguard Network IP"
+ },
+ {
+ "fieldname": "column_break_dapz",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "hostname_abbreviation",
+ "fieldtype": "Data",
+ "label": "Hostname Abbreviation"
+ },
+ {
+ "default": "0",
+ "description": "Wildcard routing eg. *.root-domain",
+ "fieldname": "enabled_default_routing",
+ "fieldtype": "Check",
+ "label": "Enabled Default Routing",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_mznm",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "1",
+ "fieldname": "disable_agent_job_auto_retry",
+ "fieldtype": "Check",
+ "label": "Disable Agent Job Auto Retry"
+ },
+ {
+ "default": "0",
+ "fieldname": "public",
+ "fieldtype": "Check",
+ "label": "Public"
+ },
+ {
+ "fieldname": "storage_section",
+ "fieldtype": "Section Break",
+ "label": "Storage"
+ },
+ {
+ "default": "10",
+ "description": "Minimum storage to add automatically each time",
+ "fieldname": "auto_add_storage_min",
+ "fieldtype": "Int",
+ "label": "Auto Add Storage Min",
+ "non_negative": 1
+ },
+ {
+ "default": "50",
+ "description": "Maximum storage to add automatically each time",
+ "fieldname": "auto_add_storage_max",
+ "fieldtype": "Int",
+ "label": "Auto Add Storage Max",
+ "non_negative": 1
+ },
+ {
+ "default": "0",
+ "description": "Stop polling and queuing agent jobs",
+ "fieldname": "halt_agent_jobs",
+ "fieldtype": "Check",
+ "label": "Halt Agent Jobs"
+ },
+ {
+ "default": "0",
+ "fieldname": "tls_certificate_renewal_failed",
+ "fieldtype": "Check",
+ "label": "TLS Certificate Renewal Failed",
+ "read_only": 1
+ },
+ {
+ "fieldname": "bastion_server",
+ "fieldtype": "Link",
+ "label": "Bastion Server",
+ "options": "Bastion Server"
+ },
+ {
+ "default": "1",
+ "fieldname": "auto_increase_storage",
+ "fieldtype": "Check",
+ "label": "Auto Increase Storage"
+ },
+ {
+ "default": "0",
+ "fieldname": "is_static_ip",
+ "fieldtype": "Check",
+ "label": "Is Static IP",
+ "read_only": 1
+ },
+ {
+ "fieldname": "plan",
+ "fieldtype": "Link",
+ "label": "Plan",
+ "options": "Server Plan"
}
],
"links": [],
- "modified": "2023-04-20 11:58:35.926243",
+ "modified": "2026-01-18 16:20:49.521726",
"modified_by": "Administrator",
"module": "Press",
"name": "Proxy Server",
@@ -323,8 +485,9 @@
"write": 1
}
],
+ "row_format": "Dynamic",
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"track_changes": 1
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/proxy_server/proxy_server.py b/press/press/doctype/proxy_server/proxy_server.py
index a708630e723..da4f0c4dbf2 100644
--- a/press/press/doctype/proxy_server/proxy_server.py
+++ b/press/press/doctype/proxy_server/proxy_server.py
@@ -1,70 +1,100 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
+from typing import TYPE_CHECKING
import frappe
+from frappe.utils import unique
+
from press.press.doctype.server.server import BaseServer
from press.runner import Ansible
-from press.agent import Agent
+from press.security import fail2ban
from press.utils import log_error
-from frappe.utils import unique
-from frappe.core.utils import find
-import boto3
+if TYPE_CHECKING:
+ from press.press.doctype.bench.bench import Bench
+ from press.press.doctype.root_domain.root_domain import RootDomain
class ProxyServer(BaseServer):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.proxy_server_domain.proxy_server_domain import ProxyServerDomain
+
+ agent_password: DF.Password | None
+ auto_add_storage_max: DF.Int
+ auto_add_storage_min: DF.Int
+ auto_increase_storage: DF.Check
+ bastion_server: DF.Link | None
+ cluster: DF.Link | None
+ disable_agent_job_auto_retry: DF.Check
+ domain: DF.Link | None
+ domains: DF.Table[ProxyServerDomain]
+ enabled_default_routing: DF.Check
+ frappe_public_key: DF.Code | None
+ frappe_user_password: DF.Password | None
+ halt_agent_jobs: DF.Check
+ hostname: DF.Data
+ hostname_abbreviation: DF.Data | None
+ ip: DF.Data | None
+ is_primary: DF.Check
+ is_proxysql_setup: DF.Check
+ is_replication_setup: DF.Check
+ is_self_hosted: DF.Check
+ is_server_setup: DF.Check
+ is_ssh_proxy_setup: DF.Check
+ is_static_ip: DF.Check
+ is_wireguard_setup: DF.Check
+ plan: DF.Link | None
+ primary: DF.Link | None
+ private_ip: DF.Data | None
+ private_ip_interface_id: DF.Data | None
+ private_mac_address: DF.Data | None
+ private_vlan_id: DF.Data | None
+ provider: DF.Literal["Generic", "Scaleway", "AWS EC2", "OCI", "Hetzner", "Vodacom", "DigitalOcean"]
+ proxysql_admin_password: DF.Password | None
+ proxysql_monitor_password: DF.Password | None
+ public: DF.Check
+ root_public_key: DF.Code | None
+ self_hosted_server_domain: DF.Data | None
+ ssh_certificate_authority: DF.Link | None
+ ssh_port: DF.Int
+ ssh_user: DF.Data | None
+ status: DF.Literal["Pending", "Installing", "Active", "Broken", "Archived"]
+ team: DF.Link | None
+ tls_certificate_renewal_failed: DF.Check
+ virtual_machine: DF.Link | None
+ wireguard_interface_id: DF.Data | None
+ wireguard_network: DF.Data | None
+ wireguard_network_ip: DF.Data | None
+ wireguard_port: DF.Int
+ wireguard_private_key: DF.Password | None
+ wireguard_public_key: DF.Password | None
+ # end: auto-generated types
+
def validate(self):
super().validate()
self.validate_domains()
self.validate_proxysql_admin_password()
def validate_domains(self):
- domains = [row.domain for row in self.domains]
- # Always include self.domain in the domains child table
- # Remove duplicates
- domains = unique([self.domain] + domains)
- self.domains = []
- for domain in domains:
+ domains_to_validate = unique([self.domain] + [row.domain for row in self.domains])
+ for domain in domains_to_validate:
if not frappe.db.exists(
"TLS Certificate", {"wildcard": True, "status": "Active", "domain": domain}
):
- frappe.throw(f"Valid wildcard TLS Certificate not found for {domain}")
- self.append("domains", {"domain": domain})
+ # frappe.throw(f"Valid wildcard TLS Certificate not found for {domain}")
+ ...
def validate_proxysql_admin_password(self):
if not self.proxysql_admin_password:
self.proxysql_admin_password = frappe.generate_hash(length=32)
- def get_wildcard_domains(self):
- wildcard_domains = []
- for domain in self.domains:
- if domain.domain == self.domain:
- # self.domain certs are symlinks
- continue
- certificate_name = frappe.db.get_value(
- "TLS Certificate", {"wildcard": True, "domain": domain.domain}, "name"
- )
- certificate = frappe.get_doc("TLS Certificate", certificate_name)
- wildcard_domains.append(
- {
- "domain": domain.domain,
- "certificate": {
- "privkey.pem": certificate.private_key,
- "fullchain.pem": certificate.full_chain,
- "chain.pem": certificate.intermediate_chain,
- },
- }
- )
- return wildcard_domains
-
- @frappe.whitelist()
- def setup_wildcard_hosts(self):
- agent = Agent(self.name, server_type="Proxy Server")
- wildcards = self.get_wildcard_domains()
- agent.setup_wildcard_hosts(wildcards)
-
def _setup_server(self):
agent_password = self.get_password("agent_password")
agent_repository_url = self.get_agent_repository_url()
@@ -72,23 +102,17 @@ def _setup_server(self):
"TLS Certificate", {"wildcard": True, "domain": self.domain}, "name"
)
certificate = frappe.get_doc("TLS Certificate", certificate_name)
- monitoring_password = frappe.get_doc("Cluster", self.cluster).get_password(
- "monitoring_password"
- )
+ monitoring_password = frappe.get_doc("Cluster", self.cluster).get_password("monitoring_password")
log_server = frappe.db.get_single_value("Press Settings", "log_server")
if log_server:
- kibana_password = frappe.get_doc("Log Server", log_server).get_password(
- "kibana_password"
- )
+ kibana_password = frappe.get_doc("Log Server", log_server).get_password("kibana_password")
else:
kibana_password = None
try:
ansible = Ansible(
- playbook="self_hosted_proxy.yml"
- if getattr(self, "is_self_hosted", False)
- else "proxy.yml",
+ playbook="self_hosted_proxy.yml" if getattr(self, "is_self_hosted", False) else "proxy.yml",
server=self,
user=self.ssh_user or "root",
port=self.ssh_port or 22,
@@ -104,6 +128,7 @@ def _setup_server(self):
"certificate_private_key": certificate.private_key,
"certificate_full_chain": certificate.full_chain,
"certificate_intermediate_chain": certificate.intermediate_chain,
+ "press_url": frappe.utils.get_url(),
},
)
play = ansible.run()
@@ -119,9 +144,7 @@ def _setup_server(self):
self.save()
def _install_exporters(self):
- monitoring_password = frappe.get_doc("Cluster", self.cluster).get_password(
- "monitoring_password"
- )
+ monitoring_password = frappe.get_doc("Cluster", self.cluster).get_password("monitoring_password")
try:
ansible = Ansible(
playbook="proxy_exporters.yml",
@@ -139,9 +162,7 @@ def _install_exporters(self):
@frappe.whitelist()
def setup_ssh_proxy(self):
- frappe.enqueue_doc(
- self.doctype, self.name, "_setup_ssh_proxy", queue="long", timeout=1200
- )
+ frappe.enqueue_doc(self.doctype, self.name, "_setup_ssh_proxy", queue="long", timeout=1200)
def _setup_ssh_proxy(self):
settings = frappe.db.get_value(
@@ -174,17 +195,26 @@ def _setup_ssh_proxy(self):
@frappe.whitelist()
def setup_fail2ban(self):
- self.status = "Installing"
- self.save()
frappe.enqueue_doc(
- self.doctype, self.name, "_setup_fail2ban", queue="long", timeout=1200
+ self.doctype,
+ self.name,
+ "_setup_fail2ban",
+ queue="long",
+ timeout=1200,
)
+ self.status = "Installing"
+ self.save()
def _setup_fail2ban(self):
try:
ansible = Ansible(
playbook="fail2ban.yml",
server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ variables={
+ "ignore_ips": fail2ban.ignore_ips(),
+ },
)
play = ansible.run()
self.reload()
@@ -198,22 +228,56 @@ def _setup_fail2ban(self):
self.save()
@frappe.whitelist()
- def setup_proxysql(self):
+ def remove_fail2ban(self):
frappe.enqueue_doc(
- self.doctype, self.name, "_setup_proxysql", queue="long", timeout=1200
+ self.doctype,
+ self.name,
+ "_remove_fail2ban",
+ queue="long",
+ timeout=1200,
)
+ self.status = "Installing"
+ self.save()
+
+ def _remove_fail2ban(self):
+ try:
+ ansible = Ansible(
+ playbook="fail2ban_remove.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ )
+ play = ansible.run()
+ self.reload()
+ if play.status == "Success":
+ self.status = "Active"
+ else:
+ self.status = "Broken"
+ except Exception:
+ self.status = "Broken"
+ log_error("Fail2ban Setup Exception", server=self.as_dict())
+ self.save()
+
+ @frappe.whitelist()
+ def setup_proxysql(self):
+ frappe.enqueue_doc(self.doctype, self.name, "_setup_proxysql", queue="long", timeout=1200)
def _setup_proxysql(self):
try:
- default_hostgroup = frappe.get_all(
- "Database Server",
- "MIN(server_id)",
- {"status": "Active", "cluster": self.cluster},
- as_list=True,
- )[0][0]
+ default_hostgroup = (
+ frappe.get_all(
+ "Database Server",
+ "MIN(server_id)",
+ {"status": "Active", "cluster": self.cluster},
+ as_list=True,
+ )[0][0]
+ or 0
+ )
ansible = Ansible(
playbook="proxysql.yml",
server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
variables={
"server": self.name,
"proxysql_admin_password": self.get_password("proxysql_admin_password"),
@@ -225,6 +289,9 @@ def _setup_proxysql(self):
self.reload()
self.is_proxysql_setup = True
self.save()
+ if self.provider == "DigitalOcean":
+ # To adjust docker permissions
+ self.reboot()
except Exception:
log_error("ProxySQL Setup Exception", server=self.as_dict())
@@ -232,9 +299,7 @@ def _setup_proxysql(self):
def setup_replication(self):
self.status = "Installing"
self.save()
- frappe.enqueue_doc(
- self.doctype, self.name, "_setup_replication", queue="long", timeout=1200
- )
+ frappe.enqueue_doc(self.doctype, self.name, "_setup_replication", queue="long", timeout=1200)
def _setup_replication(self):
self._setup_secondary()
@@ -251,6 +316,8 @@ def _setup_primary(self, secondary):
ansible = Ansible(
playbook="primary_proxy.yml",
server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
variables={"secondary_private_ip": secondary_private_ip},
)
play = ansible.run()
@@ -265,14 +332,13 @@ def _setup_primary(self, secondary):
self.save()
def _setup_secondary(self):
- primary_public_key = frappe.db.get_value(
- "Proxy Server", self.primary, "frappe_public_key"
- )
try:
ansible = Ansible(
playbook="secondary_proxy.yml",
server=self,
- variables={"primary_public_key": primary_public_key},
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ variables={"primary_public_key": self.get_primary_frappe_public_key()},
)
play = ansible.run()
self.reload()
@@ -292,76 +358,117 @@ def trigger_failover(self):
return
self.status = "Installing"
self.save()
- frappe.enqueue_doc(
- self.doctype, self.name, "_trigger_failover", queue="long", timeout=1200
+ frappe.enqueue_doc(self.doctype, self.name, "_trigger_failover", queue="long", timeout=3600)
+
+ def stop_primary(self):
+ primary = frappe.get_doc("Proxy Server", self.primary)
+ try:
+ ansible = Ansible(
+ playbook="failover_prepare_primary_proxy.yml",
+ server=primary,
+ user=primary._ssh_user(),
+ port=primary._ssh_port(),
+ )
+ ansible.run()
+ except Exception:
+ pass # the old primary may already be unreachable
+
+ def forward_jobs_to_secondary(self):
+ frappe.db.set_value(
+ "Agent Job",
+ {"server": self.primary, "status": "Undelivered"},
+ "server",
+ self.name,
+ )
+
+ def move_wildcard_domains_from_primary(self):
+ frappe.db.set_value(
+ "Proxy Server Domain",
+ {"parent": self.primary},
+ "parent",
+ self.name,
+ )
+
+ def remove_primarys_access(self):
+ ansible = Ansible(
+ playbook="failover_remove_primary_access.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ variables={
+ "primary_public_key": frappe.db.get_value("Proxy Server", self.primary, "frappe_public_key")
+ },
+ )
+ ansible.run()
+
+ def up_secondary(self):
+ ansible = Ansible(
+ playbook="failover_up_secondary_proxy.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ )
+ ansible.run()
+
+ def update_dns_records_for_all_sites(self):
+ from itertools import groupby
+
+ servers = frappe.get_all("Server", {"proxy_server": self.primary}, pluck="name")
+ sites_domains = frappe.get_all(
+ "Site",
+ {"status": ("!=", "Archived"), "server": ("in", servers)},
+ ["name", "domain"],
+ order_by="domain",
)
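+ # groupby() requires input sorted by the grouping key, hence order_by="domain" above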
+ for domain_name, sites in groupby(sites_domains, lambda x: x["domain"]):
+ domain: RootDomain = frappe.get_doc("Root Domain", domain_name)
+ domain.update_dns_records_for_sites([site.name for site in sites], self.name)
def _trigger_failover(self):
try:
- self.update_dns_record()
- self.reload_nginx()
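+ # Order matters: repoint DNS first, fence the old primary, then promote this server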
+ self.update_dns_records_for_all_sites()
+ self.stop_primary()
+ self.remove_primarys_access()
+ self.forward_jobs_to_secondary()
+ self.up_secondary()
self.update_app_servers()
+ self.move_wildcard_domains_from_primary()
self.switch_primary()
+ self.add_ssh_users_for_existing_benches()
except Exception:
self.status = "Broken"
- log_error("Proxy Server Failover Exception", server=self.as_dict())
+ log_error("Proxy Server Failover Exception", doc=self)
self.save()
- def update_dns_record(self):
- try:
- domain = frappe.get_doc("Root Domain", self.domain)
- client = boto3.client(
- "route53",
- aws_access_key_id=domain.aws_access_key_id,
- aws_secret_access_key=domain.get_password("aws_secret_access_key"),
- )
- zones = client.list_hosted_zones_by_name()["HostedZones"]
- # list_hosted_zones_by_name returns a lexicographically ordered list of zones
- # i.e. x.example.com comes after example.com
- # Name field has a trailing dot
- hosted_zone = find(reversed(zones), lambda x: domain.name.endswith(x["Name"][:-1]))[
- "Id"
- ]
- client.change_resource_record_sets(
- ChangeBatch={
- "Changes": [
- {
- "Action": "UPSERT",
- "ResourceRecordSet": {
- "Name": self.primary,
- "Type": "CNAME",
- "TTL": 3600,
- "ResourceRecords": [{"Value": self.name}],
- },
- }
- ]
- },
- HostedZoneId=hosted_zone,
- )
- except Exception:
- self.status = "Broken"
- log_error("Route 53 Record Update Error", domain=domain.name, server=self.name)
-
- def reload_nginx(self):
- agent = Agent(self.name, server_type="Proxy Server")
- agent.restart_nginx()
+ def add_ssh_users_for_existing_benches(self):
+ benches = frappe.qb.DocType("Bench")
+ servers = frappe.qb.DocType("Server")
+ active_benches = (
+ frappe.qb.from_(benches)
+ .join(servers)
+ .on(servers.name == benches.server)
+ .select(benches.name)
+ .where(servers.proxy_server == self.primary)
+ .where(benches.status == "Active")
+ .run(as_dict=True)
+ )
+ for bench_row in active_benches:
+ bench: "Bench" = frappe.get_doc("Bench", bench_row.name)
+ bench.add_ssh_user()
def update_app_servers(self):
- frappe.db.set_value(
- "Server", {"proxy_server": self.primary}, "proxy_server", self.name
- )
+ frappe.db.set_value("Server", {"proxy_server": self.primary}, "proxy_server", self.name)
def switch_primary(self):
- primary = frappe.get_doc("Proxy Server", self.primary)
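+ # Demote the old primary with a direct DB update; its document hooks must not run mid-failover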
+ frappe.db.set_value("Proxy Server", self.primary, "is_primary", False)
self.is_primary = True
- primary.is_primary = False
- primary.save()
+ self.is_replication_setup = False
+ self.primary = None
+ self.status = "Active"
@frappe.whitelist()
def setup_proxysql_monitor(self):
- frappe.enqueue_doc(
- self.doctype, self.name, "_setup_proxysql_monitor", queue="long", timeout=1200
- )
+ frappe.enqueue_doc(self.doctype, self.name, "_setup_proxysql_monitor", queue="long", timeout=1200)
def _setup_proxysql_monitor(self):
try:
@@ -374,6 +481,8 @@ def _setup_proxysql_monitor(self):
ansible = Ansible(
playbook="proxysql_monitor.yml",
server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
variables={
"server": self.name,
"proxysql_admin_password": self.get_password("proxysql_admin_password"),
@@ -384,12 +493,96 @@ def _setup_proxysql_monitor(self):
except Exception:
log_error("ProxySQL Monitor Setup Exception", server=self.as_dict())
+ @frappe.whitelist()
+ def setup_wireguard(self):
+ if not self.private_ip_interface_id:
+ play = frappe.get_last_doc("Ansible Play", {"play": "Ping Server", "server": self.name})
+ task = frappe.get_doc("Ansible Task", {"play": play.name, "task": "Gather Facts"})
+ import json
+
+ task_res = json.loads(task.result)["ansible_facts"]
+ for i in task_res["interfaces"]:
+ if task_res[i]["ipv4"]["address"] == self.private_ip:
+ self.private_ip_interface_id = task_res[i]["device"]
+ self.save()
+ frappe.enqueue_doc(self.doctype, self.name, "_setup_wireguard", queue="long", timeout=1200)
+
+ def _setup_wireguard(self):
+ try:
+ ansible = Ansible(
+ playbook="wireguard.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ variables={
+ "server": self.name,
+ "wireguard_port": self.wireguard_port,
+ "wireguard_network": self.wireguard_network_ip
+ + "/"
+ + self.wireguard_network.split("/")[1],
+ "interface_id": self.private_ip_interface_id,
+ "wireguard_private_key": False,
+ "wireguard_public_key": False,
+ "peers": "",
+ "reload_wireguard": bool(self.is_wireguard_setup),
+ },
+ )
+ play = ansible.run()
+ if play.status == "Success":
+ self.reload()
+ self.is_wireguard_setup = True
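+ # First run: persist the keypair the playbook generated on the host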
+ if not self.wireguard_private_key and not self.wireguard_public_key:
+ self.wireguard_private_key = frappe.get_doc(
+ "Ansible Task", {"play": play.name, "task": "Generate Wireguard Private Key"}
+ ).output
+ self.wireguard_public_key = frappe.get_doc(
+ "Ansible Task", {"play": play.name, "task": "Generate Wireguard Public Key"}
+ ).output
+ self.save()
+ except Exception:
+ log_error("Wireguard Setup Exception", server=self.as_dict())
+
+ @frappe.whitelist()
+ def reload_wireguard(self):
+ frappe.enqueue_doc("Proxy Server", self.name, "_reload_wireguard", queue="default", timeout=1200)
+
+ def _reload_wireguard(self):
+ import json
+
+ peers = frappe.get_list(
+ "Wireguard Peer",
+ filters={"upstream_proxy": self.name, "status": "Active"},
+ fields=["peer_name as name", "public_key", "ip as peer_ip", "allowed_ips"],
+ order_by="creation asc",
+ )
+ try:
+ ansible = Ansible(
+ playbook="reload_wireguard.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ variables={
+ "server": self.name,
+ "wireguard_port": self.wireguard_port,
+ "wireguard_network": self.wireguard_network_ip
+ + "/"
+ + self.wireguard_network.split("/")[1],
+ "interface_id": self.private_ip_interface_id,
+ "wireguard_private_key": self.get_password("wireguard_private_key"),
+ "wireguard_public_key": self.get_password("wireguard_public_key"),
+ "peers": json.dumps(peers),
+ },
+ )
+ ansible.run()
+ except Exception:
+ log_error("Wireguard Setup Exception", server=self.as_dict())
+
def process_update_nginx_job_update(job):
proxy_server = frappe.get_doc("Proxy Server", job.server)
if job.status == "Success":
proxy_server.status = "Active"
- elif job.status in ["Failure", "Undelivered"]:
+ elif job.status in ["Failure", "Undelivered", "Delivery Failure"]:
proxy_server.status = "Broken"
elif job.status in ["Pending", "Running"]:
proxy_server.status = "Installing"
diff --git a/press/press/doctype/proxy_server/test_proxy_server.py b/press/press/doctype/proxy_server/test_proxy_server.py
index 5fa4a60357d..552a98ec6f7 100644
--- a/press/press/doctype/proxy_server/test_proxy_server.py
+++ b/press/press/doctype/proxy_server/test_proxy_server.py
@@ -1,30 +1,37 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
+from __future__ import annotations
-import unittest
-from typing import Dict, List
from unittest.mock import Mock, patch
import frappe
from frappe.model.naming import make_autoname
+from frappe.tests.utils import FrappeTestCase
+from moto import mock_aws
+from press.press.doctype.agent_job.test_agent_job import fake_agent_job
from press.press.doctype.press_settings.test_press_settings import (
create_test_press_settings,
)
from press.press.doctype.proxy_server.proxy_server import ProxyServer
+from press.press.doctype.root_domain.root_domain import RootDomain
from press.press.doctype.server.server import BaseServer
+from press.press.doctype.virtual_machine.test_virtual_machine import create_test_virtual_machine
+from press.utils.test import foreground_enqueue_doc
@patch.object(BaseServer, "after_insert", new=Mock())
-@patch.object(ProxyServer, "validate", new=Mock())
+@patch.object(ProxyServer, "validate_domains", new=Mock())
def create_test_proxy_server(
hostname: str = "n",
domain: str = "fc.dev",
- domains: List[Dict[str, str]] = [{"domain": "fc.dev"}],
+ domains: list[dict[str, str]] | None = None,
cluster: str = "Default",
-):
+ is_primary: bool = True,
+) -> ProxyServer:
"""Create test Proxy Server doc"""
+ if domains is None:
+ domains = [{"domain": "fc.dev"}]
create_test_press_settings()
server = frappe.get_doc(
{
@@ -36,11 +43,53 @@ def create_test_proxy_server(
"cluster": cluster,
"domain": domain,
"domains": domains,
+ "is_primary": is_primary,
+ "virtual_machine": create_test_virtual_machine().name,
}
).insert(ignore_if_duplicate=True)
server.reload()
return server
-class TestProxyServer(unittest.TestCase):
- pass
+@patch(
+ "press.press.doctype.proxy_server.proxy_server.frappe.enqueue_doc",
+ foreground_enqueue_doc,
+)
+@patch("press.press.doctype.proxy_server.proxy_server.Ansible", new=Mock())
+class TestProxyServer(FrappeTestCase):
+ @fake_agent_job("Reload NGINX Job")
+ @mock_aws
+ @patch.object(
+ RootDomain,
+ "update_dns_records_for_sites",
+ wraps=RootDomain.update_dns_records_for_sites,
+ autospec=True,
+ )
+ def test_sites_dns_updated_on_failover(self, update_dns_records_for_sites):
+ from press.press.doctype.server.test_server import create_test_server
+ from press.press.doctype.site.test_site import create_test_site
+
+ proxy1 = create_test_proxy_server()
+ proxy2 = create_test_proxy_server(is_primary=False)
+
+ root_domain: RootDomain = frappe.get_doc("Root Domain", proxy1.domain)
+ root_domain.boto3_client.create_hosted_zone(
+ Name=proxy1.domain,
+ CallerReference="1",
+ HostedZoneConfig={"Comment": "Test", "PrivateZone": False},
+ )
+
+ server = create_test_server(proxy1.name)
+ site1 = create_test_site(server=server.name)
+ create_test_site() # site on an unrelated proxy; failover must not touch its DNS
+
+ proxy2.db_set("primary", proxy1.name)
+ proxy2.db_set("is_replication_setup", 1)
+ proxy2.trigger_failover()
+ update_dns_records_for_sites.assert_called_once_with(root_domain, [site1.name], proxy2.name)
+ proxy2.reload()
+ proxy1.reload()
+ self.assertTrue(proxy2.is_primary)
+ self.assertFalse(proxy1.is_primary)
+ self.assertEqual(proxy2.status, "Active")
+ self.assertEqual(proxy1.status, "Active")
diff --git a/press/press/doctype/proxy_server_domain/proxy_server_domain.json b/press/press/doctype/proxy_server_domain/proxy_server_domain.json
index 614a0bf43a9..1c1a22b103e 100644
--- a/press/press/doctype/proxy_server_domain/proxy_server_domain.json
+++ b/press/press/doctype/proxy_server_domain/proxy_server_domain.json
@@ -5,7 +5,8 @@
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
- "domain"
+ "domain",
+ "code_server"
],
"fields": [
{
@@ -16,12 +17,18 @@
"label": "Domain",
"options": "Root Domain",
"reqd": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "code_server",
+ "fieldtype": "Check",
+ "label": "Code Server"
}
],
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
- "modified": "2021-03-24 10:21:00.192032",
+ "modified": "2023-07-24 21:41:33.648896",
"modified_by": "Administrator",
"module": "Press",
"name": "Proxy Server Domain",
@@ -29,5 +36,6 @@
"permissions": [],
"sort_field": "modified",
"sort_order": "DESC",
+ "states": [],
"track_changes": 1
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/proxy_server_domain/proxy_server_domain.py b/press/press/doctype/proxy_server_domain/proxy_server_domain.py
index 6ecb8df8ef6..706c8c1103c 100644
--- a/press/press/doctype/proxy_server_domain/proxy_server_domain.py
+++ b/press/press/doctype/proxy_server_domain/proxy_server_domain.py
@@ -8,4 +8,19 @@
class ProxyServerDomain(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ code_server: DF.Check
+ domain: DF.Link
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/razorpay_payment_record/razorpay_payment_record.json b/press/press/doctype/razorpay_payment_record/razorpay_payment_record.json
index 16396ff97a5..386745209e6 100644
--- a/press/press/doctype/razorpay_payment_record/razorpay_payment_record.json
+++ b/press/press/doctype/razorpay_payment_record/razorpay_payment_record.json
@@ -10,6 +10,7 @@
"payment_id",
"order_id",
"signature",
+ "type",
"status",
"failure_reason"
],
@@ -57,11 +58,18 @@
"fieldtype": "Small Text",
"label": "Failure Reason",
"read_only": 1
+ },
+ {
+ "default": "Prepaid Credits",
+ "fieldname": "type",
+ "fieldtype": "Select",
+ "label": "Type",
+ "options": "Prepaid Credits\nPartnership Fee"
}
],
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2022-01-20 16:34:15.352515",
+ "modified": "2025-01-05 22:06:31.980472",
"modified_by": "Administrator",
"module": "Press",
"name": "Razorpay Payment Record",
@@ -82,5 +90,6 @@
],
"sort_field": "modified",
"sort_order": "DESC",
+ "states": [],
"title_field": "order_id"
}
\ No newline at end of file
diff --git a/press/press/doctype/razorpay_payment_record/razorpay_payment_record.py b/press/press/doctype/razorpay_payment_record/razorpay_payment_record.py
index 0c575533195..10711e967db 100644
--- a/press/press/doctype/razorpay_payment_record/razorpay_payment_record.py
+++ b/press/press/doctype/razorpay_payment_record/razorpay_payment_record.py
@@ -1,29 +1,56 @@
# Copyright (c) 2022, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
+
+from datetime import datetime, timedelta
import frappe
+from frappe.model.document import Document
-from datetime import datetime, timedelta
+from press.press.doctype.team.team import _enqueue_finalize_unpaid_invoices_for_team
from press.utils import log_error
-from frappe.model.document import Document
from press.utils.billing import get_razorpay_client
-from press.press.doctype.team.team import enqueue_finalize_unpaid_for_team
class RazorpayPaymentRecord(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ failure_reason: DF.SmallText | None
+ order_id: DF.Data | None
+ payment_id: DF.Data | None
+ signature: DF.Data | None
+ status: DF.Literal["Captured", "Failed", "Pending"]
+ team: DF.Link | None
+ type: DF.Literal["Prepaid Credits", "Partnership Fee"]
+ # end: auto-generated types
+
def on_update(self):
if self.has_value_changed("status") and self.status == "Captured":
- self.process_prepaid_credits()
+ if self.type == "Prepaid Credits":
+ self.process_prepaid_credits()
+ elif self.type == "Partnership Fee":
+ self.process_partnership_fee()
def process_prepaid_credits(self):
team = frappe.get_doc("Team", self.team)
client = get_razorpay_client()
payment = client.payment.fetch(self.payment_id)
- amount = payment["amount"] / 100
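+ # Razorpay reports amounts in the smallest currency unit (paise), hence / 100.
+ # The optional "gst" note (assumed to be in rupees) is subtracted so only the
+ # net amount is credited; tax is carried separately on the invoice.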
+ amount_with_tax = payment["amount"] / 100
+ gst = float(payment["notes"].get("gst", 0))
+ amount = amount_with_tax - gst
balance_transaction = team.allocate_credit_amount(
- amount, source="Prepaid Credits", remark=f"Razorpay: {self.payment_id}"
+ amount,
+ source="Prepaid Credits",
+ remark=f"Razorpay: {self.payment_id}",
)
+ team.reload()
# Add a field to track razorpay event
invoice = frappe.get_doc(
@@ -32,8 +59,11 @@ def process_prepaid_credits(self):
type="Prepaid Credits",
status="Paid",
due_date=datetime.fromtimestamp(payment["created_at"]),
- amount_paid=amount,
+ total=amount,
amount_due=amount,
+ gst=gst or 0,
+ amount_due_with_tax=amount_with_tax,
+ amount_paid=amount_with_tax,
razorpay_order_id=self.order_id,
razorpay_payment_record=self.name,
razorpay_payment_method=payment["method"],
@@ -54,7 +84,55 @@ def process_prepaid_credits(self):
invoice.update_razorpay_transaction_details(payment)
invoice.submit()
- enqueue_finalize_unpaid_for_team(team.name)
+ _enqueue_finalize_unpaid_invoices_for_team(team.name)
+
+ def process_partnership_fee(self):
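+ """Mirror of process_prepaid_credits for partnership fees: credits the net
+ amount as balance and raises a Paid invoice of type "Partnership Fees"."""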
+ team = frappe.get_doc("Team", self.team)
+
+ client = get_razorpay_client()
+ payment = client.payment.fetch(self.payment_id)
+ amount_with_tax = payment["amount"] / 100
+ gst = float(payment["notes"].get("gst", 0))
+ amount = amount_with_tax - gst
+ balance_transaction = team.allocate_credit_amount(
+ amount,
+ source="Prepaid Credits",
+ remark=f"Razorpay: {self.payment_id}",
+ type="Partnership Fee",
+ )
+ team.reload()
+
+ # Add a field to track razorpay event
+ invoice = frappe.get_doc(
+ doctype="Invoice",
+ team=team.name,
+ type="Partnership Fees",
+ status="Paid",
+ due_date=datetime.fromtimestamp(payment["created_at"]),
+ total=amount,
+ amount_due=amount,
+ gst=gst or 0,
+ amount_due_with_tax=amount_with_tax,
+ amount_paid=amount_with_tax,
+ razorpay_order_id=self.order_id,
+ razorpay_payment_record=self.name,
+ razorpay_payment_method=payment["method"],
+ )
+ invoice.append(
+ "items",
+ {
+ "description": "Partnership Fee",
+ "document_type": "Balance Transaction",
+ "document_name": balance_transaction.name,
+ "quantity": 1,
+ "rate": amount,
+ },
+ )
+ invoice.insert()
+ invoice.reload()
+
+ invoice.update_razorpay_transaction_details(payment)
+ invoice.submit()
@frappe.whitelist()
def sync(self):
@@ -77,9 +155,8 @@ def sync(self):
log_error(title="Failed to sync Razorpay Payment Record", order_id=self.order_id)
-def fetch_pending_payment_orders():
-
- past_12hrs_ago = datetime.now() - timedelta(hours=12)
+def fetch_pending_payment_orders(hours=12):
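+ """Sync payments for Razorpay orders still Pending that were created within the last `hours` hours."""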
+ past_12hrs_ago = datetime.now() - timedelta(hours=hours)
pending_orders = frappe.get_all(
"Razorpay Payment Record",
dict(status="Pending", creation=(">=", past_12hrs_ago)),
@@ -91,7 +168,6 @@ def fetch_pending_payment_orders():
return
for order_id in pending_orders:
-
try:
response = client.order.payments(order_id)
for item in response.get("items"):
diff --git a/press/press/doctype/razorpay_payment_record/test_razorpay_payment_record.py b/press/press/doctype/razorpay_payment_record/test_razorpay_payment_record.py
index 438e4bacfd3..3d38f2b55f7 100644
--- a/press/press/doctype/razorpay_payment_record/test_razorpay_payment_record.py
+++ b/press/press/doctype/razorpay_payment_record/test_razorpay_payment_record.py
@@ -2,8 +2,8 @@
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestRazorpayPaymentRecord(unittest.TestCase):
+class TestRazorpayPaymentRecord(FrappeTestCase):
pass
diff --git a/press/press/doctype/razorpay_webhook_log/razorpay_webhook_log.py b/press/press/doctype/razorpay_webhook_log/razorpay_webhook_log.py
index f9c21c1e534..b7ce05c58fd 100644
--- a/press/press/doctype/razorpay_webhook_log/razorpay_webhook_log.py
+++ b/press/press/doctype/razorpay_webhook_log/razorpay_webhook_log.py
@@ -1,25 +1,97 @@
# Copyright (c) 2022, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
import frappe
+from frappe.model.document import Document
from press.utils import log_error
-from frappe.model.document import Document
from press.utils.billing import get_razorpay_client
class RazorpayWebhookLog(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ event: DF.Data | None
+ payload: DF.Code | None
+ payment_id: DF.Data | None
+ # end: auto-generated types
+
def after_insert(self):
payment_record = frappe.get_doc("Razorpay Payment Record", {"order_id": self.name})
- if (
- self.event in ("order.paid", "payment.captured")
- and payment_record.status != "Captured"
- ):
+ if self.event in ("order.paid", "payment.captured") and payment_record.status != "Captured":
payment_record.update({"payment_id": self.payment_id, "status": "Captured"})
payment_record.save(ignore_permissions=True)
+@frappe.whitelist(allow_guest=True)
+def razorpay_authorized_payment_handler():
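+ """Capture payments stuck in Razorpay's "authorized" state.
+
+ Verifies the webhook signature, then captures the payment only if a matching
+ Razorpay Payment Record for the order exists and is still Pending."""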
+ client = get_razorpay_client()
+ form_dict = frappe.local.form_dict
+
+ payment_id = None
+ try:
+ payload = frappe.request.get_data()
+ signature = frappe.get_request_header("X-Razorpay-Signature")
+ webhook_secret = frappe.db.get_single_value("Press Settings", "razorpay_webhook_secret")
+ entity_data = form_dict["payload"]["payment"]["entity"]
+
+ client.utility.verify_webhook_signature(payload.decode(), signature, webhook_secret)
+ if entity_data["status"] != "authorized":
+ raise Exception("invalid payment status received")
+ payment_id = entity_data.get("id")
+ order_id = entity_data.get("order_id", "")
+ amount = entity_data.get("amount")
+ notes = entity_data.get("notes")
+
+ if not order_id:
+ return
+
+ razorpay_payment_record = frappe.db.exists("Razorpay Payment Record", {"order_id": order_id})
+ if not razorpay_payment_record:
+ # Don't log an error if it's not a FrappeCloud order
+ # Example of valid notes
+ # "notes": {
+ # "Description": "Order for Frappe Cloud Prepaid Credits",
+ # "Team (Frappe Cloud ID)": "test@example.com"
+ # "gst": 245
+ # },
+
+ if notes and notes.get("description"):
+ log_error(
+ "Razorpay payment record for given order does not exist",
+ order_id=order_id,
+ )
+ return
+
+ # Only capture the payment if the order's payment record is still Pending
+ if frappe.db.get_value("Razorpay Payment Record", razorpay_payment_record, "status") != "Pending":
+ return
+
+ # Capture the authorized payment
+ client.payment.capture(payment_id, amount)
+ except Exception as e:
+ error_message = str(e)
+ if (
+ "payment has already been captured" in error_message
+ or "the order is already paid" in error_message
+ or "id provided does not exist" in error_message
+ ):
+ return
+ log_error(
+ title="Razorpay Authorized Payment Webhook Handler",
+ payment_id=payment_id,
+ )
+ raise Exception from e
+
+
@frappe.whitelist(allow_guest=True)
def razorpay_webhook_handler():
client = get_razorpay_client()
@@ -29,23 +101,36 @@ def razorpay_webhook_handler():
try:
payload = frappe.request.get_data()
signature = frappe.get_request_header("X-Razorpay-Signature")
- webhook_secret = frappe.db.get_single_value(
- "Press Settings", "razorpay_webhook_secret"
- )
+ webhook_secret = frappe.db.get_single_value("Press Settings", "razorpay_webhook_secret")
client.utility.verify_webhook_signature(payload.decode(), signature, webhook_secret)
# set user to Administrator, to not have to do ignore_permissions everywhere
frappe.set_user("Administrator")
- razorpay_order_id = form_dict["payload"]["payment"]["entity"]["order_id"]
- if not frappe.db.exists(
- "Razorpay Payment Record",
- {"order_id": razorpay_order_id},
- ):
- log_error(
- "Razorpay payment record for given order does not exist", order_id=razorpay_order_id
- )
+ entity_data = form_dict["payload"]["payment"]["entity"]
+ razorpay_order_id = entity_data.get("order_id")
+
+ if not razorpay_order_id:
+ return
+
+ razorpay_payment_record = frappe.db.exists("Razorpay Payment Record", {"order_id": razorpay_order_id})
+
+ notes = form_dict["payload"]["payment"]["entity"]["notes"]
+ if not razorpay_payment_record:
+ # Don't log an error if it's not a FrappeCloud order
+ # Example of valid notes
+ # "notes": {
+ # "Description": "Order for Frappe Cloud Prepaid Credits",
+ # "Team (Frappe Cloud ID)": "test@example.com",
+ # "gst": 245
+ # },
+
+ if notes and notes.get("description"):
+ log_error(
+ "Razorpay payment record for given order does not exist",
+ order_id=razorpay_order_id,
+ )
return
frappe.get_doc(
@@ -58,11 +143,11 @@ def razorpay_webhook_handler():
}
).insert(ignore_if_duplicate=True)
- except Exception:
+ except Exception as e:
frappe.db.rollback()
log_error(
title="Razorpay Webhook Handler",
payment_id=form_dict["payload"]["payment"]["entity"]["id"],
)
frappe.set_user(current_user)
- raise Exception
+ raise Exception from e
diff --git a/press/press/doctype/razorpay_webhook_log/test_razorpay_webhook_log.py b/press/press/doctype/razorpay_webhook_log/test_razorpay_webhook_log.py
index d6b12840d87..ff65e6674db 100644
--- a/press/press/doctype/razorpay_webhook_log/test_razorpay_webhook_log.py
+++ b/press/press/doctype/razorpay_webhook_log/test_razorpay_webhook_log.py
@@ -2,8 +2,8 @@
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestRazorpayWebhookLog(unittest.TestCase):
+class TestRazorpayWebhookLog(FrappeTestCase):
pass
diff --git a/press/press/doctype/region/region.py b/press/press/doctype/region/region.py
index 959eff41f3d..ad99f134908 100644
--- a/press/press/doctype/region/region.py
+++ b/press/press/doctype/region/region.py
@@ -7,4 +7,16 @@
class Region(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ last_allocated_to: DF.Link | None
+ region_name: DF.Data
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/region/test_region.py b/press/press/doctype/region/test_region.py
index c30d737fbf4..e6d863c5ab0 100644
--- a/press/press/doctype/region/test_region.py
+++ b/press/press/doctype/region/test_region.py
@@ -1,10 +1,9 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe and Contributors
# See license.txt
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestRegion(unittest.TestCase):
+class TestRegion(FrappeTestCase):
pass
diff --git a/press/press/doctype/registry_server/registry_server.js b/press/press/doctype/registry_server/registry_server.js
index e3931cf8249..dcaff487fce 100644
--- a/press/press/doctype/registry_server/registry_server.js
+++ b/press/press/doctype/registry_server/registry_server.js
@@ -8,6 +8,25 @@ frappe.ui.form.on('Registry Server', {
[__('Ping Ansible Unprepared'), 'ping_ansible_unprepared', true],
[__('Prepare Server'), 'prepare_server', true, !frm.doc.is_server_setup],
[__('Setup Server'), 'setup_server', true, !frm.doc.is_server_setup],
+ [
+ __('Rewrite Config'),
+ 'rewrite_config',
+ true,
+ frm.doc.is_server_setup && frm.doc.is_mirror,
+ ],
+ [
+ __('Show Registry Password'),
+ 'show_registry_password',
+ false,
+ frm.doc.is_server_setup,
+ ],
+ [__('Create Mirror'), 'create_registry_mirror', true, !frm.doc.is_mirror],
+ [
+ __('Update TLS Certificate'),
+ 'update_tls_certificate',
+ true,
+ frm.doc.is_server_setup,
+ ],
[
__('Fetch Keys'),
'fetch_keys',
@@ -16,7 +35,10 @@ frappe.ui.form.on('Registry Server', {
(!frm.doc.frappe_public_key || !frm.doc.root_public_key),
],
].forEach(([label, method, confirm, condition]) => {
- if (typeof condition === 'undefined' || condition) {
+ if (
+ typeof condition === 'undefined' ||
+ (condition && method != 'create_registry_mirror')
+ ) {
frm.add_custom_button(
label,
() => {
@@ -45,6 +67,74 @@ frappe.ui.form.on('Registry Server', {
__('Actions'),
);
}
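+ // create_registry_mirror is excluded from the generic confirm flow above
+ // so it can collect its inputs through a prompt dialog instead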
+ if (method == 'create_registry_mirror' && condition) {
+ frm.add_custom_button(
+ label,
+ () => {
+ frappe.prompt(
+ [
+ {
+ fieldtype: 'Data',
+ label: 'Hostname',
+ fieldname: 'hostname',
+ reqd: 1,
+ },
+ {
+ fieldtype: 'Data',
+ label: 'Mount Point',
+ fieldname: 'docker_data_mountpoint',
+ reqd: 1,
+ },
+ {
+ fieldtype: 'Data',
+ label: 'Container Registry Config Path',
+ fieldname: 'container_registry_config_path',
+ reqd: 1,
+ },
+ {
+ fieldtype: 'Data',
+ label: 'Public IP',
+ fieldname: 'public_ip',
+ reqd: 1,
+ },
+ {
+ fieldtype: 'Data',
+ label: 'Private IP',
+ fieldname: 'private_ip',
+ reqd: 1,
+ },
+ {
+ fieldtype: 'Data',
+ label: 'Proxy Pass',
+ fieldname: 'proxy_pass',
+ reqd: 1,
+ },
+ ],
+ ({
+ hostname,
+ docker_data_mountpoint,
+ container_registry_config_path,
+ public_ip,
+ private_ip,
+ proxy_pass,
+ }) => {
+ frm
+ .call(method, {
+ hostname,
+ docker_data_mountpoint,
+ container_registry_config_path,
+ public_ip,
+ private_ip,
+ proxy_pass,
+ })
+ .then((r) => frm.refresh());
+ },
+ __('Create Mirror Registry'),
+ );
+ },
+ __('Actions'),
+ );
+ }
});
},
});
diff --git a/press/press/doctype/registry_server/registry_server.json b/press/press/doctype/registry_server/registry_server.json
index b2fa8ee4736..c12ac8967a0 100644
--- a/press/press/doctype/registry_server/registry_server.json
+++ b/press/press/doctype/registry_server/registry_server.json
@@ -8,10 +8,13 @@
"status",
"hostname",
"domain",
+ "tls_certificate_renewal_failed",
+ "plan",
"column_break_4",
"provider",
"virtual_machine",
"is_server_setup",
+ "is_mirror",
"networking_section",
"ip",
"column_break_9",
@@ -22,15 +25,24 @@
"agent_password",
"registry_section",
"registry_username",
+ "docker_data_mountpoint",
"column_break_10",
"registry_password",
+ "container_registry_config_path",
"ssh_section",
+ "ssh_user",
+ "ssh_port",
"frappe_user_password",
"frappe_public_key",
"column_break_20",
"root_public_key",
"monitoring_section",
- "monitoring_password"
+ "monitoring_password",
+ "storage_section",
+ "region_endpoint",
+ "region",
+ "bucket_name",
+ "proxy_pass"
],
"fields": [
{
@@ -134,7 +146,7 @@
"fieldname": "provider",
"fieldtype": "Select",
"label": "Provider",
- "options": "Generic\nScaleway\nAWS EC2",
+ "options": "Generic\nScaleway\nAWS EC2\nOCI",
"set_only_once": 1
},
{
@@ -196,6 +208,71 @@
"label": "Virtual Machine",
"mandatory_depends_on": "eval:doc.provider === \"AWS EC2\"",
"options": "Virtual Machine"
+ },
+ {
+ "fieldname": "ssh_user",
+ "fieldtype": "Data",
+ "label": "SSH User"
+ },
+ {
+ "default": "22",
+ "fieldname": "ssh_port",
+ "fieldtype": "Int",
+ "label": "SSH Port"
+ },
+ {
+ "default": "0",
+ "fieldname": "is_mirror",
+ "fieldtype": "Check",
+ "label": "Is Mirror"
+ },
+ {
+ "fieldname": "docker_data_mountpoint",
+ "fieldtype": "Data",
+ "label": "Docker Data Mountpoint"
+ },
+ {
+ "fieldname": "container_registry_config_path",
+ "fieldtype": "Data",
+ "label": "Container Registry Config Path"
+ },
+ {
+ "default": "0",
+ "fieldname": "tls_certificate_renewal_failed",
+ "fieldtype": "Check",
+ "label": "TLS Certificate Renewal Failed",
+ "read_only": 1
+ },
+ {
+ "fieldname": "storage_section",
+ "fieldtype": "Section Break",
+ "label": "Storage"
+ },
+ {
+ "fieldname": "region_endpoint",
+ "fieldtype": "Data",
+ "label": "Region Endpoint"
+ },
+ {
+ "fieldname": "region",
+ "fieldtype": "Data",
+ "label": "Region"
+ },
+ {
+ "fieldname": "bucket_name",
+ "fieldtype": "Data",
+ "label": "Bucket Name"
+ },
+ {
+ "fieldname": "proxy_pass",
+ "fieldtype": "Data",
+ "label": "Proxy Pass"
+ },
+ {
+ "fieldname": "plan",
+ "fieldtype": "Link",
+ "label": "Plan",
+ "options": "Server Plan"
}
],
"links": [
@@ -204,7 +281,7 @@
"link_fieldname": "server"
}
],
- "modified": "2022-06-16 22:27:50.409392",
+ "modified": "2025-11-22 15:50:08.907789",
"modified_by": "Administrator",
"module": "Press",
"name": "Registry Server",
@@ -223,8 +300,9 @@
"write": 1
}
],
+ "row_format": "Dynamic",
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"track_changes": 1
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/registry_server/registry_server.py b/press/press/doctype/registry_server/registry_server.py
index e2de6f99620..9e71dca1979 100644
--- a/press/press/doctype/registry_server/registry_server.py
+++ b/press/press/doctype/registry_server/registry_server.py
@@ -1,15 +1,57 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2021, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
+
+import os
import frappe
+from frappe.frappeclient import FrappeClient
+
from press.press.doctype.server.server import BaseServer
from press.runner import Ansible
from press.utils import log_error
class RegistryServer(BaseServer):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ agent_password: DF.Password | None
+ bucket_name: DF.Data | None
+ container_registry_config_path: DF.Data | None
+ docker_data_mountpoint: DF.Data | None
+ domain: DF.Link | None
+ frappe_public_key: DF.Code | None
+ frappe_user_password: DF.Password | None
+ hostname: DF.Data
+ ip: DF.Data
+ is_mirror: DF.Check
+ is_server_setup: DF.Check
+ monitoring_password: DF.Password | None
+ plan: DF.Link | None
+ private_ip: DF.Data
+ private_mac_address: DF.Data | None
+ private_vlan_id: DF.Data | None
+ provider: DF.Literal["Generic", "Scaleway", "AWS EC2", "OCI"]
+ proxy_pass: DF.Data | None
+ region: DF.Data | None
+ region_endpoint: DF.Data | None
+ registry_password: DF.Password | None
+ registry_username: DF.Data | None
+ root_public_key: DF.Code | None
+ ssh_port: DF.Int
+ ssh_user: DF.Data | None
+ status: DF.Literal["Pending", "Installing", "Active", "Broken", "Archived"]
+ tls_certificate_renewal_failed: DF.Check
+ virtual_machine: DF.Link | None
+ # end: auto-generated types
+
def validate(self):
self.validate_agent_password()
self.validate_registry_username()
@@ -29,6 +71,7 @@ def validate_monitoring_password(self):
self.monitoring_password = frappe.generate_hash()
def _setup_server(self):
+ settings = frappe.get_cached_doc("Press Settings")
agent_password = self.get_password("agent_password")
agent_repository_url = self.get_agent_repository_url()
monitoring_password = self.get_password("monitoring_password")
@@ -36,24 +79,39 @@ def _setup_server(self):
"TLS Certificate", {"wildcard": True, "domain": self.domain}, "name"
)
certificate = frappe.get_doc("TLS Certificate", certificate_name)
+ access_key = settings.docker_s3_access_key
+ secret_key = settings.get_password("docker_s3_secret_key")
+ variables = {
+ "server": self.name,
+ "workers": 1,
+ "domain": self.domain,
+ "agent_password": agent_password,
+ "agent_repository_url": agent_repository_url,
+ "monitoring_password": monitoring_password,
+ "private_ip": self.private_ip,
+ "registry_username": self.registry_username,
+ "registry_password": self.get_password("registry_password"),
+ "certificate_private_key": certificate.private_key,
+ "certificate_full_chain": certificate.full_chain,
+ "is_mirror": self.is_mirror,
+ "docker_data_mountpoint": self.docker_data_mountpoint,
+ "certificate_intermediate_chain": certificate.intermediate_chain,
+ "container_registry_config_path": self.container_registry_config_path,
+ "registry_url": f"https://{self.name}",
+ "access_key": access_key,
+ "secret_key": secret_key,
+ "region_endpoint": self.region_endpoint,
+ "region": self.region,
+ "bucket_name": self.bucket_name,
+ "proxy_pass": self.proxy_pass,
+ }
try:
ansible = Ansible(
playbook="registry.yml",
server=self,
- variables={
- "server": self.name,
- "workers": 1,
- "domain": self.domain,
- "agent_password": agent_password,
- "agent_repository_url": agent_repository_url,
- "monitoring_password": monitoring_password,
- "private_ip": self.private_ip,
- "registry_username": self.registry_username,
- "registry_password": self.get_password("registry_password"),
- "certificate_private_key": certificate.private_key,
- "certificate_full_chain": certificate.full_chain,
- "certificate_intermediate_chain": certificate.intermediate_chain,
- },
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ variables=variables,
)
play = ansible.run()
self.reload()
@@ -65,4 +123,178 @@ def _setup_server(self):
except Exception:
self.status = "Broken"
log_error("Registry Server Setup Exception", server=self.as_dict())
+
self.save()
+
+ def prune_mirror_registry(self):
+ """Clear out the docker system for the mirror registry"""
+ if not self.is_mirror:
+ return
+
+ frappe.enqueue_doc(self.doctype, self.name, "_prune_mirror_registry", queue="long", timeout=3600)
+
+ def _prune_mirror_registry(self):
+ try:
+ ansible = Ansible(
+ playbook="prune_mirror_registry.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ variables={
+ "mountpoint": os.path.join(
+ self.docker_data_mountpoint, "docker", "registry", "v2", "blobs"
+ ),
+ "registry_container": "registry-registry-1",
+ },
+ )
+ ansible.run()
+ except Exception:
+ log_error("Mirror Registry Prune Failed", server=self.as_dict())
+
+ @frappe.whitelist()
+ def show_registry_password(self):
+ """Show registry password"""
+ frappe.msgprint(self.get_password("registry_password"))
+
+ @frappe.whitelist()
+ def create_registry_mirror(
+ self,
+ hostname: str,
+ docker_data_mountpoint: str,
+ container_registry_config_path: str,
+ public_ip: str,
+ private_ip: str,
+ proxy_pass: str,
+ ):
+ """Create a registry mirror"""
+ registry: RegistryServer = frappe.get_doc(
+ {
+ "doctype": "Registry Server",
+ "ip": public_ip,
+ "private_ip": private_ip,
+ "docker_data_mountpoint": docker_data_mountpoint,
+ "container_registry_config_path": container_registry_config_path,
+ "hostname": hostname,
+ "is_mirror": True,
+ "provider": "Generic",
+ "registry_username": self.registry_username,
+ "registry_password": self.get_password("registry_password"),
+ "proxy_pass": proxy_pass,
+ }
+ )
+ registry.insert()
+
+ def _rewrite_config(self):
+ try:
+ ansible = Ansible(
+ playbook="rewrite_registry_config.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ variables={
+ "is_mirror": self.is_mirror,
+ "proxy_pass": self.proxy_pass,
+ "docker_data_mountpoint": self.docker_data_mountpoint,
+ "container_registry_config_path": self.container_registry_config_path,
+ "registry_username": self.registry_username,
+ "registry_password": self.get_password("registry_password"),
+ },
+ )
+ ansible.run()
+ except Exception as e:
+ log_error("Error during mirror config rewrite", e)
+
+ @frappe.whitelist()
+ def rewrite_config(self):
+ """Rewrite mirror's config"""
+ if not self.is_mirror:
+ frappe.throw("Config can not be update for the hub registry")
+
+ frappe.enqueue_doc(self.doctype, self.name, "_rewrite_config")
+
+
+def delete_old_images_from_registry(): # noqa: C901
+ """Purge registry of older images"""
+ settings = frappe.get_doc("Press Settings", None)
+ registry = settings.docker_registry_url
+
+ requests = FrappeClient(registry).session
+
+ headers = {"Accept": "application/vnd.docker.distribution.manifest.v2+json"}
+ auth = (settings.docker_registry_username, settings.docker_registry_password)
+
+ # Traverse all pages
+ last = None
+ while True:
+ params = {"last": last} if last else {}
+ response = requests.get(f"https://{registry}/v2/_catalog", auth=auth, headers=headers, params=params)
+
+ if not response.ok:
+ return
+
+ repositories = response.json()["repositories"]
+ if not repositories:
+ break
+ last = repositories[-1]
+
+ for repository in repositories:
+ try:
+ # Skip non-bench images
+ if not frappe.db.exists("Release Group", repository.split("/")[-1]):
+ continue
+ tags = (
+ requests.get(f"https://{registry}/v2/{repository}/tags/list", auth=auth, headers=headers)
+ .json()
+ .get("tags", [])
+ or []
+ )
+ tags = sorted(tags)
+ for tag in tags:
+ if tag.startswith("deploy-"):
+ deploy_candidate = tag
+ else:
+ deploy_candidate = frappe.db.get_value(
+ "Deploy Candidate Build", tag, "deploy_candidate"
+ )
+
+ if not deploy_candidate:
+ in_use = False
+ else:
+ in_use = frappe.db.get_all(
+ "Bench",
+ ["count(*) as count"],
+ {"status": "Active", "candidate": deploy_candidate},
+ )[0].count
+
+ if not in_use:
+ digest = requests.head(
+ f"https://{registry}/v2/{repository}/manifests/{tag}", auth=auth, headers=headers
+ ).headers["Docker-Content-Digest"]
+ should_delete = False
+
+ # Delete all except the most recent candidates
+ if tags.index(tag) < len(tags) - 1:
+ should_delete = True
+ else:
+ # For most recent candidate delete the image if
+ # 1. It hasn't been in use for some time, OR
+ # 2. The Release Group is disabled
+
+ enabled = frappe.db.get_value(
+ "Release Group", repository.split("/")[-1], "enabled"
+ )
+ created = frappe.db.get_value("Deploy Candidate", deploy_candidate, "creation")
+ if not enabled or created < frappe.utils.add_days(None, -7):
+ # log(["DELETING", repository, tag, tags.index(tag) == len(tags) - 1, not enabled, created])
+ should_delete = True
+
+ if should_delete:
+ # DELETE the image
+ requests.delete(
+ f"https://{registry}/v2/{repository}/manifests/{digest}",
+ auth=auth,
+ headers=headers,
+ )
+
+ except Exception:
+ pass
diff --git a/press/press/doctype/registry_server/test_registry_server.py b/press/press/doctype/registry_server/test_registry_server.py
index 3ada7d61433..2e170d6b18f 100644
--- a/press/press/doctype/registry_server/test_registry_server.py
+++ b/press/press/doctype/registry_server/test_registry_server.py
@@ -1,11 +1,10 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2021, Frappe and Contributors
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestRegistryServer(unittest.TestCase):
+class TestRegistryServer(FrappeTestCase):
pass
diff --git a/press/press/doctype/release_group/release_group.js b/press/press/doctype/release_group/release_group.js
index 9cce2627dd3..cf4256e4389 100644
--- a/press/press/doctype/release_group/release_group.js
+++ b/press/press/doctype/release_group/release_group.js
@@ -4,7 +4,7 @@
frappe.ui.form.on('Release Group', {
refresh: function (frm) {
frm.add_web_link(
- `/dashboard/benches/${frm.doc.name}`,
+ `/dashboard/groups/${frm.doc.name}`,
__('Visit Dashboard'),
);
[
@@ -13,6 +13,7 @@ frappe.ui.form.on('Release Group', {
__('Create Duplicate Deploy Candidate'),
'create_duplicate_deploy_candidate',
],
+ [__('Update Benches Configuration'), 'update_benches_config'],
].forEach(([label, method]) => {
frm.add_custom_button(
label,
@@ -72,16 +73,28 @@ frappe.ui.form.on('Release Group', {
options: 'Server',
reqd: 1,
},
+ {
+ fieldtype: 'Check',
+ fieldname: 'force_new_build',
+ label: 'Force New Build',
+ reqd: 0,
+ },
],
- primary_action({ server }) {
- frm.call('add_server', { server, deploy: true }).then((r) => {
- if (!r.exc) {
- frappe.show_alert(
- `Added ${server} and deployed last successful candidate`,
- );
- }
- d.hide();
- });
+ primary_action({ server, force_new_build }) {
+ frm
+ .call('add_server', {
+ server,
+ deploy: true,
+ force_new_build: force_new_build,
+ })
+ .then((r) => {
+ if (!r.exc) {
+ frappe.show_alert(
+ `Added ${server} and deployed last successful candidate`,
+ );
+ }
+ d.hide();
+ });
},
});
d.show();
@@ -91,10 +104,4 @@ frappe.ui.form.on('Release Group', {
frm.set_df_property('dependencies', 'cannot_add_rows', 1);
},
- version: function (frm) {
- if (frm.is_new()) {
- frm.clear_table('dependencies');
- frm.call('validate_dependencies');
- }
- },
});
diff --git a/press/press/doctype/release_group/release_group.json b/press/press/doctype/release_group/release_group.json
index d82f213b5b2..1db103d38b1 100644
--- a/press/press/doctype/release_group/release_group.json
+++ b/press/press/doctype/release_group/release_group.json
@@ -17,7 +17,10 @@
"central_bench",
"section_break_7",
"servers",
+ "section_break_keov",
+ "build_server",
"apps_tab",
+ "check_dependent_apps",
"apps",
"config_tab",
"bench_configuration_section",
@@ -26,22 +29,38 @@
"common_site_config",
"column_break_14",
"bench_config",
+ "gunicorn_threads_per_worker",
+ "redis_cache_size",
+ "redis_password",
+ "automatic_worker_allocation_section",
+ "min_gunicorn_workers",
+ "min_background_workers",
+ "column_break_njfg",
+ "max_gunicorn_workers",
+ "max_background_workers",
"dependencies_tab",
"dependencies",
+ "last_dependency_update",
"packages",
"environment_variables",
+ "mounts",
"feature_flags_tab",
"is_redisearch_enabled",
"is_push_to_deploy_enabled",
+ "use_app_cache",
+ "compress_app_cache",
+ "use_delta_builds",
"column_break_9efq",
"merge_all_rq_queues",
"merge_default_and_short_rq_queues",
+ "use_rq_workerpool",
"saas_tab",
"saas_bench",
"column_break_26",
"saas_app",
"miscellaneous_tab",
- "tags"
+ "tags",
+ "is_code_server_enabled"
],
"fields": [
{
@@ -54,6 +73,7 @@
},
{
"default": "0",
+ "description": "Will be deployed on all public servers",
"fieldname": "public",
"fieldtype": "Check",
"in_list_view": 1,
@@ -73,7 +93,8 @@
"in_standard_filter": 1,
"label": "Team",
"options": "Team",
- "reqd": 1
+ "reqd": 1,
+ "search_index": 1
},
{
"fieldname": "version",
@@ -215,7 +236,8 @@
},
{
"fieldname": "column_break_9efq",
- "fieldtype": "Column Break"
+ "fieldtype": "Column Break",
+ "label": "RQ Worker Config"
},
{
"default": "0",
@@ -246,15 +268,131 @@
"label": "Common Site Config"
},
{
- "default": "{}",
"fieldname": "common_site_config_table",
"fieldtype": "Table",
"label": "Configuration",
"options": "Common Site Config"
+ },
+ {
+ "fieldname": "last_dependency_update",
+ "fieldtype": "Datetime",
+ "label": "Last Dependency Update",
+ "read_only": 1
+ },
+ {
+ "fieldname": "max_gunicorn_workers",
+ "fieldtype": "Int",
+ "label": "Max Gunicorn Workers",
+ "non_negative": 1
+ },
+ {
+ "fieldname": "max_background_workers",
+ "fieldtype": "Int",
+ "label": "Max Background Workers",
+ "non_negative": 1
+ },
+ {
+ "fieldname": "automatic_worker_allocation_section",
+ "fieldtype": "Section Break",
+ "label": "Automatic Worker Allocation"
+ },
+ {
+ "fieldname": "min_gunicorn_workers",
+ "fieldtype": "Int",
+ "label": "Min Gunicorn Workers",
+ "non_negative": 1
+ },
+ {
+ "fieldname": "min_background_workers",
+ "fieldtype": "Int",
+ "label": "Min Background Workers",
+ "non_negative": 1
+ },
+ {
+ "fieldname": "column_break_njfg",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "description": "Setting this to non-zero value will set Gunicorn worker class to gthread.",
+ "fieldname": "gunicorn_threads_per_worker",
+ "fieldtype": "Int",
+ "label": "Gunicorn Threads Per Worker",
+ "non_negative": 1
+ },
+ {
+ "default": "0",
+ "fetch_from": "team.is_code_server_user",
+ "fieldname": "is_code_server_enabled",
+ "fieldtype": "Check",
+ "label": "Is Code Server Enabled",
+ "read_only": 1
+ },
+ {
+ "fieldname": "mounts",
+ "fieldtype": "Table",
+ "label": "Mounts",
+ "options": "Release Group Mount"
+ },
+ {
+ "default": "0",
+ "fieldname": "use_rq_workerpool",
+ "fieldtype": "Check",
+ "label": "Use RQ WorkerPool"
+ },
+ {
+ "default": "0",
+ "description": "Uses Bench get-app cache for faster image builds. Can be used only if Bench version is 5.22.1 or later.",
+ "fieldname": "use_app_cache",
+ "fieldtype": "Check",
+ "label": "Use App Cache"
+ },
+ {
+ "default": "0",
+ "depends_on": "eval: doc.use_app_cache",
+ "description": "Use Gzip to compress bench get-app artifacts before caching.",
+ "fieldname": "compress_app_cache",
+ "fieldtype": "Check",
+ "label": "Compress App Cache"
+ },
+ {
+ "fieldname": "section_break_keov",
+ "fieldtype": "Section Break"
+ },
+ {
+ "default": "0",
+ "description": "Quickens builds by fetching app changes without rebuilding app if app rebuild is not required.",
+ "fieldname": "use_delta_builds",
+ "fieldtype": "Check",
+ "label": "Use Delta Builds"
+ },
+ {
+ "fieldname": "build_server",
+ "fieldtype": "Link",
+ "label": "Build Server",
+ "options": "Server"
+ },
+ {
+ "default": "512",
+ "fieldname": "redis_cache_size",
+ "fieldtype": "Int",
+ "label": "Redis Cache Size (MB)"
+ },
+ {
+ "default": "0",
+ "fieldname": "check_dependent_apps",
+ "fieldtype": "Check",
+ "label": "Check Dependent Apps"
+ },
+ {
+ "fieldname": "redis_password",
+ "fieldtype": "Password",
+ "label": "Redis Password",
+ "read_only": 1
}
],
"links": [],
- "modified": "2023-07-17 17:44:39.805124",
+ "modified": "2025-11-02 15:12:26.835798",
"modified_by": "Administrator",
"module": "Press",
"name": "Release Group",
@@ -286,9 +424,10 @@
"write": 1
}
],
+ "row_format": "Dynamic",
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"title_field": "title",
"track_changes": 1
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/release_group/release_group.py b/press/press/doctype/release_group/release_group.py
index 75670306e25..28743a4bd67 100644
--- a/press/press/doctype/release_group/release_group.py
+++ b/press/press/doctype/release_group/release_group.py
@@ -1,48 +1,296 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
-import frappe
-import copy
import json
+from contextlib import suppress
+from functools import cached_property
+from itertools import chain
+from typing import TYPE_CHECKING, TypedDict
-from typing import List
-from frappe.core.utils import find
+import frappe
+import frappe.query_builder
+import semantic_version as sv
+from frappe import _
+from frappe.core.doctype.version.version import get_diff
+from frappe.core.utils import find, find_all
from frappe.model.document import Document
+from frappe.model.naming import append_number_if_name_exists
+from frappe.query_builder.functions import Count
+from frappe.utils import cstr, flt, get_url, sbool
+from frappe.utils.caching import redis_cache
+
+from press.access.actions import ReleaseGroupActions
+from press.access.decorators import action_guard
+from press.agent import Agent
+from press.api.client import dashboard_whitelist
+from press.exceptions import ImageNotFoundInRegistry, InsufficientSpaceOnServer, VolumeResizeLimitError
+from press.guards import role_guard
+from press.overrides import get_permission_query_conditions_for_doctype
+from press.press.doctype.app.app import new_app
+from press.press.doctype.app_source.app_source import AppSource, create_app_source
+from press.press.doctype.deploy_candidate.utils import is_suspended
+from press.press.doctype.deploy_candidate_build.deploy_candidate_build import create_platform_build_and_deploy
+from press.press.doctype.resource_tag.tag_helpers import TagHelpers
from press.press.doctype.server.server import Server
from press.utils import (
- get_last_doc,
+ fmt_timedelta,
get_app_tag,
+ get_client_blacklisted_keys,
get_current_team,
+ get_last_doc,
log_error,
- get_client_blacklisted_keys,
)
-from press.overrides import get_permission_query_conditions_for_doctype
-from press.press.doctype.app_source.app_source import AppSource, create_app_source
-from typing import TYPE_CHECKING
-from frappe.utils import cstr
if TYPE_CHECKING:
- from press.press.doctype.deploy_candidate.deploy_candidate import DeployCandidate
+ from datetime import datetime
+ from typing import Any
+
+ from press.press.doctype.user_ssh_key.user_ssh_key import UserSSHKey
DEFAULT_DEPENDENCIES = [
{"dependency": "NVM_VERSION", "version": "0.36.0"},
{"dependency": "NODE_VERSION", "version": "14.19.0"},
{"dependency": "PYTHON_VERSION", "version": "3.7"},
{"dependency": "WKHTMLTOPDF_VERSION", "version": "0.12.5"},
- {"dependency": "BENCH_VERSION", "version": "5.15.2"},
+ {"dependency": "BENCH_VERSION", "version": "5.25.1"},
+ {"dependency": "PIP_VERSION", "version": "25.2"},
]
+SUPPORTED_WKHTMLTOPDF_VERSIONS = ["0.12.5", "0.12.6"]
+
+
+class LastDeployInfo(TypedDict):
+ name: str
+ status: str
+ creation: datetime
+
+
+if TYPE_CHECKING:
+ from press.press.doctype.app.app import App
+ from press.press.doctype.bench.bench import Bench
+ from press.press.doctype.deploy_candidate.deploy_candidate import DeployCandidate
+ from press.press.doctype.deploy_candidate_build.deploy_candidate_build import DeployCandidateBuild
+
+
+class ReleaseGroup(Document, TagHelpers):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.common_site_config.common_site_config import CommonSiteConfig
+ from press.press.doctype.release_group_app.release_group_app import ReleaseGroupApp
+ from press.press.doctype.release_group_dependency.release_group_dependency import (
+ ReleaseGroupDependency,
+ )
+ from press.press.doctype.release_group_mount.release_group_mount import ReleaseGroupMount
+ from press.press.doctype.release_group_package.release_group_package import ReleaseGroupPackage
+ from press.press.doctype.release_group_server.release_group_server import ReleaseGroupServer
+ from press.press.doctype.release_group_variable.release_group_variable import ReleaseGroupVariable
+ from press.press.doctype.resource_tag.resource_tag import ResourceTag
+
+ apps: DF.Table[ReleaseGroupApp]
+ bench_config: DF.Code | None
+ build_server: DF.Link | None
+ central_bench: DF.Check
+ check_dependent_apps: DF.Check
+ common_site_config: DF.Code | None
+ common_site_config_table: DF.Table[CommonSiteConfig]
+ compress_app_cache: DF.Check
+ default: DF.Check
+ dependencies: DF.Table[ReleaseGroupDependency]
+ enabled: DF.Check
+ environment_variables: DF.Table[ReleaseGroupVariable]
+ gunicorn_threads_per_worker: DF.Int
+ is_code_server_enabled: DF.Check
+ is_push_to_deploy_enabled: DF.Check
+ is_redisearch_enabled: DF.Check
+ last_dependency_update: DF.Datetime | None
+ max_background_workers: DF.Int
+ max_gunicorn_workers: DF.Int
+ merge_all_rq_queues: DF.Check
+ merge_default_and_short_rq_queues: DF.Check
+ min_background_workers: DF.Int
+ min_gunicorn_workers: DF.Int
+ mounts: DF.Table[ReleaseGroupMount]
+ packages: DF.Table[ReleaseGroupPackage]
+ public: DF.Check
+ redis_cache_size: DF.Int
+ redis_password: DF.Password | None
+ saas_app: DF.Link | None
+ saas_bench: DF.Check
+ servers: DF.Table[ReleaseGroupServer]
+ tags: DF.Table[ResourceTag]
+ team: DF.Link
+ title: DF.Data
+ use_app_cache: DF.Check
+ use_delta_builds: DF.Check
+ use_rq_workerpool: DF.Check
+ version: DF.Link
+ # end: auto-generated types
+
+ dashboard_fields = ("title", "version", "apps", "team", "public", "tags")
+
+ @staticmethod
+ def get_list_query(query, filters, **list_args):
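+ """Scope the dashboard list to the current team's enabled, private groups
+ and annotate each row with its site count and active bench count."""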
+ ReleaseGroupServer = frappe.qb.DocType("Release Group Server")
+ ReleaseGroup = frappe.qb.DocType("Release Group")
+ Bench = frappe.qb.DocType("Bench")
+ Site = frappe.qb.DocType("Site")
+
+ site_count = (
+ frappe.qb.from_(Site)
+ .select(frappe.query_builder.functions.Count("*"))
+ .where(Site.group == ReleaseGroup.name)
+ .where(Site.status != "Archived")
+ )
+
+ active_benches = (
+ frappe.qb.from_(Bench)
+ .select(frappe.query_builder.functions.Count("*"))
+ .where(Bench.group == ReleaseGroup.name)
+ .where(Bench.status == "Active")
+ )
+
+ query = (
+ query.where(ReleaseGroup.team == frappe.local.team().name)
+ .where(ReleaseGroup.enabled == 1)
+ .where(ReleaseGroup.public == 0)
+ .select(site_count.as_("site_count"), active_benches.as_("active_benches"))
+ )
+
+ if server := filters.get("server"):
+ query = (
+ query.inner_join(ReleaseGroupServer)
+ .on(ReleaseGroupServer.parent == ReleaseGroup.name)
+ .where(ReleaseGroupServer.server == server)
+ )
+
+ return query
+
+ def get_doc(self, doc):
+ doc.deploy_information = self.deploy_information()
+ doc.status = self.status
+ doc.actions = self.get_actions()
+ doc.are_builds_suspended = are_builds_suspended()
+ doc.eol_versions = frappe.db.get_all(
+ "Frappe Version",
+ filters={"status": "End of Life"},
+ fields=["name"],
+ order_by="name desc",
+ pluck="name",
+ )
+
+ if len(self.servers) == 1:
+ server = frappe.db.get_value("Server", self.servers[0].server, ["team", "title"], as_dict=True)
+ doc.server = self.servers[0].server
+ doc.server_title = server.title
+ doc.server_team = server.team
+
+ doc.enable_inplace_updates = frappe.get_value(
+ "Team",
+ self.team,
+ "enable_inplace_updates",
+ )
+ if doc.enable_inplace_updates:
+ doc.inplace_update_failed_benches = self.get_inplace_update_failed_benches()
+
+ def get_inplace_update_failed_benches(self):
+ return frappe.db.get_all(
+ "Bench",
+ {"group": self.name, "status": "Active", "last_inplace_update_failed": True},
+ pluck="name",
+ )
+
+ def get_actions(self):
+ return [
+ {
+ "action": "Rename Bench Group",
+ "description": "Rename the bench group",
+ "button_label": "Rename",
+ "doc_method": "rename",
+ },
+ {
+ "action": "Transfer Bench Group",
+ "description": "Transfer ownership of this bench group to another team",
+ "button_label": "Transfer",
+ "doc_method": "send_change_team_request",
+ },
+ {
+ "action": "Drop Bench Group",
+ "description": "Drop the bench group",
+ "button_label": "Drop",
+ "doc_method": "drop",
+ "group": "Dangerous Actions",
+ },
+ ]
-class ReleaseGroup(Document):
+ @role_guard.action()
def validate(self):
self.validate_title()
self.validate_frappe_app()
self.validate_duplicate_app()
self.validate_app_versions()
self.validate_servers()
- self.validate_dependencies()
self.validate_rq_queues()
+ self.validate_max_min_workers()
+ self.validate_feature_flags()
+ self.validate_dependencies()
+ if self.check_dependent_apps:
+ self.validate_dependent_apps()
+ if not self.redis_password:
+ self.set_redis_password()
+
+ def set_redis_password(self):
+ self.redis_password = frappe.generate_hash(length=32)
+
+ def validate_dependent_apps(self):
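+ """Ensure every app required by an App Source (via required_apps) has a
+ matching source in this group, comparing by repository URL."""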
+ required_repository_urls = set()
+ existing_repository_urls = set()
+
+ for app in self.apps:
+ app_source: AppSource = frappe.get_doc("App Source", app.source)
+ existing_repository_urls.add(
+ frappe.get_value("App Source", filters={"name": app.source}, fieldname=["repository_url"])
+ )
+
+ for required_app in app_source.required_apps:
+ required_repository_urls.add(required_app.repository_url)
+
+ missing_urls = required_repository_urls - existing_repository_urls
+ if missing_urls:
+ missing_app_source = frappe.db.get_values(
+ "App Source", filters={"repository_url": ("in", missing_urls)}, pluck="name"
+ )
+ frappe.throw(
+ f"""
+ Please add the following sources
+
+ {" ".join(missing_app_source) or " ".join(missing_urls)}
+
+ """
+ )
+
+ def before_insert(self):
+ # to avoid adding deps while cloning a release group
+ if len(self.dependencies) == 0:
+ self.fetch_dependencies()
+ self.set_default_app_cache_flags()
+ self.set_default_delta_builds_flags()
+ self.setup_default_feature_flags()
+
+ def on_update(self):
+ old_doc = self.get_doc_before_save()
+ if self.flags.in_insert or self.is_new() or not old_doc:
+ return
+ diff = get_diff(old_doc, self) or {}
+ for row in chain(diff.get("row_changed", []), diff.get("added", [])):
+ if row[0] == "dependencies":
+ self.db_set("last_dependency_update", frappe.utils.now_datetime())
+ break
def on_trash(self):
candidates = frappe.get_all("Deploy Candidate", {"group": self.name})
@@ -53,7 +301,7 @@ def before_save(self):
self.update_common_site_config_preview()
def update_common_site_config_preview(self):
- """Regenerates rg.common_site_config on each rg.befor_save
+ """Regenerates rg.common_site_config on each rg.before_save
from the rg.common_site_config child table data"""
new_config = {}
@@ -61,19 +309,12 @@ def update_common_site_config_preview(self):
# update internal flag from master
row.internal = frappe.db.get_value("Site Config Key", row.key, "internal")
key_type = row.type or row.get_type()
- if key_type == "Password":
- # we don't support password type yet!
- key_type = "String"
row.type = key_type
if key_type == "Number":
- key_value = (
- int(row.value) if isinstance(row.value, (float, int)) else json.loads(row.value)
- )
+ key_value = int(row.value) if isinstance(row.value, (float, int)) else json.loads(row.value)
elif key_type == "Boolean":
- key_value = (
- row.value if isinstance(row.value, bool) else bool(json.loads(cstr(row.value)))
- )
+ key_value = row.value if isinstance(row.value, bool) else bool(json.loads(cstr(row.value)))
elif key_type == "JSON":
key_value = json.loads(cstr(row.value))
else:
@@ -83,6 +324,73 @@ def update_common_site_config_preview(self):
self.common_site_config = json.dumps(new_config, indent=4)
+ @dashboard_whitelist()
+ def update_dependency(self, dependency_name, version, is_custom):
+ """Updates a dependency version in the Release Group Dependency table"""
+ for dependency in self.dependencies:
+ if dependency.name == dependency_name:
+ dependency.version = version
+ dependency.is_custom = is_custom
+ self.save()
+ return
+
+ @dashboard_whitelist()
+ def delete_config(self, key):
+ """Deletes a key from the common_site_config_table"""
+
+ if key in get_client_blacklisted_keys():
+ return
+
+ updated_common_site_config = []
+ for row in self.common_site_config_table:
+ if row.key != key and not row.internal:
+ updated_common_site_config.append({"key": row.key, "value": row.value, "type": row.type})
+
+ # using a tuple to avoid updating bench_config
+ # TODO: remove tuple when bench_config is removed and field for http_timeout is added
+ self.update_config_in_release_group(updated_common_site_config, ())
+
+ @dashboard_whitelist()
+ def update_config(self, config):
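+ """Apply dashboard config changes: reject blacklisted keys, coerce values
+ to their Site Config Key type, route http_timeout to bench_config, then
+ push the merged config to all benches."""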
+ sanitized_common_site_config = [
+ {"key": c.key, "type": c.type, "value": c.value} for c in self.common_site_config_table
+ ]
+ sanitized_bench_config = []
+ bench_config_keys = ["http_timeout"]
+
+ config = frappe.parse_json(config)
+
+ for key, value in config.items():
+ if key in get_client_blacklisted_keys():
+ frappe.throw(_(f"The key
{key} is blacklisted or is internal and cannot be updated"))
+
+ config_type = get_config_type(value)
+
+ if frappe.db.exists("Site Config Key", key):
+ config_type = frappe.db.get_value("Site Config Key", key, "type")
+
+ value = get_formatted_config_value(
+ config_type,
+ value,
+ key,
+ self.name,
+ )
+
+ if key in bench_config_keys:
+ sanitized_bench_config.append({"key": key, "value": value, "type": config_type})
+
+ # update existing key
+ for row in sanitized_common_site_config:
+ if row["key"] == key:
+ row["value"] = value
+ row["type"] = config_type
+ break
+ else:
+ sanitized_common_site_config.append({"key": key, "value": value, "type": config_type})
+
+ self.update_config_in_release_group(sanitized_common_site_config, sanitized_bench_config)
+ self.update_benches_config()
+
def update_config_in_release_group(self, common_site_config, bench_config):
"""Updates bench_config and common_site_config in the Release Group
@@ -105,19 +413,46 @@ def update_config_in_release_group(self, common_site_config, bench_config):
value = json.dumps(d.value)
else:
value = d.value
- self.append(
- "common_site_config_table", {"key": d.key, "value": value, "type": d.type}
- )
+ self.append("common_site_config_table", {"key": d.key, "value": value, "type": d.type})
+ # redis_cache_size is a field on the release group, but we want to treat it as a config key
+ # TODO: add another interface for updating similar values
+ if d["key"] == "redis_cache_size":
+ self.redis_cache_size = int(d.value)
for d in bench_config:
- if d.key == "http_timeout":
+ if d["key"] == "http_timeout":
# http_timeout should be the only thing configurable in bench_config
- self.bench_config = json.dumps({"http_timeout": int(d.value)}, indent=4)
+ self.bench_config = json.dumps({"http_timeout": int(d["value"])}, indent=4)
+
if bench_config == []:
self.bench_config = json.dumps({})
self.save()
+ @dashboard_whitelist()
+ def update_environment_variable(self, environment_variables: dict):
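+ """Upsert user-defined environment variables; internal variables cannot be changed."""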
+ for key, value in environment_variables.items():
+ is_updated = False
+ for env_var in self.environment_variables:
+ if env_var.key == key:
+ if env_var.internal:
+ frappe.throw(f"Environment variable {env_var.key} is internal and cannot be updated")
+ else:
+ env_var.value = value
+ is_updated = True
+ if not is_updated:
+ self.append("environment_variables", {"key": key, "value": value, "internal": False})
+ self.save()
+
+ @dashboard_whitelist()
+ def delete_environment_variable(self, key):
+ updated_env_variables = []
+ for env_var in self.environment_variables:
+ if env_var.key != key or env_var.internal:
+ updated_env_variables.append(env_var)
+ self.environment_variables = updated_env_variables
+ self.save()
+
def validate_title(self):
if frappe.get_all(
"Release Group",
@@ -129,7 +464,10 @@ def validate_title(self):
},
limit=1,
):
- frappe.throw(f"Release Group {self.title} already exists.", frappe.ValidationError)
+ frappe.throw(
+ f"Bench Group of name {self.title} already exists. Please try another name.",
+ frappe.ValidationError,
+ )
def validate_frappe_app(self):
if self.apps[0].app != "frappe":
@@ -145,12 +483,21 @@ def validate_duplicate_app(self):
def validate_app_versions(self):
# App Source should be compatible with Release Group's version
+ with suppress(AttributeError, RuntimeError):
+ if (
+ not frappe.flags.in_test
+ and frappe.request.path == "/api/method/press.api.bench.change_branch"
+ ):
+ return # Separate validation exists in set_app_source
for app in self.apps:
- source = frappe.get_doc("App Source", app.source)
- if all(row.version != self.version for row in source.versions):
- frappe.throw(
- f"App Source {app.source} version is not {self.version}", frappe.ValidationError
- )
+ self.validate_app_version(app)
+
+ def validate_app_version(self, app: "ReleaseGroupApp"):
+ source = frappe.get_doc("App Source", app.source)
+ if all(row.version != self.version for row in source.versions):
+ branch, repo = frappe.db.get_values("App Source", app.source, ("branch", "repository"))[0]
+ msg = f"{repo.rsplit('/')[-1] or repo.rsplit('/')[-2]}:{branch} branch is no longer compatible with {self.version} version of Frappe"
+ frappe.throw(msg, frappe.ValidationError)
def validate_servers(self):
if self.servers:
@@ -162,26 +509,11 @@ def validate_servers(self):
if server_for_new_bench:
self.append("servers", {"server": server_for_new_bench})
- @frappe.whitelist()
- def validate_dependencies(self):
- # TODO: Move this to Frappe Version DocType
- dependencies = copy.deepcopy(DEFAULT_DEPENDENCIES)
- if self.version in ("Version 15", "Nightly"):
- python = find(dependencies, lambda x: x["dependency"] == "PYTHON_VERSION")
- python["version"] = "3.11"
- node = find(dependencies, lambda x: x["dependency"] == "NODE_VERSION")
- node["version"] = "18.16.0"
+ def fetch_dependencies(self):
+ frappe_version = frappe.get_doc("Frappe Version", self.version)
- if self.version == "Version 14":
- python = find(dependencies, lambda x: x["dependency"] == "PYTHON_VERSION")
- python["version"] = "3.10"
-
- if self.version == "Version 12":
- node = find(dependencies, lambda x: x["dependency"] == "NODE_VERSION")
- node["version"] = "12.19.0"
-
- if not hasattr(self, "dependencies") or not self.dependencies:
- self.extend("dependencies", dependencies)
+ for d in frappe_version.dependencies:
+ self.append("dependencies", {"dependency": d.dependency, "version": d.version})
def validate_rq_queues(self):
if self.merge_all_rq_queues and self.merge_default_and_short_rq_queues:
@@ -190,77 +522,222 @@ def validate_rq_queues(self):
frappe.ValidationError,
)
+ def validate_max_min_workers(self):
+ if (
+ self.max_gunicorn_workers
+ and self.min_gunicorn_workers
+ and self.max_gunicorn_workers < self.min_gunicorn_workers
+ ):
+ frappe.throw(
+ "Max Gunicorn Workers can't be less than Min Gunicorn Workers",
+ frappe.ValidationError,
+ )
+ if (
+ self.max_background_workers
+ and self.min_background_workers
+ and self.max_background_workers < self.min_background_workers
+ ):
+ frappe.throw(
+ "Max Background Workers can't be less than Min Background Workers",
+ frappe.ValidationError,
+ )
+
+ def validate_feature_flags(self) -> None:
+ if self.use_app_cache and not self.can_use_get_app_cache():
+ frappe.throw(_("Use App Cache cannot be set, BENCH_VERSION must be 5.22.1 or later"))
+
+ def _validate_dependency_format(self, dependency: str, version: str):
+ # Append patch version
+ if version.count(".") == 1:
+ version += ".0"
+
+ try:
+ sv.Version(version)
+ except ValueError as e:
+ frappe.throw(f"{dependency}: {e}")
+
+ def _validate_supported_wkhtmltopdf_version(self, version):
+ if version not in SUPPORTED_WKHTMLTOPDF_VERSIONS:
+ frappe.throw(
+ f"Unsupported wkhtmltopdf version {version}\n"
+ f"Supported versions: {', '.join(SUPPORTED_WKHTMLTOPDF_VERSIONS)}"
+ )
+
+ def validate_dependencies(self):
+ for dependency in self.dependencies:
+ self._validate_dependency_format(dependency.dependency, dependency.version)
+ if dependency.dependency == "WKHTMLTOPDF_VERSION":
+ self._validate_supported_wkhtmltopdf_version(dependency.version)
+
+ def can_use_get_app_cache(self) -> bool:
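+ """Bench get-app cache needs BENCH_VERSION >= 5.22.1; unparseable versions count as unsupported."""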
+ version = find(
+ self.dependencies,
+ lambda x: x.dependency == "BENCH_VERSION",
+ ).version
+
+ try:
+ return sv.Version(version) in sv.SimpleSpec(">=5.22.1")
+ except ValueError:
+ return False
+
+ def required_build_platforms(self) -> tuple[bool, bool]:
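+ """Return (needs_arm64_build, needs_x86_64_build) based on the platforms
+ of the servers attached to this group."""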
+ platforms = frappe.get_all(
+ "Server",
+ {"name": ("in", [server_ref.server for server_ref in self.servers])},
+ pluck="platform",
+ )
+ required_arm_build = "arm64" in platforms
+ required_intel_build = "x86_64" in platforms
+ return required_arm_build, required_intel_build
+
+ def get_redis_password(self) -> str:
+ """Get redis password create and update password if not present
+ Ignore validation while setting redis password to allow older RGs
+ to be password protected.
+ """
+ try:
+ return self.get_password("redis_password")
+ except (frappe.AuthenticationError, frappe.ValidationError):
+ self.redis_password = frappe.generate_hash(length=32)
+ self.flags.ignore_validate = 1
+ self._save_passwords()
+ self.save()
+ frappe.db.commit()  # Save password regardless
+ return self.get_password("redis_password")
+
@frappe.whitelist()
def create_duplicate_deploy_candidate(self):
- return self.create_deploy_candidate([app.as_dict() for app in self.apps])
+ return self.create_deploy_candidate([])
+
+ @dashboard_whitelist()
+ def redeploy(self):
+ dc = self.create_duplicate_deploy_candidate()
+ dc.schedule_build_and_deploy()
+
+ @dashboard_whitelist()
+ def initial_deploy(self):
+ dc = self.create_deploy_candidate()
+ dc.schedule_build_and_deploy()
+
+ def _try_server_size_increase_or_throw(self, server: Server, mountpoint: str, required_size: int):
+ """In case of low storage on the server try to either increase the storage (if allowed) or throw an error"""
+ if server.auto_increase_storage:
+ try:
+ server.calculated_increase_disk_size(mountpoint=mountpoint)
+ except VolumeResizeLimitError:
+ frappe.throw(
+ f"We are unable to increase server space right now for the deploy. Please wait "
+ f"{fmt_timedelta(server.time_to_wait_before_updating_volume)} before trying again.",
+ InsufficientSpaceOnServer,
+ )
+ else:
+ frappe.throw(
+ f"Not enough space on server {server.name} to create a new bench. {required_size}G is required.",
+ InsufficientSpaceOnServer,
+ )
- @frappe.whitelist()
- def create_deploy_candidate(self, apps_to_ignore=None) -> "DeployCandidate":
- if not self.enabled:
- return
+ @staticmethod
+ def _get_last_deployed_image_size(server: Server, last_deployed_bench: Bench) -> float | None:
+ """Try and fetch the last deployed image size"""
+ try:
+ return Agent(server.name).get(f"server/image-size/{last_deployed_bench.build}").get("size")
+ except Exception as e:
+ log_error("Failed to fetch last image size", data=e)
+ return None
- if apps_to_ignore is None:
- apps_to_ignore = []
+ def check_app_server_storage(self):
+ """
+ Check storage on the app server before deploying
+ Check if the free space on the server is more than the last
+ image deployed, assuming new image to be created will have the same or more
+ size than the last time.
+ """
+ for server in self.servers:
+ server: Server = frappe.get_cached_doc("Server", server.server)
- # Get the deploy information for apps
- # that have updates available
- apps_deploy_info = self.deploy_information().apps
+ if server.is_self_hosted:
+ continue
- app_updates = [
- app
- for app in apps_deploy_info
- if app["update_available"]
- and (not find(apps_to_ignore, lambda x: x["app"] == app["app"]))
- ]
+ mountpoint = server.guess_data_disk_mountpoint()
+ # Skip the storage check if Prometheus is unavailable
+ try:
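+ # free_space() reports bytes; convert to GiB to compare with the image size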
+ free_space = server.free_space(mountpoint) / 1024**3
+ except Exception:
+ continue
- apps = []
- for update in app_updates:
- apps.append(
- {
- "release": update["next_release"],
- "source": update["source"],
- "app": update["app"],
- "hash": update["next_hash"],
- }
+ last_deployed_bench = get_last_doc("Bench", {"group": self.name, "status": "Active"})
+
+ if not last_deployed_bench:
+ continue
+
+ last_image_size = self._get_last_deployed_image_size(server, last_deployed_bench)
+
+ if last_image_size and (free_space < last_image_size):
+ self._try_server_size_increase_or_throw(
+ server, mountpoint, required_size=last_image_size - free_space
+ )
+
+ def check_auto_scales(self) -> None:
+ """Check for servers that are scaled up in the release group and throw if any"""
+ has_scaled_up_servers = frappe.db.get_value(
+ "Server",
+ {
+ "name": ("IN", [server.server for server in self.servers]),
+ "scaled_up": True,
+ },
+ )
+ if has_scaled_up_servers:
+ frappe.throw(
+ "Server(s) are scaled up currently and no deployment can run on them as of now."
+ "Please scale down all the server before deploying."
)
- # The apps that are in the release group
- # Not updated or ignored
- untouched_apps = [
- a for a in self.apps if not find(app_updates, lambda x: x["app"] == a.app)
- ]
- last_deployed_bench = get_last_doc("Bench", {"group": self.name, "status": "Active"})
+ has_running_auto_scales = frappe.db.get_value(
+ "Auto Scale Record",
+ {
+ "primary_server": ("IN", [server.server for server in self.servers]),
+ "status": ("IN", ["Running", "Pending"]),
+ },
+ )
- if last_deployed_bench:
- for app in untouched_apps:
- update = find(last_deployed_bench.apps, lambda x: x.app == app.app)
+ if has_running_auto_scales:
+ frappe.throw(
+ "Server(s) are triggered to auto scale and no deployment can run on them as of now."
+ "Please scale down all the server before deploying."
+ )
- if update:
- apps.append(
- {
- "release": update.release,
- "source": update.source,
- "app": update.app,
- "hash": update.hash,
- }
- )
+ @frappe.whitelist()
+ def create_deploy_candidate(
+ self,
+ apps_to_update=None,
+ run_will_fail_check=False,
+ ) -> "DeployCandidate | None":
+ if not self.enabled:
+ return None
- dependencies = [
- {"dependency": d.dependency, "version": d.version} for d in self.dependencies
- ]
+ self.check_app_server_storage()
+ self.check_auto_scales()
- packages = [
- {"package_manager": p.package_manager, "package": p.package} for p in self.packages
- ]
+ apps = self.get_apps_to_update(apps_to_update)
+ if apps_to_update is None:
+ self.validate_dc_apps_against_rg(apps)
- environment_variables = [
- {"key": v.key, "value": v.value} for v in self.environment_variables
- ]
+ dependencies = [{"dependency": d.dependency, "version": d.version} for d in self.dependencies]
- apps = self.get_sorted_based_on_rg_apps(apps)
+ packages = [
+ {
+ "package_manager": p.package_manager,
+ "package": p.package,
+ "package_prerequisites": p.package_prerequisites,
+ "after_install": p.after_install,
+ }
+ for p in self.packages
+ ]
+ environment_variables = [{"key": v.key, "value": v.value} for v in self.environment_variables]
+ requires_arm_build, requires_intel_build = self.required_build_platforms()
# Create and deploy the DC
- candidate = frappe.get_doc(
+ new_dc: "DeployCandidate" = frappe.get_doc(
{
"doctype": "Deploy Candidate",
"group": self.name,
@@ -268,10 +745,92 @@ def create_deploy_candidate(self, apps_to_ignore=None) -> "DeployCandidate":
"dependencies": dependencies,
"packages": packages,
"environment_variables": environment_variables,
+ "requires_arm_build": requires_arm_build,
+ "requires_intel_build": requires_intel_build,
+ "build_token": frappe.generate_hash(length=10),
}
- ).insert()
+ )
+
+ if run_will_fail_check:
+ from press.press.doctype.deploy_candidate.validations import (
+ check_if_update_will_fail,
+ )
+
+ check_if_update_will_fail(self, new_dc)
+
+ new_dc.insert()
+ return new_dc
+
+ def validate_dc_apps_against_rg(self, dc_apps) -> None:
+ app_map = {app["app"]: app for app in dc_apps}
+ not_found = []
+ for app in self.apps:
+ if app.app in app_map:
+ continue
+ not_found.append(app.app)
- return candidate
+ if not not_found:
+ return
+
+ msg = _("Following apps {0} not found. Potentially due to not approved App Releases.").format(
+ not_found
+ )
+ frappe.throw(msg)
+
+ def get_apps_to_update(self, apps_to_update):
+ # If apps_to_update is None, try to update all apps
+ if apps_to_update is None:
+ apps_to_update = self.apps
+
+ apps = []
+ last_deployed_bench = get_last_doc(
+ "Bench", {"group": self.name, "status": ("in", ("Active", "Installing", "Pending"))}
+ )
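+ # Transitory benches (Installing/Pending) also count, presumably so an in-flight deploy's releases are reused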
+
+ for app in self.deploy_information().apps:
+ app_to_update = find(apps_to_update, lambda x: x.get("app") == app.app)
+ # If we want to update the app and there's an update available
+ if app_to_update and app["update_available"]:
+ # Use a specific release if mentioned, otherwise pick the most recent one
+ target_release = app_to_update.get("release", app.next_release)
+ apps.append(
+ {
+ "app": app["app"],
+ "source": app["source"],
+ "release": target_release,
+ "hash": frappe.db.get_value("App Release", target_release, "hash"),
+ }
+ )
+ else:
+ # Find the last deployed release and use it; if no deployed bench is present, use the RG apps
+ if last_deployed_bench:
+ app_to_keep = find(last_deployed_bench.apps, lambda x: x.app == app.app)
+ if app_to_keep:
+ apps.append(
+ {
+ "app": app_to_keep.app,
+ "source": app_to_keep.source,
+ "release": app_to_keep.release,
+ "hash": app_to_keep.hash,
+ }
+ )
+
+ else:
+ app_to_keep = find(self.apps, lambda x: x.app == app.app)
+ if app_to_keep:
+ app_release, app_hash = frappe.db.get_value(
+ "App Release", {"source": app_to_keep.source}, ["name", "hash"]
+ )
+ apps.append(
+ {
+ "app": app_to_keep.app,
+ "source": app_to_keep.source,
+ "release": app_release,
+ "hash": app_hash,
+ }
+ )
+
+ return self.get_sorted_based_on_rg_apps(apps)
def get_sorted_based_on_rg_apps(self, apps):
# Rearrange Apps to match release group ordering
@@ -291,50 +850,300 @@ def get_sorted_based_on_rg_apps(self, apps):
@frappe.whitelist()
def deploy_information(self):
out = frappe._dict(update_available=False)
-
- last_deployed_bench = get_last_doc("Bench", {"group": self.name, "status": "Active"})
- out.apps = self.get_app_updates(
- last_deployed_bench.apps if last_deployed_bench else []
+ last_deployed_bench = get_last_doc(
+ "Bench", {"group": self.name, "status": ("in", ("Active", "Installing", "Pending"))}
)
+ out.apps = self.get_app_updates(last_deployed_bench.apps if last_deployed_bench else [])
+ out.last_deploy = self.last_dc_info
+ out.deploy_in_progress = self.deploy_in_progress
+
out.removed_apps = self.get_removed_apps()
- out.update_available = any([app["update_available"] for app in out.apps]) or (
- len(out.removed_apps) > 0
+ out.update_available = (
+ any([app["update_available"] for app in out.apps])
+ or (len(out.removed_apps) > 0)
+ or self.dependency_update_pending
)
out.number_of_apps = len(self.apps)
- last_dc_info = self.get_last_deploy_candidate_info()
- out.last_deploy = last_dc_info
- out.deploy_in_progress = last_dc_info and last_dc_info.status == "Running"
out.sites = [
site.update({"skip_failing_patches": False, "skip_backups": False})
for site in frappe.get_all(
- "Site", {"group": self.name, "status": "Active"}, ["name", "server"]
+ "Site",
+ {"group": self.name, "status": ("in", ["Active", "Broken"])},
+ ["name", "server", "bench"],
)
]
return out
+ @dashboard_whitelist()
+ def deployed_versions(self):
+ Bench = frappe.qb.DocType("Bench")
+ Server = frappe.qb.DocType("Server")
+ deployed_versions = (
+ frappe.qb.from_(Bench)
+ .left_join(Server)
+ .on(Server.name == Bench.server)
+ .where((Bench.group == self.name) & (Bench.status != "Archived"))
+ .groupby(Bench.name)
+ .select(Bench.name, Bench.status, Bench.is_ssh_proxy_setup, Server.proxy_server)
+ .orderby(Bench.creation, order=frappe.qb.desc)
+ .run(as_dict=True)
+ )
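+ # One row per non-archived bench in this group, newest first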
+
+ rg_version = self.version
+
+ sites_in_group_details = frappe.db.get_all(
+ "Site",
+ filters={
+ "group": self.name,
+ "status": ("not in", ("Archived", "Suspended")),
+ "is_standby": 0,
+ },
+ fields=["name", "status", "cluster", "plan", "creation", "bench"],
+ )
+
+ if sites_in_group_details:
+ Cluster = frappe.qb.DocType("Cluster")
+ cluster_data = (
+ frappe.qb.from_(Cluster)
+ .select(Cluster.name, Cluster.title, Cluster.image)
+ .where(Cluster.name.isin([site.cluster for site in sites_in_group_details]))
+ .run(as_dict=True)
+ )
+
+ Plan = frappe.qb.DocType("Site Plan")
+ plan_data = (
+ frappe.qb.from_(Plan)
+ .select(Plan.name, Plan.plan_title, Plan.price_inr, Plan.price_usd)
+ .where(Plan.name.isin([site.plan for site in sites_in_group_details]))
+ .run(as_dict=True)
+ )
+
+ ResourceTag = frappe.qb.DocType("Resource Tag")
+ tag_data = (
+ frappe.qb.from_(ResourceTag)
+ .select(ResourceTag.tag_name, ResourceTag.parent)
+ .where(ResourceTag.parent.isin([site.name for site in sites_in_group_details]))
+ .run(as_dict=True)
+ )
+
+ cur_user_ssh_key = frappe.get_all(
+ "User SSH Key", {"user": frappe.session.user, "is_default": 1}, limit=1
+ )
+
+ benches = [dn.name for dn in deployed_versions]
+ benches_with_patches = frappe.get_all(
+ "App Patch",
+ fields=["bench"],
+ filters={"bench": ["in", benches], "status": "Applied"},
+ pluck="bench",
+ )
+
+ for version in deployed_versions:
+ version.has_app_patch_applied = version.name in benches_with_patches
+ version.has_ssh_access = version.is_ssh_proxy_setup and len(cur_user_ssh_key) > 0
+ version.sites = find_all(sites_in_group_details, lambda x: x.bench == version.name)
+ for site in version.sites:
+ site.version = rg_version
+ site.server_region_info = find(cluster_data, lambda x: x.name == site.cluster)
+ site.plan = find(plan_data, lambda x: x.name == site.plan)
+ tags = find_all(tag_data, lambda x: x.parent == site.name)
+ site.tags = [tag.tag_name for tag in tags]
+
+ version.deployed_on = frappe.db.get_value(
+ "Agent Job",
+ {"bench": version.name, "job_type": "New Bench", "status": "Success"},
+ "end",
+ )
+
+ return deployed_versions
+
+ @dashboard_whitelist()
+ def get_app_versions(self, bench):
+ apps = frappe.db.get_all(
+ "Bench App",
+ {"parent": bench},
+ ["name", "app", "hash", "source"],
+ order_by="idx",
+ )
+ for app in apps:
+ app.update(
+ frappe.db.get_value(
+ "App Source",
+ app.source,
+ ("branch", "repository", "repository_owner", "repository_url"),
+ as_dict=1,
+ cache=True,
+ )
+ )
+ app.tag = get_app_tag(app.repository, app.repository_owner, app.hash)
+ return apps
+
+ @dashboard_whitelist()
+ def send_change_team_request(self, team_mail_id: str, reason: str):
+ """Send email to team to accept bench transfer request"""
+
+ if self.team != get_current_team():
+ frappe.throw(
+ "You should belong to the team owning the bench to initiate a bench ownership transfer."
+ )
+
+ if not frappe.db.exists("Team", {"user": team_mail_id, "enabled": 1}):
+ frappe.throw("No Active Team record found.")
+
+ old_team = frappe.db.get_value("Team", self.team, "user")
+
+ if old_team == team_mail_id:
+ frappe.throw(f"Bench group is already owned by the team {team_mail_id}")
+
+ key = frappe.generate_hash("Release Group Transfer Link", 20)
+ frappe.get_doc(
+ {
+ "doctype": "Team Change",
+ "document_type": "Release Group",
+ "document_name": self.name,
+ "to_team": frappe.db.get_value("Team", {"user": team_mail_id, "enabled": 1}),
+ "from_team": self.team,
+ "reason": reason or "",
+ "key": key,
+ }
+ ).insert()
+
+ link = get_url(f"/api/method/press.api.bench.confirm_bench_transfer?key={key}")
+
+ if frappe.conf.developer_mode:
+ print(f"Bench transfer link for {team_mail_id}\n{link}\n")
+
+ frappe.sendmail(
+ recipients=team_mail_id,
+ subject="Transfer Bench Ownership Confirmation",
+ template="transfer_team_confirmation",
+ args={
+ "name": self.title or self.name,
+ "type": "bench",
+ "old_team": old_team,
+ "new_team": team_mail_id,
+ "transfer_url": link,
+ },
+ )
+
+ @dashboard_whitelist()
+ @action_guard(ReleaseGroupActions.SSHAccess)
+ def generate_certificate(self):
+ ssh_key = frappe.get_all(
+ "User SSH Key",
+ {"user": frappe.session.user, "is_default": True},
+ pluck="name",
+ limit=1,
+ )
+
+ if not ssh_key:
+ frappe.throw(_("Please set a SSH key to generate certificate"))
+
+ user_ssh_key: UserSSHKey = frappe.get_doc("User SSH Key", ssh_key[0])
+ user_ssh_key.validate() # user may have already added invalid ssh key. Validate again
+
+ return frappe.get_doc(
+ {
+ "doctype": "SSH Certificate",
+ "certificate_type": "User",
+ "group": self.name,
+ "user": frappe.session.user,
+ "user_ssh_key": ssh_key[0],
+ "validity": "6h",
+ }
+ ).insert()
+
+ @dashboard_whitelist()
+ def get_certificate(self):
+ user_ssh_key = frappe.db.get_all(
+ "User SSH Key", {"user": frappe.session.user, "is_default": True}, pluck="name"
+ )
+ if not len(user_ssh_key):
+ return False
+ certificates = frappe.db.get_all(
+ "SSH Certificate",
+ {
+ "user": frappe.session.user,
+ "valid_until": [">", frappe.utils.now()],
+ "group": self.name,
+ "user_ssh_key": user_ssh_key[0],
+ },
+ pluck="name",
+ limit=1,
+ )
+ if certificates:
+ return frappe.get_doc("SSH Certificate", certificates[0])
+ return False
+
+ @property
+ def dependency_update_pending(self):
+ if not self.last_dependency_update or not self.last_dc_info:
+ return False
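+ # A dependency edited after the last build means the next deploy needs a rebuild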
+ return frappe.utils.get_datetime(self.last_dependency_update) > self.last_dc_info.creation
+
@property
def deploy_in_progress(self):
- last_dc_info = self.get_last_deploy_candidate_info()
- return last_dc_info and last_dc_info.status == "Running"
+ from press.press.doctype.bench.bench import TRANSITORY_STATES as BENCH_TRANSITORY
+ from press.press.doctype.deploy_candidate.deploy_candidate import (
+ TRANSITORY_STATES as DC_TRANSITORY,
+ )
+
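+ # A deploy counts as in progress if the latest build, any bench created from
+ # it, or an in-place bench update job is still in a transitory state.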
+ if self.last_dc_info and self.last_dc_info.status in DC_TRANSITORY:
+ return True
+
+ if any(i["status"] in BENCH_TRANSITORY for i in self.last_benches_info):
+ return True
+
+ update_jobs = get_job_names(self.name, "Update Bench In Place", ["Pending", "Running"])
+ if len(update_jobs):
+ return True
+
+ return False
- def get_last_deploy_candidate_info(self):
- dc = frappe.qb.DocType("Deploy Candidate")
+ @property
+ def status(self):
+ active_benches = frappe.db.get_all(
+ "Bench", {"group": self.name, "status": "Active"}, limit=1, order_by="creation desc"
+ )
+ return "Active" if active_benches else "Awaiting Deploy"
+
+ @cached_property
+ def last_dc_info(self) -> LastDeployInfo | None:
+ DeployCandidateBuild = frappe.qb.DocType("Deploy Candidate Build")
query = (
- frappe.qb.from_(dc)
- .where(dc.group == self.name)
- .select(dc.name, dc.status)
- .orderby(dc.creation, order=frappe.qb.desc)
+ frappe.qb.from_(DeployCandidateBuild)
+ .where(DeployCandidateBuild.group == self.name)
+ .select(DeployCandidateBuild.name, DeployCandidateBuild.status, DeployCandidateBuild.creation)
+ .orderby(DeployCandidateBuild.creation, order=frappe.qb.desc)
.limit(1)
)
results = query.run(as_dict=True)
-
- if len(results) > 0:
+ if results:
return results[0]
+ return None
+
+ @cached_property
+ def last_benches_info(self) -> list[LastDeployInfo]:
+ last_dc_info: LastDeployInfo | dict = self.last_dc_info or {}
+ name: str | None = last_dc_info.get("name")
+ if not name:
+ return []
+
+ Bench = frappe.qb.DocType("Bench")
+ query = (
+ frappe.qb.from_(Bench)
+ .where(Bench.candidate == name)
+ .select(Bench.name, Bench.status, Bench.creation)
+ .orderby(Bench.creation, order=frappe.qb.desc)
+ .limit(1)
+ )
+ return query.run(as_dict=True)
+
def get_app_updates(self, current_apps):
next_apps = self.get_next_apps(current_apps)
@@ -347,9 +1156,7 @@ def get_app_updates(self, current_apps):
will_branch_change = False
current_branch = source.branch
if bench_app:
- current_source_branch = frappe.db.get_value(
- "App Source", bench_app.source, "branch"
- )
+ current_source_branch = frappe.db.get_value("App Source", bench_app.source, "branch")
will_branch_change = current_source_branch != source.branch
current_branch = current_source_branch
@@ -358,49 +1165,88 @@ def get_app_updates(self, current_apps):
if current_hash
else None
)
+
+ for release in app.releases:
+ release.tag = get_app_tag(source.repository, source.repository_owner, release.hash)
+
next_hash = app.hash
+
+ update_available = not current_hash or current_hash != next_hash or will_branch_change
+ if not app.releases:
+ update_available = False
+
apps.append(
- {
- "title": app.title,
- "app": app.app,
- "source": source.name,
- "repository": source.repository,
- "repository_owner": source.repository_owner,
- "repository_url": source.repository_url,
- "branch": source.branch,
- "current_hash": current_hash,
- "current_tag": current_tag,
- "current_release": bench_app.release if bench_app else None,
- "next_release": app.release,
- "next_hash": next_hash,
- "next_tag": get_app_tag(source.repository, source.repository_owner, next_hash),
- "will_branch_change": will_branch_change,
- "current_branch": current_branch,
- "update_available": not current_hash or current_hash != next_hash,
- }
+ frappe._dict(
+ {
+ "title": app.title,
+ "app": app.app,
+ "name": app.app,
+ "source": source.name,
+ "repository": source.repository,
+ "repository_owner": source.repository_owner,
+ "repository_url": source.repository_url,
+ "branch": source.branch,
+ "current_hash": current_hash,
+ "current_tag": current_tag,
+ "current_release": bench_app.release if bench_app else None,
+ "releases": app.releases,
+ "next_release": app.release,
+ "will_branch_change": will_branch_change,
+ "current_branch": current_branch,
+ "update_available": update_available,
+ }
+ )
)
return apps
def get_next_apps(self, current_apps):
marketplace_app_sources = self.get_marketplace_app_sources()
- current_team = get_current_team()
- only_approved_for_sources = [
- source
- for source in marketplace_app_sources
- if frappe.db.get_value("App Source", source, "team") != current_team
- ]
+ current_team = get_current_team(True)
+ app_publishers_team = [current_team.name]
+
+ if current_team.parent_team:
+ app_publishers_team.append(current_team.parent_team)
+
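+ # Publishers (and their parent team) may deploy their own unapproved releases;
+ # other marketplace sources are restricted to approved releases only.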
+ only_approved_for_sources = [self.apps[0].source] # add frappe app source
+ if marketplace_app_sources:
+ AppSource = frappe.qb.DocType("App Source")
+ only_approved_for_sources.extend( # pluck returns a list; extend keeps the collection flat
+ frappe.qb.from_(AppSource)
+ .where(AppSource.name.isin(marketplace_app_sources))
+ .where(AppSource.team.notin(app_publishers_team))
+ .select(AppSource.name)
+ .run(as_dict=True, pluck="name")
+ )
next_apps = []
+
+ app_sources = [app.source for app in self.apps]
+ AppRelease = frappe.qb.DocType("App Release")
+ latest_releases = (
+ frappe.qb.from_(AppRelease)
+ .where(AppRelease.source.isin(app_sources))
+ .select(
+ AppRelease.name,
+ AppRelease.source,
+ AppRelease.public,
+ AppRelease.status,
+ AppRelease.hash,
+ AppRelease.message,
+ AppRelease.creation,
+ )
+ .orderby(AppRelease.creation, order=frappe.qb.desc)
+ .run(as_dict=True)
+ )
+
for app in self.apps:
- # TODO: Optimize using get_value, maybe?
latest_app_release = None
+ latest_app_releases = find_all(latest_releases, lambda x: x.source == app.source)
if app.source in only_approved_for_sources:
- latest_app_release = get_last_doc(
- "App Release", {"source": app.source, "status": "Approved"}
- )
+ latest_app_release = find(latest_app_releases, can_use_release)
+ latest_app_releases = find_all(latest_app_releases, can_use_release)
else:
- latest_app_release = get_last_doc("App Release", {"source": app.source})
+ latest_app_release = find(latest_app_releases, lambda x: x.source == app.source)
# No release exists for this source
if not latest_app_release:
@@ -408,11 +1254,24 @@ def get_next_apps(self, current_apps):
bench_app = find(current_apps, lambda x: x.app == app.app)
- upcoming_release = (
- latest_app_release.name if latest_app_release else bench_app.release
- )
+ upcoming_release = latest_app_release.name if latest_app_release else bench_app.release
upcoming_hash = latest_app_release.hash if latest_app_release else bench_app.hash
+ upcoming_releases = latest_app_releases
+ if bench_app:
+ new_branch = frappe.db.get_value("App Source", app.source, "branch")
+ old_branch = frappe.db.get_value("App Source", bench_app.source, "branch")
+
+ if new_branch == old_branch:
+ current_release_creation = frappe.db.get_value(
+ "App Release", bench_app.release, "creation"
+ )
+ upcoming_releases = [
+ release
+ for release in latest_app_releases
+ if release.creation > current_release_creation
+ ]
+
next_apps.append(
frappe._dict(
{
@@ -421,6 +1280,7 @@ def get_next_apps(self, current_apps):
"release": upcoming_release,
"hash": upcoming_hash,
"title": app.title,
+ "releases": upcoming_releases[:16],
}
)
)
@@ -444,10 +1304,44 @@ def get_removed_apps(self):
return removed_apps
- def add_app(self, source):
- self.append("apps", {"source": source.name, "app": source.app})
+ def update_source(self, source: "AppSource", is_update: bool = False):
+ self.remove_app_if_invalid(source)
+ if is_update:
+ update_rg_app_source(self, source)
+ else:
+ self.append("apps", {"source": source.name, "app": source.app})
self.save()
+ def remove_app_if_invalid(self, source: "AppSource"):
+ """
+ Remove app if previously added app has an invalid
+ repository URL and GitHub responds with a 404 when
+ fetching the app information.
+ """
+ matching_apps = [a for a in self.apps if a.app == source.app]
+ if not matching_apps:
+ return
+
+ rg_app = matching_apps[0]
+ value = frappe.get_value(
+ "App Source",
+ rg_app.source,
+ ["last_github_poll_failed", "last_github_response", "repository_url"],
+ as_dict=True,
+ )
+
+ if value.repository_url == source.repository_url:
+ return
+
+ if not value.last_github_poll_failed or not value.last_github_response:
+ return
+
+ if '"Not Found"' not in value.last_github_response:
+ return
+
+ self.remove_app(source.app)
+
+ @dashboard_whitelist()
def change_app_branch(self, app: str, to_branch: str) -> None:
current_app_source = self.get_app_source(app)
@@ -458,23 +1352,40 @@ def change_app_branch(self, app: str, to_branch: str) -> None:
required_app_source = frappe.get_all(
"App Source",
filters={"repository_url": current_app_source.repository_url, "branch": to_branch},
+ or_filters={"team": get_current_team(), "public": 1},
limit=1,
+ fields=["name", "team", "public"],
)
if required_app_source:
required_app_source = required_app_source[0]
+ if not required_app_source.public:
+ required_app_source = frappe.get_doc("App Source", required_app_source.name)
+ # check if the version already exists
+ if not any(vs.version == self.version for vs in required_app_source.versions):
+ required_app_source.append(
+ "versions",
+ {
+ "version": self.version,
+ },
+ )
+ required_app_source.save()
+
else:
versions = frappe.get_all(
"App Source Version", filters={"parent": current_app_source.name}, pluck="version"
)
-
+ required_apps = frappe.get_all(
+ "Required Apps",
+ filters={"parent": current_app_source.name},
+ fields=["repository_url"],
+ pluck="repository_url",
+ )
required_app_source = create_app_source(
- app, current_app_source.repository_url, to_branch, versions
+ app, current_app_source.repository_url, to_branch, versions, required_apps
)
required_app_source.reload()
- required_app_source.github_installation_id = (
- current_app_source.github_installation_id
- )
+ required_app_source.github_installation_id = current_app_source.github_installation_id
required_app_source.save()
self.set_app_source(app, required_app_source.name)
@@ -498,49 +1409,99 @@ def set_app_source(self, target_app: str, source: str) -> None:
app.source = source
app.save()
break
+ self.validate_app_version(app)
self.save()
- def get_marketplace_app_sources(self) -> List[str]:
+ def get_marketplace_app_sources(self) -> list[str]:
all_marketplace_sources = frappe.get_all("Marketplace App Version", pluck="source")
- marketplace_app_sources = [
- app.source for app in self.apps if app.source in all_marketplace_sources
- ]
-
- return marketplace_app_sources
+ return [app.source for app in self.apps if app.source in all_marketplace_sources]
def get_clusters(self):
"""Get unique clusters corresponding to self.servers"""
- servers = frappe.db.get_all(
- "Release Group Server", {"parent": self.name}, pluck="server"
- )
- return frappe.get_all(
- "Server", {"name": ("in", servers)}, pluck="cluster", distinct=True
- )
+ servers = frappe.db.get_all("Release Group Server", {"parent": self.name}, pluck="server")
+ return frappe.get_all("Server", {"name": ("in", servers)}, pluck="cluster", distinct=True)
+
+ @dashboard_whitelist()
+ def add_region(self, region):
+ """
+ Add a new region to the release group (limited to 2). Meant for dashboard use only.
+ """
+
+ if len(self.get_clusters()) >= 2:
+ frappe.throw("More than 2 regions for bench not allowed")
+ self.add_cluster(region)
def add_cluster(self, cluster: str):
"""
Add new server belonging to cluster.
-
- Deploys bench if no update available
"""
server = Server.get_prod_for_new_bench({"cluster": cluster})
+
if not server:
log_error("No suitable server for new bench")
frappe.throw(f"No suitable server for new bench in {cluster}")
- app_update_available = self.deploy_information().update_available
- self.add_server(server, deploy=not app_update_available)
- def get_last_successful_candidate(self) -> Document:
- return frappe.get_last_doc(
- "Deploy Candidate", {"status": "Success", "group": self.name}
- )
+ self.add_server(server, deploy=True)
+
+ def get_last_successful_candidate_build(self, platform: str | None = None) -> DeployCandidateBuild | None:
+ try:
+ filters = {"status": "Success", "group": self.name}
+ if platform:
+ filters.update({"platform": platform})
+
+ return frappe.get_last_doc("Deploy Candidate Build", filters)
+ except frappe.DoesNotExistError:
+ return None
+
+ def get_last_deploy_candidate_build(self) -> DeployCandidateBuild:
+ try:
+ return frappe.get_last_doc("Deploy Candidate Build", {"group": self.name})
+ except frappe.DoesNotExistError:
+ return None
@frappe.whitelist()
- def add_server(self, server: str, deploy=False):
+ def add_server(self, server: str, deploy=False, force_new_build: bool = False):
+ """
+ Add a server to the release group. If a last successful deploy candidate build
+ exists, deploy it after checking that its image has not been pruned from the
+ registry; if the image is missing, create a new build.
+ """
+ if not deploy:
+ return None
+
+ server_platform = frappe.get_value("Server", server, "platform")
+ last_successful_deploy_candidate_build = self.get_last_successful_candidate_build(
+ platform=server_platform
+ )
+
+ if not last_successful_deploy_candidate_build or force_new_build:
+ # No build for this platform is available; create a new one
+ last_candidate_build = self.get_last_successful_candidate_build() # check for a build on any platform
+
+ if not last_candidate_build:
+ frappe.throw("No build present for this release group", frappe.ValidationError)
+
+ self.append("servers", {"server": server, "default": False})
+ self.save()
+
+ return create_platform_build_and_deploy(
+ deploy_candidate=last_candidate_build.candidate.name, # type: ignore
+ server=server,
+ platform=server_platform,
+ )
+
self.append("servers", {"server": server, "default": False})
self.save()
- if deploy:
- self.get_last_successful_candidate()._create_deploy([server], staging=False)
+
+ try:
+ return last_successful_deploy_candidate_build._create_deploy(
+ [server],
+ check_image_exists=True,
+ )
+ except ImageNotFoundInRegistry:
+ return self.add_server(server=server, deploy=True, force_new_build=True)
@frappe.whitelist()
def change_server(self, server: str):
@@ -554,22 +1515,161 @@ def change_server(self, server: str):
self.remove(self.servers[0])
self.add_server(server, deploy=True)
+ @frappe.whitelist()
+ def update_benches_config(self):
+ """Update bench config for all active benches in the release group"""
+ from press.press.doctype.bench.bench import Bench
+
+ benches = frappe.get_all("Bench", "name", {"group": self.name, "status": "Active"})
+ for bench in benches:
+ Bench("Bench", bench.name).update_bench_config(force=True)
+
+ @dashboard_whitelist()
+ def add_app(self, app, is_update: bool = False):
+ if isinstance(app, str):
+ app = json.loads(app)
+
+ if not (name := app.get("name")):
+ return
+
+ if frappe.db.exists("App", name):
+ app_doc: "App" = frappe.get_doc("App", name)
+ else:
+ app_doc = new_app(name, app["title"])
+
+ source = app_doc.add_source(
+ self.version,
+ app["repository_url"],
+ app["branch"],
+ self.team,
+ app.get("github_installation_id", None),
+ )
+ self.update_source(source, is_update)
+
+ @dashboard_whitelist()
+ def remove_app(self, app: str):
+ """Remove app from release group"""
+
+ app_doc_to_remove = find(self.apps, lambda x: x.app == app)
+ if app_doc_to_remove:
+ self.remove(app_doc_to_remove)
+
+ self.save()
+ return app
+
+ @dashboard_whitelist()
+ def fetch_latest_app_update(self, app: str):
+ app_source = self.get_app_source(app)
+ app_source.create_release(force=True)
+
+ @frappe.whitelist()
+ def archive(self):
+ benches = frappe.get_all("Bench", filters={"group": self.name, "status": "Active"}, pluck="name")
+ for bench in benches:
+ frappe.get_doc("Bench", bench).archive()
+
+ new_name = f"{self.title}.archived"
+ self.title = append_number_if_name_exists("Release Group", new_name, "title", separator=".")
+ self.enabled = 0
+ self.save()
+
+ @dashboard_whitelist()
+ def delete(self) -> None:
+ # Note: using delete instead of archive to avoid the client API fetching the doc again
+
+ self.archive()
+
+ def set_default_app_cache_flags(self):
+ if self.use_app_cache:
+ return
+
+ if not frappe.db.get_single_value("Press Settings", "use_app_cache"):
+ return
+
+ if not self.can_use_get_app_cache():
+ return
+
+ self.use_app_cache = 1
+ self.compress_app_cache = frappe.db.get_single_value(
+ "Press Settings",
+ "compress_app_cache",
+ )
+
+ def set_default_delta_builds_flags(self):
+ if not frappe.db.get_single_value("Press Settings", "use_delta_builds"):
+ return
+
+ self.use_delta_builds = 0
+
+ def is_this_version_or_above(self, version: int) -> bool:
+ return frappe.get_cached_value("Frappe Version", self.version, "number") >= version
+
+ def setup_default_feature_flags(self):
+ DEFAULT_FEATURE_FLAGS = {
+ "Version 14": {"merge_default_and_short_rq_queues": True},
+ "Version 15": {
+ "gunicorn_threads_per_worker": "4",
+ "merge_default_and_short_rq_queues": True,
+ "use_rq_workerpool": True,
+ },
+ "Nightly": {
+ "gunicorn_threads_per_worker": "4",
+ "merge_default_and_short_rq_queues": True,
+ "use_rq_workerpool": True,
+ },
+ }
+ flags = DEFAULT_FEATURE_FLAGS.get(self.version, {})
+ for key, value in flags.items():
+ setattr(self, key, value)
+
-def new_release_group(
- title, version, apps, team=None, cluster=None, saas_app="", server=None
-):
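+# Cached for 60 seconds so repeated deploy checks don't recompute the suspension flag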
+@redis_cache(ttl=60)
+def are_builds_suspended() -> bool:
+ return is_suspended()
+
+
+def new_release_group(title, version, apps, team=None, cluster=None, saas_app="", server=None):
if cluster:
if not server:
- server = frappe.get_all(
+ restricted_release_group_names = frappe.db.get_all(
+ "Site Plan Release Group",
+ pluck="release_group",
+ filters={"parenttype": "Site Plan", "parentfield": "release_groups"},
+ distinct=True,
+ )
+ restricted_server_names = frappe.db.get_all(
+ "Release Group Server",
+ pluck="server",
+ filters={
+ "parenttype": "Release Group",
+ "parentfield": "servers",
+ "parent": ("in", restricted_release_group_names),
+ },
+ distinct=True,
+ )
+ servers = frappe.get_all(
"Server",
- {"status": "Active", "cluster": cluster, "use_for_new_benches": True},
+ {
+ "status": "Active",
+ "cluster": cluster,
+ "use_for_new_benches": True,
+ "name": ("not in", restricted_server_names),
+ },
pluck="name",
limit=1,
- )[0]
+ )
+
+ if not servers:
+ frappe.throw("No servers found for new benches!")
+ else:
+ server = servers[0]
+
+ servers = [{"server": server}]
+ elif server:
servers = [{"server": server}]
else:
servers = []
- group = frappe.get_doc(
+ return frappe.get_doc(
{
"doctype": "Release Group",
"title": title,
@@ -580,19 +1680,172 @@ def new_release_group(
"saas_app": saas_app,
}
).insert()
- return group
def get_status(name):
return (
"Active"
- if frappe.get_all(
- "Bench", {"group": name, "status": "Active"}, limit=1, order_by="creation desc"
- )
+ if frappe.get_all("Bench", {"group": name, "status": "Active"}, limit=1, order_by="creation desc")
else "Awaiting Deploy"
)
-get_permission_query_conditions = get_permission_query_conditions_for_doctype(
- "Release Group"
-)
+def prune_servers_without_sites():
+ rg_servers = frappe.qb.DocType("Release Group Server")
+ rg = frappe.qb.DocType("Release Group")
+ groups_with_multiple_servers = (
+ frappe.qb.from_(rg_servers)
+ .inner_join(rg)
+ .on(rg.name == rg_servers.parent)
+ .where(rg.enabled == 1)
+ .where(rg.public == 0)
+ .where(rg.central_bench == 0)
+ .where(rg.team != "team@erpnext.com")
+ .where(
+ rg.modified < frappe.utils.add_to_date(None, days=-7)
+ ) # use the modified timestamp as a proxy for when the server was added
+ .groupby(rg_servers.parent)
+ .having(Count("*") > 1)
+ .select(rg_servers.parent)
+ .run(as_dict=False)
+ )
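+ # run(as_dict=False) returns tuples; keep only the group name column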
+ groups_with_multiple_servers = [x[0] for x in groups_with_multiple_servers]
+ groups_with_multiple_servers = frappe.get_all(
+ "Release Group Server",
+ filters={"parent": ("in", groups_with_multiple_servers)},
+ fields=["parent", "server"],
+ order_by="parent",
+ as_list=True,
+ )
+
+ from press.press.doctype.bench.bench import (
+ get_scheduled_version_upgrades,
+ get_unfinished_site_migrations,
+ )
+
+ for group, server in groups_with_multiple_servers:
+ sites = frappe.get_all(
+ "Site",
+ {"status": ("!=", "Archived"), "group": group, "server": server},
+ ["name"],
+ )
+ if not sites:
+ benches = frappe.get_all(
+ "Bench",
+ {"group": group, "server": server, "status": "Active"},
+ ["name", "server", "group"],
+ )
+ for bench in benches:
+ if get_unfinished_site_migrations(bench.name) or get_scheduled_version_upgrades(bench):
+ continue
+ frappe.db.delete("Release Group Server", {"parent": group, "server": server})
+ frappe.db.commit()
+
+
+get_permission_query_conditions = get_permission_query_conditions_for_doctype("Release Group")
+
+
+def can_use_release(app_release):
+ # Private releases are always usable; public ones must be approved
+ if not app_release.public:
+ return True
+
+ return app_release.status == "Approved"
+
+
+def update_rg_app_source(rg: "ReleaseGroup", source: "AppSource"):
+ for app in rg.apps:
+ if app.app == source.app:
+ app.source = source.name
+ break
+
+
+def get_job_names(rg: str, job_type: str, job_status: list[str]):
+ b = frappe.qb.DocType("Bench")
+ aj = frappe.qb.DocType("Agent Job")
+
+ jobs = (
+ frappe.qb.from_(b)
+ .inner_join(aj)
+ .on(b.name == aj.bench)
+ .where(b.group == rg)
+ .where(aj.job_type == job_type)
+ .where(aj.status.isin(job_status))
+ .select(aj.name)
+ .orderby(aj.modified, order=frappe.query_builder.Order.desc)
+ ).run()
+
+ return [j[0] for j in jobs]
+
+
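+# Maps a Python value to the config type stored against a config key.
+# Illustrative (hypothetical) values:
+#   get_config_type({"a": 1}) -> "JSON"
+#   get_config_type(True)     -> "Boolean"  (checked before Number, since bool subclasses int)
+#   get_config_type(42)       -> "Number"
+#   get_config_type("x")      -> "String"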
+def get_config_type(value: Any):
+ if isinstance(value, (dict, list)):
+ return "JSON"
+
+ if isinstance(value, bool):
+ return "Boolean"
+
+ if isinstance(value, (int, float)):
+ return "Number"
+
+ return "String"
+
+
+def get_formatted_config_value(config_type: str, value: Any, key: str, name: str):
+ if config_type == "Number":
+ return flt(value)
+
+ if config_type == "Boolean":
+ return bool(sbool(value))
+
+ if config_type == "JSON":
+ return frappe.parse_json(value)
+
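+ # "*******" is the masked placeholder shown to clients; fetch the stored value instead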
+ if config_type == "Password" and value == "*******":
+ return frappe.get_value("Site Config", {"key": key, "parent": name}, "value")
+
+ return value
+
+
+def add_public_servers_to_public_groups():
+ """
+ Add public servers to public release groups.
+ Used when a new server is added to the system.
+ """
+ public_groups = frappe.get_all(
+ "Release Group",
+ filters={"public": 1, "enabled": 1, "central_bench": 0},
+ fields=["name"],
+ )
+ public_servers = frappe.get_all(
+ "Server",
+ filters={"public": 1, "status": "Active"},
+ pluck="name",
+ )
+
+ for group in public_groups:
+ rg = ReleaseGroup("Release Group", group.name)
+ for server in public_servers:
+ if find(rg.servers, lambda x: x.server == server):
+ continue
+ rg.reload()
+ rg.append("servers", {"server": server, "default": False})
+ rg.save()
+
+
+def get_restricted_server_names():
+ restricted_release_group_names = frappe.db.get_all(
+ "Site Plan Release Group",
+ pluck="release_group",
+ filters={"parenttype": "Site Plan", "parentfield": "release_groups"},
+ distinct=True,
+ )
+ return frappe.db.get_all(
+ "Release Group Server",
+ pluck="server",
+ filters={
+ "parenttype": "Release Group",
+ "parentfield": "servers",
+ "parent": ("in", restricted_release_group_names),
+ },
+ distinct=True,
+ )
diff --git a/press/press/doctype/release_group/test_release_group.py b/press/press/doctype/release_group/test_release_group.py
index a0bfaaa7217..3dd82b2323c 100644
--- a/press/press/doctype/release_group/test_release_group.py
+++ b/press/press/doctype/release_group/test_release_group.py
@@ -1,43 +1,67 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
-import unittest
-from unittest.mock import patch
+from __future__ import annotations
+
+import typing
+from unittest.mock import Mock, patch
import frappe
+from frappe.core.utils import find
+from frappe.tests.utils import FrappeTestCase
-from press.press.doctype.app.app import App
+from press.agent import Agent
+from press.api.bench import deploy_information
+from press.api.client import get_list
+from press.press.doctype.agent_job.agent_job import AgentJob
from press.press.doctype.app.test_app import create_test_app
from press.press.doctype.app_release.test_app_release import create_test_app_release
from press.press.doctype.app_source.app_source import AppSource
from press.press.doctype.app_source.test_app_source import create_test_app_source
-from press.press.doctype.frappe_version.test_frappe_version import (
- create_test_frappe_version,
-)
from press.press.doctype.release_group.release_group import (
ReleaseGroup,
new_release_group,
)
+from press.press.doctype.server.server import BaseServer
from press.press.doctype.team.test_team import create_test_team
+if typing.TYPE_CHECKING:
+ from press.press.doctype.app.app import App
+
+
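+# Stub factory: replaces BaseServer.free_space to report a fixed number of bytes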
+def mock_free_space(space_required: int):
+ def wrapper(*args, **kwargs):
+ return space_required
+
+ return wrapper
+
+
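+# Stub factory: replaces Agent.get for the image-size endpoint with a fixed size (in GB)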
+def mock_image_size(image_size: int):
+ def wrapper(*args, **kwargs):
+ return {"size": image_size}
+
+ return wrapper
+
def create_test_release_group(
- apps: list[App], user: str = "Administrator", public=False, frappe_version=None
+ apps: list[App],
+ user: str | None = None,
+ public=False,
+ frappe_version="Version 14",
+ servers: list[str] | None = None,
) -> ReleaseGroup:
"""
Create Release Group doc.
Also creates app source
"""
- if not frappe_version:
- frappe_version = create_test_frappe_version().name
+ user = user or frappe.session.user
release_group = frappe.get_doc(
{
"doctype": "Release Group",
"version": frappe_version,
"enabled": True,
- "title": f"Test ReleaseGroup {frappe.mock('name')}",
+ "title": f"Test ReleaseGroup {frappe.generate_hash(length=10)}",
"team": frappe.get_value("Team", {"user": user}, "name"),
"public": public,
}
@@ -46,14 +70,20 @@ def create_test_release_group(
app_source = create_test_app_source(release_group.version, app)
release_group.append("apps", {"app": app.name, "source": app_source.name})
+ if servers:
+ for server in servers:
+ release_group.append("servers", {"server": server})
+
release_group.insert(ignore_if_duplicate=True)
release_group.reload()
return release_group
@patch.object(AppSource, "create_release", create_test_app_release)
-class TestReleaseGroup(unittest.TestCase):
+class TestReleaseGroup(FrappeTestCase):
def setUp(self):
+ super().setUp()
+
self.team = create_test_team().name
def tearDown(self):
@@ -188,8 +218,311 @@ def test_branch_change_app_source_does_not_exist(self):
new_app_source = frappe.get_doc("App Source", rg.apps[0].source)
self.assertEqual(new_app_source.branch, "develop")
- self.assertEqual(
- new_app_source.versions[0].version, previous_app_source.versions[0].version
- )
+ self.assertEqual(new_app_source.versions[0].version, previous_app_source.versions[0].version)
self.assertEqual(new_app_source.repository_url, previous_app_source.repository_url)
self.assertEqual(new_app_source.app, app.name)
+
+ def test_new_release_group_loaded_with_correct_dependencies(self):
+ app = create_test_app("frappe", "Frappe Framework")
+ frappe_version = frappe.get_doc("Frappe Version", "Version 14")
+ group = frappe.get_doc(
+ {
+ "doctype": "Release Group",
+ "title": "Test Group",
+ "version": "Version 14",
+ "apps": [{"app": app.name, "source": create_test_app_source("Version 14", app).name}],
+ "team": self.team,
+ }
+ ).insert()
+
+ self.assertEqual(
+ find(group.dependencies, lambda d: d.dependency == "PYTHON_VERSION").version,
+ find(frappe_version.dependencies, lambda x: x.dependency == "PYTHON_VERSION").version,
+ )
+
+ def test_cant_set_min_greater_than_max_workers(self):
+ rg = create_test_release_group([create_test_app()])
+ rg.max_gunicorn_workers = 1
+ rg.min_gunicorn_workers = 2
+ self.assertRaises(frappe.ValidationError, rg.save)
+ rg.max_background_workers = 1
+ rg.min_background_workers = 2
+ self.assertRaises(frappe.ValidationError, rg.save)
+ rg.reload()
+ try:
+ rg.max_gunicorn_workers = 2
+ rg.min_gunicorn_workers = 1
+ rg.max_background_workers = 2
+ rg.min_background_workers = 1
+ rg.save()
+ rg.max_gunicorn_workers = 0 # default
+ rg.min_gunicorn_workers = 2
+ rg.max_background_workers = 0 # default
+ rg.min_background_workers = 2
+ rg.save()
+ except frappe.ValidationError:
+ self.fail("Should not raise validation error")
+
+ def test_update_available_shows_for_first_deploy(self):
+ rg = create_test_release_group([create_test_app()])
+ self.assertEqual(deploy_information(rg.name).get("update_available"), True)
+
+ def test_fetch_environment_variables(self):
+ rg = create_test_release_group([create_test_app()])
+ environment_variables = [
+ {"key": "test_key", "value": "test_value", "internal": False},
+ {"key": "test_key_2", "value": "test_value", "internal": False},
+ {"key": "secret_key", "value": "test_value", "internal": True},
+ ]
+ for env in environment_variables:
+ rg.append("environment_variables", env)
+ rg.save()
+ rg.reload()
+ fetched_environment_variable_list = get_list(
+ "Release Group Variable",
+ fields=["name", "key", "value"],
+ filters={"parenttype": "Release Group", "parent": rg.name},
+ )
+ self.assertEqual(len(fetched_environment_variable_list), 2)
+ internal_environment_variables_keys = [env["key"] for env in environment_variables if env["internal"]]
+ non_internal_environment_variables_keys = [
+ env["key"] for env in environment_variables if not env["internal"]
+ ]
+ for env in fetched_environment_variable_list:
+ self.assertNotIn(env.key, internal_environment_variables_keys)
+ self.assertIn(env.key, non_internal_environment_variables_keys)
+
+ def test_add_environment_variable(self):
+ rg = create_test_release_group([create_test_app()])
+ rg.update_environment_variable({"test_key": "test_value"})
+ rg.reload()
+ self.assertEqual(len(rg.environment_variables), 1)
+ self.assertEqual(rg.environment_variables[0].key, "test_key")
+ self.assertEqual(rg.environment_variables[0].value, "test_value")
+
+ def test_update_environment_variable(self):
+ rg = create_test_release_group([create_test_app()])
+ rg.append("environment_variables", {"key": "test_key", "value": "test_value", "internal": 0})
+ rg.save()
+ rg.reload()
+ self.assertEqual(len(rg.environment_variables), 1)
+ rg.update_environment_variable({"test_key": "new_test_value"})
+ rg.reload()
+ self.assertEqual(len(rg.environment_variables), 1)
+ self.assertEqual(rg.environment_variables[0].value, "new_test_value")
+
+ def test_update_internal_environment_variable(self):
+ rg = create_test_release_group([create_test_app()])
+ rg.append("environment_variables", {"key": "test_key", "value": "test_value", "internal": 1})
+ rg.save()
+ rg.reload()
+ self.assertEqual(len(rg.environment_variables), 1)
+
+ def update_internal_environment_variable():
+ rg.update_environment_variable({"test_key": "new_test_value"})
+
+ self.assertRaisesRegex(
+ frappe.ValidationError,
+ "Environment variable test_key is internal and cannot be updated",
+ update_internal_environment_variable,
+ )
+
+ def test_delete_internal_environment_variable(self):
+ rg = create_test_release_group([create_test_app()])
+ rg.append("environment_variables", {"key": "test_key", "value": "test_value", "internal": 1})
+ rg.save()
+ rg.reload()
+ self.assertEqual(len(rg.environment_variables), 1)
+ rg.delete_environment_variable("test_key")
+ rg.reload()
+ self.assertEqual(len(rg.environment_variables), 1)
+
+ def test_delete_environment_variable(self):
+ rg = create_test_release_group([create_test_app()])
+ rg.append("environment_variables", {"key": "test_key", "value": "test_value", "internal": 0})
+ rg.save()
+ rg.reload()
+ self.assertEqual(len(rg.environment_variables), 1)
+ rg.delete_environment_variable("test_key")
+ rg.reload()
+ self.assertEqual(len(rg.environment_variables), 0)
+
+ @patch.object(AgentJob, "enqueue_http_request", new=Mock())
+ def test_creating_private_bench_should_not_pick_servers_used_in_restricted_site_plans(
+ self,
+ ):
+ from press.api.bench import new
+ from press.press.doctype.cluster.test_cluster import create_test_cluster
+ from press.press.doctype.proxy_server.test_proxy_server import (
+ create_test_proxy_server,
+ )
+ from press.press.doctype.root_domain.test_root_domain import create_test_root_domain
+ from press.press.doctype.server.test_server import create_test_server
+ from press.press.doctype.site.test_site import create_test_bench
+ from press.press.doctype.site_plan.test_site_plan import create_test_plan
+
+ cluster = create_test_cluster("Default", public=True)
+ root_domain = create_test_root_domain("local.fc.frappe.dev")
+ frappe.db.set_single_value("Press Settings", "domain", root_domain.name)
+
+ frappe_app = create_test_app(name="frappe")
+ new_frappe_app_source = create_test_app_source(version="Version 15", app=frappe_app)
+
+ n1_server = create_test_proxy_server(cluster=cluster.name, domain=root_domain.name)
+ f1_server = create_test_server(cluster=cluster.name, proxy_server=n1_server.name)
+ f1_server.use_for_new_benches = True
+ f1_server.save()
+ f1_server.reload()
+
+ n2_server = create_test_proxy_server(cluster=cluster.name, domain=root_domain.name)
+ f2_server = create_test_server(cluster=cluster.name, proxy_server=n2_server.name)
+ f2_server.use_for_new_benches = True
+ f2_server.save()
+ f2_server.reload()
+
+ rg = create_test_release_group([frappe_app], servers=[f2_server.name])
+ create_test_bench(group=rg)
+
+ create_test_plan("Site", allowed_apps=[], release_groups=[rg.name])
+
+ """
+ Try to create new bench, it should always pick the server which haven't used in any restricted release group
+ """
+ group_name = new(
+ {
+ "title": "Test Bench 55",
+ "apps": [{"name": frappe_app.name, "source": new_frappe_app_source.name}],
+ "version": "Version 15",
+ "cluster": "Default",
+ "saas_app": None,
+ "server": None,
+ }
+ )
+ new_group = frappe.get_doc("Release Group", group_name)
+ self.assertEqual(new_group.servers[0].server, f1_server.name)
+
+ def test_validate_dependant_apps(self):
+ release_group: ReleaseGroup = frappe.get_doc(
+ {
+ "doctype": "Release Group",
+ "version": "Nightly",
+ "enabled": True,
+ "title": f"Test ReleaseGroup {frappe.mock('name')}",
+ "team": self.team,
+ "public": True,
+ }
+ )
+ frappe_app = create_test_app()
+ hrms_app = create_test_app(name="hrms", title="test-hrms")
+
+ hrms_app_source = create_test_app_source(
+ "Nightly", hrms_app, "https://github.com/frappe/hrms", "master", team=self.team
+ )
+ frappe_app_source = create_test_app_source(
+ "Nightly", frappe_app, "https://github.com/frappe/frappe", "master", team=self.team
+ )
+
+ for app, app_source in [(frappe_app, frappe_app_source), (hrms_app, hrms_app_source)]:
+ release_group.append("apps", {"app": app.name, "source": app_source.name})
+
+ release_group.check_dependent_apps = True
+
+ with self.assertRaises(frappe.exceptions.ValidationError):
+ release_group.insert()
+
+ # Insert the dependent app and check that it works
+ erpnext = create_test_app("erpnext", "ERPNext")
+ erpnext_app_source = create_test_app_source(
+ "Nightly", erpnext, "https://github.com/frappe/erpnext", "master", self.team
+ )
+
+ release_group.append("apps", {"app": erpnext.name, "source": erpnext_app_source.name})
+ release_group.insert()
+
+ @patch.object(frappe, "enqueue_doc", new=Mock())
+ def test_multiple_platform_server_addition(self):
+ def create_build_and_succeed(release_group: ReleaseGroup):
+ deploy_candidate = release_group.create_deploy_candidate()
+ response = deploy_candidate.build()
+ deploy_candidate_name = response["message"]
+ frappe.db.set_value("Deploy Candidate Build", deploy_candidate_name, "status", "Success")
+
+ from press.press.doctype.cluster.test_cluster import create_test_cluster
+ from press.press.doctype.proxy_server.test_proxy_server import (
+ create_test_proxy_server,
+ )
+ from press.press.doctype.root_domain.test_root_domain import create_test_root_domain
+ from press.press.doctype.server.test_server import create_test_server
+
+ cluster = create_test_cluster("Default", public=True)
+ root_domain = create_test_root_domain("local.fc.frappe.dev")
+ n1_server = create_test_proxy_server(cluster=cluster.name, domain=root_domain.name)
+
+ f1_server = create_test_server(cluster=cluster.name, proxy_server=n1_server.name, use_for_build=True)
+ f2_server = create_test_server(
+ cluster=cluster.name, proxy_server=n1_server.name, platform="arm64", use_for_build=True
+ )
+
+ f1_server.save()
+ f2_server.save()
+
+ rg = create_test_release_group([create_test_app()], servers=[f1_server.name])
+
+ with self.assertRaises(frappe.ValidationError):
+ # No previous builds present
+ rg.add_server(f2_server.name, True)
+
+ create_build_and_succeed(rg)
+
+ # This server addition created a deploy candidate build
+ rg.add_server(f2_server.name, True)
+ arm_build = frappe.get_value("Deploy Candidate Build", {"group": rg.name, "platform": "arm64"})
+
+ self.assertTrue(arm_build)
+
+ # Assert added deploy candidate build has a `deploy on server` field
+ self.assertEqual(
+ frappe.get_value("Deploy Candidate Build", arm_build, "deploy_on_server"), f2_server.name
+ )
+
+ @patch.object(AgentJob, "enqueue_http_request", new=Mock())
+ @patch.object(BaseServer, "calculated_increase_disk_size", Mock())
+ def test_insufficient_space(self):
+ from press.press.doctype.server.test_server import create_test_server
+ from press.press.doctype.site.test_site import create_test_bench
+
+ app = create_test_app()
+ server = create_test_server(auto_increase_storage=False)
+ test_release_group = create_test_release_group([app], servers=[server.name])
+ create_test_bench(group=test_release_group)
+
+ with (
+ patch.object(BaseServer, "free_space", mock_free_space(space_required=54000000000)),
+ patch.object(Agent, "get", mock_image_size(5.21)),
+ ): # Image size is 5.21 GB; ~50 GiB of free space is mocked, so there is enough space
+ test_release_group.check_app_server_storage()
+
+ with (
+ self.assertRaises(frappe.ValidationError),
+ patch.object(BaseServer, "free_space", mock_free_space(space_required=5400000000)),
+ patch.object(Agent, "get", mock_image_size(6)),
+ ): # Image size is 6 GB; only ~5 GiB of free space is mocked, so there is not enough space
+ test_release_group.check_app_server_storage()
+
+ @patch.object(AgentJob, "enqueue_http_request", new=Mock())
+ @patch.object(BaseServer, "calculated_increase_disk_size", Mock())
+ @patch.object(Agent, "get", mock_image_size(60))
+ @patch.object(BaseServer, "free_space", mock_free_space(space_required=5400000000))
+ def test_insufficient_space_on_auto_add_storage_servers(self):
+ from press.press.doctype.server.test_server import create_test_server
+ from press.press.doctype.site.test_site import create_test_bench
+
+ # For public servers and servers with auto increase storage,
+ # avoid throwing space errors and instead just increase the disk size for them
+
+ app = create_test_app()
+ server = create_test_server(auto_increase_storage=1)
+ test_release_group = create_test_release_group([app], servers=[server.name])
+ create_test_bench(group=test_release_group)
+
+ test_release_group.check_app_server_storage()
diff --git a/press/press/doctype/release_group_app/release_group_app.py b/press/press/doctype/release_group_app/release_group_app.py
index 730f1d28bb8..e8cf12d24ad 100644
--- a/press/press/doctype/release_group_app/release_group_app.py
+++ b/press/press/doctype/release_group_app/release_group_app.py
@@ -1,11 +1,37 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
# import frappe
+import typing
+
from frappe.model.document import Document
+from frappe.utils import cstr
+
+from press.api.bench import apps
class ReleaseGroupApp(Document):
- pass
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ app: DF.Link
+ enable_auto_deploy: DF.Check
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ source: DF.Link
+ title: DF.Data
+ # end: auto-generated types
+
+ dashboard_fields: typing.ClassVar = ["app"]
+
+ @staticmethod
+ def get_list_query(query, filters=None, **list_args):
+ group = cstr(filters.get("parent", "")) if filters else None
+ return apps(group)
diff --git a/press/press/doctype/release_group_dependency/release_group_dependency.json b/press/press/doctype/release_group_dependency/release_group_dependency.json
index 99f67445c3e..789f568dbf2 100644
--- a/press/press/doctype/release_group_dependency/release_group_dependency.json
+++ b/press/press/doctype/release_group_dependency/release_group_dependency.json
@@ -6,7 +6,8 @@
"engine": "InnoDB",
"field_order": [
"dependency",
- "version"
+ "version",
+ "is_custom"
],
"fields": [
{
@@ -23,12 +24,20 @@
"in_list_view": 1,
"label": "Version",
"reqd": 1
+ },
+ {
+ "default": "0",
+ "depends_on": "eval: doc.is_custom",
+ "description": "Version has been set by the user from the dashboard and could be invalid.",
+ "fieldname": "is_custom",
+ "fieldtype": "Check",
+ "label": "Is Custom"
}
],
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
- "modified": "2021-05-18 18:38:54.760858",
+ "modified": "2024-07-19 13:13:25.158330",
"modified_by": "Administrator",
"module": "Press",
"name": "Release Group Dependency",
@@ -36,5 +45,6 @@
"permissions": [],
"sort_field": "modified",
"sort_order": "DESC",
+ "states": [],
"track_changes": 1
}
\ No newline at end of file
diff --git a/press/press/doctype/release_group_dependency/release_group_dependency.py b/press/press/doctype/release_group_dependency/release_group_dependency.py
index 2b450a38979..76028910dab 100644
--- a/press/press/doctype/release_group_dependency/release_group_dependency.py
+++ b/press/press/doctype/release_group_dependency/release_group_dependency.py
@@ -1,9 +1,47 @@
# Copyright (c) 2021, Frappe and contributors
# For license information, please see license.txt
-# import frappe
+import frappe
from frappe.model.document import Document
+from press.api.client import is_owned_by_team
+
class ReleaseGroupDependency(Document):
- pass
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ dependency: DF.Data
+ is_custom: DF.Check
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ version: DF.Data
+ # end: auto-generated types
+
+ @staticmethod
+ def get_list_query(query, filters=None, **list_args):
+ if not filters or not (group := filters.get("parent")):
+ return None
+ is_owned_by_team("Release Group", group, raise_exception=True)
+
+ RGDependency = frappe.qb.DocType("Release Group Dependency")
+ BenchDependency = frappe.qb.DocType("Bench Dependency")
+
+ query = (
+ query.join(BenchDependency)
+ .on(BenchDependency.name == RGDependency.dependency)
+ .where(BenchDependency.internal == 0)
+ .select(
+ RGDependency.dependency,
+ RGDependency.version,
+ BenchDependency.title,
+ RGDependency.is_custom,
+ )
+ )
+ return query.run(as_dict=True)
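The override above is the recurring dashboard-list pattern in this changeset: refuse unfiltered access, check team ownership, then join the child table to a lookup doctype and return only user-facing rows. A condensed sketch, assuming only the frappe.qb query-builder API (doctype names hypothetical):

import frappe
from frappe.model.document import Document


class ExampleDependency(Document):
    @staticmethod
    def get_list_query(query, filters=None, **list_args):
        # Never list rows without an explicit parent filter;
        # a real implementation would also verify the caller owns `parent`.
        if not filters or not (parent := filters.get("parent")):
            return None

        Child = frappe.qb.DocType("Example Dependency")
        Lookup = frappe.qb.DocType("Example Lookup")

        # Enrich each row with the lookup's title and hide internal entries.
        query = (
            query.join(Lookup)
            .on(Lookup.name == Child.dependency)
            .where(Lookup.internal == 0)
            .select(Child.dependency, Child.version, Lookup.title)
        )
        return query.run(as_dict=True)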
diff --git a/press/press/doctype/release_group_mount/__init__.py b/press/press/doctype/release_group_mount/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/release_group_mount/release_group_mount.json b/press/press/doctype/release_group_mount/release_group_mount.json
new file mode 100644
index 00000000000..bbd6fe7ca30
--- /dev/null
+++ b/press/press/doctype/release_group_mount/release_group_mount.json
@@ -0,0 +1,56 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2023-12-13 15:02:17.666052",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "is_absolute_path",
+ "section_break_nvom",
+ "source",
+ "column_break_zdvz",
+ "destination"
+ ],
+ "fields": [
+ {
+ "fieldname": "source",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Source (Host)",
+ "reqd": 1
+ },
+ {
+ "fieldname": "column_break_zdvz",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "destination",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Destination (Container)",
+ "reqd": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "is_absolute_path",
+ "fieldtype": "Check",
+ "label": "Is Absolute Path"
+ },
+ {
+ "fieldname": "section_break_nvom",
+ "fieldtype": "Section Break"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2023-12-14 11:01:27.384102",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Release Group Mount",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/release_group_mount/release_group_mount.py b/press/press/doctype/release_group_mount/release_group_mount.py
new file mode 100644
index 00000000000..8c2c295164a
--- /dev/null
+++ b/press/press/doctype/release_group_mount/release_group_mount.py
@@ -0,0 +1,25 @@
+# Copyright (c) 2023, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class ReleaseGroupMount(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ destination: DF.Data
+ is_absolute_path: DF.Check
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ source: DF.Data
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/release_group_package/release_group_package.json b/press/press/doctype/release_group_package/release_group_package.json
index c4e4dc992d6..6fd44f6aee5 100644
--- a/press/press/doctype/release_group_package/release_group_package.json
+++ b/press/press/doctype/release_group_package/release_group_package.json
@@ -8,8 +8,10 @@
"engine": "InnoDB",
"field_order": [
"package_manager",
+ "package_prerequisites",
"column_break_r6rj",
- "package"
+ "package",
+ "after_install"
],
"fields": [
{
@@ -30,12 +32,24 @@
{
"fieldname": "column_break_r6rj",
"fieldtype": "Column Break"
+ },
+ {
+ "description": "Use && for multiline prerequisites\n
\nUse jinja syntax to define dependancy variables
\neg: python{{ PYTHON_VERSION }} ",
+ "fieldname": "package_prerequisites",
+ "fieldtype": "Text",
+ "label": "Package Prerequisites"
+ },
+ {
+ "description": "Use && for multiline prerequisites\n",
+ "fieldname": "after_install",
+ "fieldtype": "Text",
+ "label": "After Install"
}
],
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
- "modified": "2023-05-12 16:25:43.924621",
+ "modified": "2023-12-26 18:17:13.374441",
"modified_by": "Administrator",
"module": "Press",
"name": "Release Group Package",
diff --git a/press/press/doctype/release_group_package/release_group_package.py b/press/press/doctype/release_group_package/release_group_package.py
index 7d574297815..b48d4ac7bb5 100644
--- a/press/press/doctype/release_group_package/release_group_package.py
+++ b/press/press/doctype/release_group_package/release_group_package.py
@@ -6,4 +6,21 @@
class ReleaseGroupPackage(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ after_install: DF.Text | None
+ package: DF.Data
+ package_manager: DF.Data
+ package_prerequisites: DF.Text | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/release_group_server/release_group_server.py b/press/press/doctype/release_group_server/release_group_server.py
index 474c2858eba..b8804eda285 100644
--- a/press/press/doctype/release_group_server/release_group_server.py
+++ b/press/press/doctype/release_group_server/release_group_server.py
@@ -8,4 +8,19 @@
class ReleaseGroupServer(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ default: DF.Check
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ server: DF.Link
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/release_group_variable/release_group_variable.json b/press/press/doctype/release_group_variable/release_group_variable.json
index ddd6f50932e..beb7ba9afbe 100644
--- a/press/press/doctype/release_group_variable/release_group_variable.json
+++ b/press/press/doctype/release_group_variable/release_group_variable.json
@@ -7,7 +7,8 @@
"engine": "InnoDB",
"field_order": [
"key",
- "value"
+ "value",
+ "internal"
],
"fields": [
{
@@ -19,16 +20,23 @@
},
{
"fieldname": "value",
- "fieldtype": "Data",
+ "fieldtype": "Text",
"in_list_view": 1,
"label": "Value",
"reqd": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "internal",
+ "fieldtype": "Check",
+ "in_list_view": 1,
+ "label": "Internal Usage"
}
],
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
- "modified": "2023-06-13 19:17:09.361096",
+ "modified": "2025-01-20 15:12:45.664299",
"modified_by": "Administrator",
"module": "Press",
"name": "Release Group Variable",
diff --git a/press/press/doctype/release_group_variable/release_group_variable.py b/press/press/doctype/release_group_variable/release_group_variable.py
index f5ffb2acbd0..c56639a984c 100644
--- a/press/press/doctype/release_group_variable/release_group_variable.py
+++ b/press/press/doctype/release_group_variable/release_group_variable.py
@@ -1,9 +1,33 @@
# Copyright (c) 2023, Frappe and contributors
# For license information, please see license.txt
-# import frappe
+import frappe
from frappe.model.document import Document
class ReleaseGroupVariable(Document):
- pass
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING, ClassVar
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ internal: DF.Check
+ key: DF.Data
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ value: DF.Text
+ # end: auto-generated types
+
+ dashboard_fields: ClassVar = ["key", "value"]
+
+ @staticmethod
+ def get_list_query(query, filters=None, **list_args):
+ environmentVariable = frappe.qb.DocType("Release Group Variable")
+ query = query.where(environmentVariable.internal == 0).orderby(
+ environmentVariable.key, order=frappe.qb.asc
+ )
+ return query.run(as_dict=True)
diff --git a/press/press/doctype/remote_file/remote_file.json b/press/press/doctype/remote_file/remote_file.json
index 064a5584a0e..fc94bf91e83 100644
--- a/press/press/doctype/remote_file/remote_file.json
+++ b/press/press/doctype/remote_file/remote_file.json
@@ -7,11 +7,13 @@
"engine": "InnoDB",
"field_order": [
"file_name",
- "site",
"status",
+ "column_break_uzdq",
+ "site",
"section_break_5",
"file_size",
"file_path",
+ "column_break_scaf",
"file_type",
"bucket",
"url"
@@ -40,7 +42,7 @@
},
{
"fieldname": "file_path",
- "fieldtype": "Data",
+ "fieldtype": "Text",
"label": "File Path",
"read_only": 1
},
@@ -76,14 +78,25 @@
{
"fieldname": "site",
"fieldtype": "Link",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
"label": "Site",
"options": "Site",
- "read_only": 1
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "column_break_uzdq",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "column_break_scaf",
+ "fieldtype": "Column Break"
}
],
"icon": "fa fa-file",
"links": [],
- "modified": "2020-08-20 15:53:44.984197",
+ "modified": "2025-03-18 10:10:24.353402",
"modified_by": "Administrator",
"module": "Press",
"name": "Remote File",
@@ -119,6 +132,7 @@
"search_fields": "file_name",
"sort_field": "modified",
"sort_order": "ASC",
+ "states": [],
"title_field": "file_name",
"track_changes": 1
}
\ No newline at end of file
diff --git a/press/press/doctype/remote_file/remote_file.py b/press/press/doctype/remote_file/remote_file.py
index 1eb72cc0b47..1ec85989e66 100644
--- a/press/press/doctype/remote_file/remote_file.py
+++ b/press/press/doctype/remote_file/remote_file.py
@@ -1,13 +1,13 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
import json
+import pprint
import frappe
import requests
-import pprint
from boto3 import client, resource
from frappe.model.document import Document
from frappe.utils.password import get_decrypted_password
@@ -27,101 +27,103 @@ def get_remote_key(file):
def poll_file_statuses():
- from press.utils import chunk
-
- available_files = {}
- doctype = "Remote File"
- aws_access_key = frappe.db.get_single_value(
- "Press Settings", "offsite_backups_access_key_id"
- )
+ aws_access_key = frappe.db.get_single_value("Press Settings", "offsite_backups_access_key_id")
aws_secret_key = get_decrypted_password(
"Press Settings", "Press Settings", "offsite_backups_secret_access_key"
)
default_region = frappe.db.get_single_value("Press Settings", "backup_region")
- buckets = {
- frappe.db.get_single_value("Press Settings", "aws_s3_bucket"): {
+ buckets = [
+ {
+ "name": frappe.db.get_single_value("Press Settings", "aws_s3_bucket"),
"region": default_region,
"access_key_id": aws_access_key,
"secret_access_key": aws_secret_key,
},
- frappe.db.get_single_value("Press Settings", "remote_uploads_bucket"): {
+ {
+ "name": frappe.db.get_single_value("Press Settings", "remote_uploads_bucket"),
"region": default_region,
- "access_key_id": frappe.db.get_single_value(
- "Press Settings", "remote_access_key_id"
- ),
+ "access_key_id": frappe.db.get_single_value("Press Settings", "remote_access_key_id"),
"secret_access_key": get_decrypted_password(
"Press Settings", "Press Settings", "remote_secret_access_key"
),
},
- }
+ ]
- [
- buckets.update(
+ for b in frappe.get_all("Backup Bucket", ["bucket_name", "cluster", "region"]):
+ buckets.append(
{
- b["bucket_name"]: {
- "region": b["region"],
- "access_key_id": aws_access_key,
- "secret_access_key": aws_secret_key,
- },
+ "name": b["bucket_name"],
+ "region": b["region"],
+ "access_key_id": aws_access_key,
+ "secret_access_key": aws_secret_key,
}
)
- for b in frappe.get_all("Backup Bucket", ["bucket_name", "cluster", "region"])
- ]
-
- for bucket_name, current_bucket in buckets.items():
- available_files[bucket_name] = []
- s3 = resource(
- "s3",
- aws_access_key_id=current_bucket["access_key_id"],
- aws_secret_access_key=current_bucket["secret_access_key"],
- region_name=current_bucket["region"],
+ for bucket in buckets:
+ frappe.enqueue(
+ "press.press.doctype.remote_file.remote_file.poll_file_statuses_from_bucket",
+ bucket=bucket,
+ job_id=f"poll_file_statuses:{bucket['name']}",
+ queue="long",
+ deduplicate=True,
+ enqueue_after_commit=True,
)
- for s3_object in s3.Bucket(bucket_name).objects.all():
- available_files[bucket_name].append(s3_object.key)
- all_files = set(available_files[bucket_name])
+def poll_file_statuses_from_bucket(bucket):
+ from press.utils import chunk
- remote_files = frappe.get_all(
- doctype,
- fields=["name", "file_path", "status"],
- filters={"bucket": bucket_name},
- )
+ s3 = resource(
+ "s3",
+ aws_access_key_id=bucket["access_key_id"],
+ aws_secret_access_key=bucket["secret_access_key"],
+ region_name=bucket["region"],
+ )
- set_to_available = []
- set_to_unavailable = []
- for remote_file in remote_files:
- name, file_path, status = (
- remote_file["name"],
- remote_file["file_path"],
- remote_file["status"],
- )
- if file_path not in all_files:
- if status == "Available":
- set_to_unavailable.append(name)
- else:
- if status == "Unavailable":
- set_to_available.append(name)
+ available_files = set()
+ for s3_object in s3.Bucket(bucket["name"]).objects.all():
+ available_files.add(s3_object.key)
- for files in chunk(set_to_unavailable, 1000):
- frappe.db.set_value(doctype, {"name": ("in", files)}, "status", "Unavailable")
+ doctype = "Remote File"
+ remote_files = frappe.get_all(
+ doctype,
+ fields=["name", "file_path", "status"],
+ filters={"bucket": bucket["name"]},
+ )
- for files in chunk(set_to_available, 1000):
- frappe.db.set_value(doctype, {"name": ("in", files)}, "status", "Available")
+ set_to_available = []
+ set_to_unavailable = []
+ for remote_file in remote_files:
+ name, file_path, status = (
+ remote_file["name"],
+ remote_file["file_path"],
+ remote_file["status"],
+ )
+ if file_path not in available_files:
+ if status == "Available":
+ set_to_unavailable.append(name)
+ else:
+ if status == "Unavailable":
+ set_to_available.append(name)
- frappe.db.commit()
+ for files in chunk(set_to_unavailable, 1000):
+ frappe.db.set_value(doctype, {"name": ("in", files)}, "status", "Unavailable")
+
+ for files in chunk(set_to_available, 1000):
+ frappe.db.set_value(doctype, {"name": ("in", files)}, "status", "Available")
+
+ # Delete s3 files that are not tracked with Remote Files
+ remote_file_paths = set(file["file_path"] for file in remote_files)
+ file_only_on_s3 = available_files - remote_file_paths
+ delete_s3_files({bucket["name"]: list(file_only_on_s3)})
+ frappe.db.commit()
def delete_remote_backup_objects(remote_files):
"""Delete specified objects identified by keys in the backups bucket."""
- from boto3 import resource
- from press.utils import chunk
-
- press_settings = frappe.get_single("Press Settings")
remote_files = list(set([x for x in remote_files if x]))
if not remote_files:
- return
+ return None
buckets = {bucket: [] for bucket in frappe.get_all("Backup Bucket", pluck="name")}
buckets.update({frappe.db.get_single_value("Press Settings", "aws_s3_bucket"): []})
@@ -135,49 +137,44 @@ def delete_remote_backup_objects(remote_files):
)
]
- for bucket_name in buckets.keys():
- s3 = resource(
- "s3",
- aws_access_key_id=press_settings.offsite_backups_access_key_id,
- aws_secret_access_key=press_settings.get_password(
- "offsite_backups_secret_access_key", raise_exception=False
- ),
- endpoint_url=frappe.db.get_value("Backup Bucket", bucket_name, "endpoint_url")
- or "https://s3.amazonaws.com",
- )
- bucket = s3.Bucket(bucket_name)
- for objects in chunk([{"Key": x} for x in buckets[bucket_name]], 1000):
- response = bucket.delete_objects(Delete={"Objects": objects})
- response = pprint.pformat(response)
- frappe.get_doc(
- doctype="Remote Operation Log", operation_type="Delete Files", response=response
- ).insert()
-
- frappe.db.set_value(
- "Remote File", {"name": ("in", remote_files)}, "status", "Unavailable"
- )
+ delete_s3_files(buckets)
+ frappe.db.set_value("Remote File", {"name": ("in", remote_files)}, "status", "Unavailable")
return remote_files
class RemoteFile(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ bucket: DF.Data | None
+ file_name: DF.Data | None
+ file_path: DF.Text | None
+ file_size: DF.Data | None
+ file_type: DF.Data | None
+ site: DF.Link | None
+ status: DF.Literal["Available", "Unavailable"]
+ url: DF.Code | None
+ # end: auto-generated types
+
@property
def s3_client(self):
if not self.bucket:
return None
- elif self.bucket == frappe.db.get_single_value(
- "Press Settings", "remote_uploads_bucket"
- ):
+ if self.bucket == frappe.db.get_single_value("Press Settings", "remote_uploads_bucket"):
access_key_id = frappe.db.get_single_value("Press Settings", "remote_access_key_id")
secret_access_key = get_decrypted_password(
"Press Settings", "Press Settings", "remote_secret_access_key"
)
elif self.bucket:
- access_key_id = frappe.db.get_single_value(
- "Press Settings", "offsite_backups_access_key_id"
- )
+ access_key_id = frappe.db.get_single_value("Press Settings", "offsite_backups_access_key_id")
secret_access_key = get_decrypted_password(
"Press Settings", "Press Settings", "offsite_backups_secret_access_key"
)
@@ -207,18 +204,17 @@ def exists(self):
return True
self.db_set("status", "Unavailable")
return False
- else:
- try:
- return self.s3_client.head_object(Bucket=self.bucket, Key=self.file_path)
- except Exception:
- self.db_set("status", "Unavailable")
- return False
+ try:
+ return self.s3_client.head_object(Bucket=self.bucket, Key=self.file_path)
+ except Exception:
+ self.db_set("status", "Unavailable")
+ return False
@frappe.whitelist()
def delete_remote_object(self):
self.db_set("status", "Unavailable")
return self.s3_client.delete_object(
- Bucket=frappe.db.get_single_value("Press Settings", "remote_uploads_bucket"),
+ Bucket=self.bucket or frappe.db.get_single_value("Press Settings", "remote_uploads_bucket"),
Key=self.file_path,
)
@@ -236,6 +232,47 @@ def get_download_link(self):
def get_content(self):
if self.url:
return json.loads(requests.get(self.url).content)
- else:
- obj = self.s3_client.get_object(Bucket=self.bucket, Key=self.file_path)
- return json.loads(obj["Body"].read().decode("utf-8"))
+
+ obj = self.s3_client.get_object(Bucket=self.bucket, Key=self.file_path)
+ return json.loads(obj["Body"].read().decode("utf-8"))
+
+ @property
+ def size(self) -> int:
+ """
+ Get the size of file in bytes
+
+ Sets the file_size field if not already set
+ """
+ if int(self.file_size or 0):
+ return int(self.file_size or 0)
+
+ response = requests.head(self.url)
+ self.file_size = int(response.headers.get("content-length", 0))
+ self.save()
+ return int(self.file_size)
+
+
+def delete_s3_files(buckets):
+ """Delete specified files from s3 buckets"""
+ from boto3 import resource
+
+ from press.utils import chunk
+
+ press_settings = frappe.get_single("Press Settings")
+ for bucket_name in buckets:
+ s3 = resource(
+ "s3",
+ aws_access_key_id=press_settings.offsite_backups_access_key_id,
+ aws_secret_access_key=press_settings.get_password(
+ "offsite_backups_secret_access_key", raise_exception=False
+ ),
+ endpoint_url=frappe.db.get_value("Backup Bucket", bucket_name, "endpoint_url")
+ or "https://s3.amazonaws.com",
+ )
+ bucket = s3.Bucket(bucket_name)
+ for objects in chunk([{"Key": x} for x in buckets[bucket_name]], 1000):
+ response = bucket.delete_objects(Delete={"Objects": objects})
+ response = pprint.pformat(response)
+ frappe.get_doc(
+ doctype="Remote Operation Log", operation_type="Delete Files", response=response
+ ).insert()
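Two mechanics in this refactor carry most of the weight: bucket scans now run as background jobs keyed by a stable job_id with deduplicate=True, so overlapping scheduler ticks collapse to one job per bucket, and deletes are chunked because S3's delete_objects accepts at most 1000 keys per call. A rough sketch of both, assuming only the documented frappe.enqueue and boto3 APIs (the dotted worker path is hypothetical):

import frappe
from boto3 import resource


def chunk(items, size):
    # Same idea as press.utils.chunk: yield fixed-size slices.
    for i in range(0, len(items), size):
        yield items[i : i + size]


def enqueue_bucket_scans(buckets: list[dict]):
    for bucket in buckets:
        frappe.enqueue(
            "myapp.storage.scan_bucket",  # hypothetical dotted path to the worker
            bucket=bucket,
            job_id=f"scan_bucket:{bucket['name']}",
            queue="long",
            deduplicate=True,  # skip if an identical queued job already exists
            enqueue_after_commit=True,
        )


def delete_keys(bucket_name: str, keys: list[str], access_key: str, secret: str):
    s3 = resource("s3", aws_access_key_id=access_key, aws_secret_access_key=secret)
    bucket = s3.Bucket(bucket_name)
    # delete_objects is capped at 1000 keys per request, hence the chunking.
    for objects in chunk([{"Key": k} for k in keys], 1000):
        bucket.delete_objects(Delete={"Objects": objects})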
diff --git a/press/press/doctype/remote_file/test_remote_file.py b/press/press/doctype/remote_file/test_remote_file.py
index abcb731d85c..c1bb1aed088 100644
--- a/press/press/doctype/remote_file/test_remote_file.py
+++ b/press/press/doctype/remote_file/test_remote_file.py
@@ -1,26 +1,32 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
+from __future__ import annotations
-import unittest
-from datetime import datetime
+from typing import TYPE_CHECKING
import frappe
+from frappe.tests.utils import FrappeTestCase
+
+if TYPE_CHECKING:
+ from datetime import datetime
def create_test_remote_file(
- site: str, creation: datetime = None, file_path: str = None
+ site: str | None = None,
+ creation: datetime | None = None,
+ file_path: str | None = None,
+ file_size: int = 1024,
):
"""Create test remote file doc for required timestamp."""
- if not creation:
- creation = datetime.now()
+ creation = creation or frappe.utils.now_datetime()
remote_file = frappe.get_doc(
{
"doctype": "Remote File",
"status": "Available",
"site": site,
"file_path": file_path,
+ "file_size": file_size,
}
).insert(ignore_if_duplicate=True)
remote_file.db_set("creation", creation)
@@ -28,5 +34,5 @@ def create_test_remote_file(
return remote_file
-class TestRemoteFile(unittest.TestCase):
+class TestRemoteFile(FrappeTestCase):
pass
diff --git a/press/press/doctype/remote_operation_log/remote_operation_log.py b/press/press/doctype/remote_operation_log/remote_operation_log.py
index 5e78a45f513..8ec31208650 100644
--- a/press/press/doctype/remote_operation_log/remote_operation_log.py
+++ b/press/press/doctype/remote_operation_log/remote_operation_log.py
@@ -8,4 +8,16 @@
class RemoteOperationLog(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ operation_type: DF.Literal["Delete Files"]
+ response: DF.Code
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/remote_operation_log/test_remote_operation_log.py b/press/press/doctype/remote_operation_log/test_remote_operation_log.py
index 551d230db23..2fdfa758727 100644
--- a/press/press/doctype/remote_operation_log/test_remote_operation_log.py
+++ b/press/press/doctype/remote_operation_log/test_remote_operation_log.py
@@ -1,11 +1,10 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestRemoteOperationLog(unittest.TestCase):
+class TestRemoteOperationLog(FrappeTestCase):
pass
diff --git a/press/press/doctype/required_apps/__init__.py b/press/press/doctype/required_apps/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/required_apps/required_apps.json b/press/press/doctype/required_apps/required_apps.json
new file mode 100644
index 00000000000..074933ffe4a
--- /dev/null
+++ b/press/press/doctype/required_apps/required_apps.json
@@ -0,0 +1,35 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-03-28 11:42:58.586288",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "repository_url"
+ ],
+ "fields": [
+ {
+ "fieldname": "repository_url",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Repository URL ",
+ "read_only": 1,
+ "reqd": 1
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2025-03-28 11:43:50.259197",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Required Apps",
+ "owner": "Administrator",
+ "permissions": [],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/press/doctype/required_apps/required_apps.py b/press/press/doctype/required_apps/required_apps.py
new file mode 100644
index 00000000000..c81fdd9ae1a
--- /dev/null
+++ b/press/press/doctype/required_apps/required_apps.py
@@ -0,0 +1,23 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class RequiredApps(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ repository_url: DF.Data
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/resource_tag/resource_tag.json b/press/press/doctype/resource_tag/resource_tag.json
index 8c045a07046..6537a5b6a6e 100644
--- a/press/press/doctype/resource_tag/resource_tag.json
+++ b/press/press/doctype/resource_tag/resource_tag.json
@@ -25,18 +25,21 @@
"in_global_search": 1,
"in_list_view": 1,
"in_preview": 1,
- "label": "Tag Name"
+ "label": "Tag Name",
+ "search_index": 1
}
],
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
- "modified": "2023-07-02 00:37:51.667939",
+ "modified": "2025-11-19 14:03:45.601837",
"modified_by": "Administrator",
"module": "Press",
"name": "Resource Tag",
"owner": "Administrator",
"permissions": [],
+ "row_format": "Dynamic",
+ "rows_threshold_for_grid_search": 20,
"sort_field": "modified",
"sort_order": "DESC",
"states": []
diff --git a/press/press/doctype/resource_tag/resource_tag.py b/press/press/doctype/resource_tag/resource_tag.py
index a980c6668fa..5c114a3a54f 100644
--- a/press/press/doctype/resource_tag/resource_tag.py
+++ b/press/press/doctype/resource_tag/resource_tag.py
@@ -2,8 +2,25 @@
# For license information, please see license.txt
# import frappe
+import typing
+
from frappe.model.document import Document
class ResourceTag(Document):
- pass
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ tag: DF.Link | None
+ tag_name: DF.Data | None
+ # end: auto-generated types
+
+ dashboard_fields: typing.ClassVar = ["tag", "tag_name"]
diff --git a/press/press/doctype/resource_tag/tag_helpers.py b/press/press/doctype/resource_tag/tag_helpers.py
new file mode 100644
index 00000000000..0dcb631dc1a
--- /dev/null
+++ b/press/press/doctype/resource_tag/tag_helpers.py
@@ -0,0 +1,36 @@
+# Copyright (c) 2024, Frappe Technologies Pvt. Ltd. and Contributors
+
+from __future__ import unicode_literals
+
+import frappe
+
+from press.api.client import dashboard_whitelist
+
+
+class TagHelpers:
+ @dashboard_whitelist()
+ def add_resource_tag(self, tag):
+ team = frappe.local.team().name
+ existing_tags = [row.tag_name for row in self.tags]
+ if tag in existing_tags:
+ return
+
+ if not frappe.db.exists(
+ "Press Tag", {"tag": tag, "doctype_name": self.doctype, "team": team}
+ ):
+ tag_doc = frappe.new_doc(
+ "Press Tag", tag=tag, doctype_name=self.doctype, team=team
+ ).insert()
+ else:
+ tag_doc = frappe.get_doc(
+ "Press Tag",
+ {"tag": tag, "doctype_name": self.doctype, "team": team},
+ )
+
+ self.append("tags", {"tag": tag_doc.name})
+ self.save()
+
+ @dashboard_whitelist()
+ def remove_resource_tag(self, tag):
+ self.tags = [row for row in self.tags if row.tag_name != tag]
+ self.save()
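add_resource_tag is a get-or-create: reuse the team's matching Press Tag if one exists, otherwise insert one, then append it to the document's tags child table. The same idiom in isolation (helper names hypothetical; frappe.db.exists returns the matching name or None):

import frappe


def get_or_create_tag(tag: str, doctype_name: str, team: str) -> str:
    filters = {"tag": tag, "doctype_name": doctype_name, "team": team}
    existing = frappe.db.exists("Press Tag", filters)
    if existing:
        return existing
    return frappe.new_doc("Press Tag", **filters).insert().name


def tag_document(doc, tag: str):
    if any(row.tag_name == tag for row in doc.tags):
        return  # already tagged
    team = frappe.local.team().name  # press resolves the current team this way
    doc.append("tags", {"tag": get_or_create_tag(tag, doc.doctype, team)})
    doc.save()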
diff --git a/press/press/doctype/root_domain/root_domain.js b/press/press/doctype/root_domain/root_domain.js
index fdfbb366b06..2a6e31f3ae9 100644
--- a/press/press/doctype/root_domain/root_domain.js
+++ b/press/press/doctype/root_domain/root_domain.js
@@ -2,6 +2,27 @@
// For license information, please see license.txt
frappe.ui.form.on('Root Domain', {
- // refresh: function(frm) {
- // }
+ refresh: function (frm) {
+ frm.trigger('set_mandatory_fields');
+ frm.add_custom_button('Add to proxies', () => {
+ frm.call('add_to_proxies').then((r) => frm.refresh());
+ });
+ },
+
+ dns_provider: function (frm) {
+ frm.trigger('set_mandatory_fields');
+ },
+
+ set_mandatory_fields: function (frm) {
+ frm.set_df_property(
+ 'aws_access_key_id',
+ 'reqd',
+ frm.doc.dns_provider === 'AWS Route 53',
+ );
+ frm.set_df_property(
+ 'aws_secret_access_key',
+ 'reqd',
+ frm.doc.dns_provider === 'AWS Route 53',
+ );
+ },
});
diff --git a/press/press/doctype/root_domain/root_domain.json b/press/press/doctype/root_domain/root_domain.json
index bf820aaaf32..f6cbee9296b 100644
--- a/press/press/doctype/root_domain/root_domain.json
+++ b/press/press/doctype/root_domain/root_domain.json
@@ -7,10 +7,14 @@
"engine": "InnoDB",
"field_order": [
"default_cluster",
+ "default_proxy_server",
+ "team",
+ "enabled",
"column_break_4",
"dns_provider",
"aws_access_key_id",
- "aws_secret_access_key"
+ "aws_secret_access_key",
+ "aws_region"
],
"fields": [
{
@@ -20,7 +24,7 @@
"in_list_view": 1,
"in_standard_filter": 1,
"label": "DNS Provider",
- "options": "AWS Route 53",
+ "options": "AWS Route 53\nGeneric",
"reqd": 1
},
{
@@ -28,16 +32,16 @@
"fieldtype": "Column Break"
},
{
+ "depends_on": "eval:doc.dns_provider=='AWS Route 53'",
"fieldname": "aws_access_key_id",
"fieldtype": "Data",
- "label": "AWS Access Key ID",
- "reqd": 1
+ "label": "AWS Access Key ID"
},
{
+ "depends_on": "eval:doc.dns_provider=='AWS Route 53'",
"fieldname": "aws_secret_access_key",
"fieldtype": "Password",
- "label": "AWS Secret Access Key",
- "reqd": 1
+ "label": "AWS Secret Access Key"
},
{
"fieldname": "default_cluster",
@@ -45,6 +49,30 @@
"label": "Default Cluster",
"options": "Cluster",
"reqd": 1
+ },
+ {
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "label": "Team",
+ "options": "Team"
+ },
+ {
+ "fieldname": "default_proxy_server",
+ "fieldtype": "Link",
+ "label": "Default Proxy Server",
+ "options": "Proxy Server"
+ },
+ {
+ "default": "1",
+ "fieldname": "enabled",
+ "fieldtype": "Check",
+ "label": "Enabled"
+ },
+ {
+ "description": "Sets AWS_DEFAULT_REGION environment variable",
+ "fieldname": "aws_region",
+ "fieldtype": "Data",
+ "label": "AWS Region"
}
],
"index_web_pages_for_search": 1,
@@ -80,10 +108,11 @@
"link_fieldname": "domain"
}
],
- "modified": "2021-03-25 11:42:01.377832",
+ "modified": "2025-12-09 14:04:47.403558",
"modified_by": "Administrator",
"module": "Press",
"name": "Root Domain",
+ "naming_rule": "Set by user",
"owner": "Administrator",
"permissions": [
{
@@ -99,7 +128,10 @@
"write": 1
}
],
+ "row_format": "Dynamic",
+ "rows_threshold_for_grid_search": 20,
"sort_field": "modified",
"sort_order": "DESC",
+ "states": [],
"track_changes": 1
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/root_domain/root_domain.py b/press/press/doctype/root_domain/root_domain.py
index c67c3ce05d2..eb7a812516a 100644
--- a/press/press/doctype/root_domain/root_domain.py
+++ b/press/press/doctype/root_domain/root_domain.py
@@ -1,23 +1,48 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2021, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
-
-from datetime import datetime, timedelta
import json
-from typing import Iterable, List
+from datetime import datetime, timedelta
+from typing import TYPE_CHECKING
import boto3
import frappe
-from frappe.model.document import Document
from frappe.core.utils import find
+from frappe.model.document import Document
+from frappe.utils.caching import redis_cache
from press.utils import log_error
+if TYPE_CHECKING:
+ from collections.abc import Iterable
+
+ from press.press.doctype.proxy_server.proxy_server import ProxyServer
+
class RootDomain(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ aws_access_key_id: DF.Data | None
+ aws_region: DF.Data | None
+ aws_secret_access_key: DF.Password | None
+ default_cluster: DF.Link
+ default_proxy_server: DF.Link | None
+ dns_provider: DF.Literal["AWS Route 53", "Generic"]
+ enabled: DF.Check
+ team: DF.Link | None
+ # end: auto-generated types
+
def after_insert(self):
- if not frappe.db.exists("TLS Certificate", {"wildcard": True, "domain": self.name}):
+ if self.dns_provider != "Generic" and not frappe.db.exists(
+ "TLS Certificate", {"wildcard": True, "domain": self.name}
+ ):
frappe.enqueue_doc(
self.doctype,
self.name,
@@ -27,20 +52,26 @@ def after_insert(self):
def obtain_root_domain_tls_certificate(self):
try:
- rsa_key_size = frappe.db.get_value(
- "Press Settings", "Press Settings", "rsa_key_size"
- )
+ rsa_key_size = frappe.db.get_value("Press Settings", "Press Settings", "rsa_key_size")
frappe.get_doc(
{
"doctype": "TLS Certificate",
"wildcard": True,
"domain": self.name,
"rsa_key_size": rsa_key_size,
+ "provider": "Let's Encrypt",
}
).insert()
except Exception:
log_error("Root Domain TLS Certificate Exception")
+ @property
+ def generic_dns_provider(self):
+ if not hasattr(self, "_generic_dns_provider"):
+ self._generic_dns_provider = self.dns_provider == "Generic"
+
+ return self._generic_dns_provider
+
@property
def boto3_client(self):
if not hasattr(self, "_boto3_client"):
@@ -48,6 +79,7 @@ def boto3_client(self):
"route53",
aws_access_key_id=self.aws_access_key_id,
aws_secret_access_key=self.get_password("aws_secret_access_key"),
+ region_name=self.aws_region,
)
return self._boto3_client
@@ -60,34 +92,25 @@ def get_dns_record_pages(self) -> Iterable:
try:
paginator = self.boto3_client.get_paginator("list_resource_record_sets")
return paginator.paginate(
- PaginationConfig={"MaxItems": 5000, "PageSize": 300, "StartingToken": "0"},
+ PaginationConfig={"MaxItems": 10000, "PageSize": 300, "StartingToken": "0"},
HostedZoneId=self.hosted_zone.split("/")[-1],
)
except Exception:
log_error("Route 53 Pagination Error", domain=self.name)
+ return []
- def delete_dns_records(self, records: List[str], proxy: str):
+ def delete_dns_records(self, records: list[str]):
try:
changes = []
for record in records:
- changes.append(
- {
- "Action": "DELETE",
- "ResourceRecordSet": {
- "Name": record["name"],
- "Type": "CNAME",
- "TTL": record["ttl"],
- "ResourceRecords": [{"Value": proxy}],
- },
- }
- )
+ changes.append({"Action": "DELETE", "ResourceRecordSet": record})
self.boto3_client.change_resource_record_sets(
ChangeBatch={"Changes": changes}, HostedZoneId=self.hosted_zone
)
except Exception:
- log_error("Route 53 Record Deletion Error", domain=self.name, proxy=proxy)
+ log_error("Route 53 Record Deletion Error", domain=self.name)
def get_sites_being_renamed(self):
# get sites renamed in Server but doc not renamed in press
@@ -99,33 +122,98 @@ def get_sites_being_renamed(self):
)
return [json.loads(d_str)["new_name"] for d_str in renaming_sites]
+ def get_active_site_domains(self):
+ return frappe.get_all(
+ "Site Domain", {"domain": ("like", f"%{self.name}"), "status": "Active"}, pluck="name"
+ )
+
+ def get_active_sites(self):
+ return frappe.get_all("Site", {"status": ("!=", "Archived"), "domain": self.name}, pluck="name")
+
def get_active_domains(self):
- active_sites = frappe.get_all(
- "Site", {"status": ("!=", "Archived"), "domain": self.name}, pluck="name"
+ active_domains = self.get_active_sites()
+ active_domains.extend(self.get_sites_being_renamed())
+ active_domains.extend(self.get_active_site_domains())
+ return set(active_domains)
+
+ def get_default_cluster_proxies(self):
+ return frappe.get_all(
+ "Proxy Server", {"status": "Active", "cluster": self.default_cluster}, pluck="name"
)
- active_sites.extend(self.get_sites_being_renamed())
- return active_sites
- def remove_unused_cname_records(self, proxy: str):
+ def remove_unused_cname_records(self):
+ proxies = frappe.get_all("Proxy Server", {"status": "Active"}, pluck="name")
+
+ default_proxies = self.get_default_cluster_proxies()
+
for page in self.get_dns_record_pages():
to_delete = []
frappe.db.commit()
- active = self.get_active_domains()
+ active_domains = self.get_active_domains()
for record in page["ResourceRecordSets"]:
- if record["Type"] == "CNAME" and record["ResourceRecords"][0]["Value"] == proxy:
+ # Only look at CNAME records that point to a proxy server
+ value = record["ResourceRecords"][0]["Value"]
+ if record["Type"] == "CNAME" and value in proxies:
domain = record["Name"].strip(".")
- if domain not in active:
- to_delete.append({"name": domain, "ttl": record["TTL"]})
+ # Delete inactive records
+ if domain not in active_domains: # noqa: SIM114
+ record["Name"] = domain
+ to_delete.append(record)
+ # Delete records that point to a proxy in the default_cluster
+ # These are covered by * records
+ elif value in default_proxies:
+ record["Name"] = domain
+ to_delete.append(record)
if to_delete:
- self.delete_dns_records(to_delete, proxy)
+ self.delete_dns_records(to_delete)
+
+ def update_dns_records_for_sites(self, sites: list[str], proxy_server: str):
+ if self.generic_dns_provider:
+ return
+
+ # update records in batches of 500
+ batch_size = 500
+ for i in range(0, len(sites), batch_size):
+ changes = []
+ for site in sites[i : i + batch_size]:
+ changes.append(
+ {
+ "Action": "UPSERT",
+ "ResourceRecordSet": {
+ "Name": site,
+ "Type": "CNAME",
+ "TTL": 600,
+ "ResourceRecords": [{"Value": proxy_server}],
+ },
+ }
+ )
+
+ self.boto3_client.change_resource_record_sets(
+ ChangeBatch={"Changes": changes}, HostedZoneId=self.hosted_zone
+ )
+
+ @frappe.whitelist()
+ def add_to_proxies(self):
+ proxies = frappe.get_all("Proxy Server", {"status": "Active"}, pluck="name")
+ for proxy_name in proxies:
+ proxy: ProxyServer = frappe.get_doc("Proxy Server", proxy_name)
+ proxy.append("domains", {"domain": self.name})
+ proxy.save()
+ proxy.setup_wildcard_hosts()
def cleanup_cname_records():
domains = frappe.get_all("Root Domain", pluck="name")
- proxies = frappe.get_all("Proxy Server", pluck="name")
- for proxy in proxies:
- for domain_name in domains:
- domain = frappe.get_doc("Root Domain", domain_name)
- domain.remove_unused_cname_records(proxy)
+ for domain_name in domains:
+ domain = RootDomain("Root Domain", domain_name)
+ if domain.generic_dns_provider:
+ continue
+
+ domain.remove_unused_cname_records()
+
+
+@redis_cache(ttl=3600)
+def get_domains():
+ return frappe.get_all("Root Domain", filters={"enabled": ["=", "1"]}, pluck="name")
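update_dns_records_for_sites slices the site list because Route 53 caps how many changes fit in a single ChangeBatch, and each batch is applied atomically. A self-contained sketch of the batching, using only the boto3 Route 53 client API (zone ID and record values hypothetical):

import boto3


def upsert_cnames(zone_id: str, names: list[str], target: str, batch_size: int = 500):
    client = boto3.client("route53")
    for i in range(0, len(names), batch_size):
        changes = [
            {
                "Action": "UPSERT",
                "ResourceRecordSet": {
                    "Name": name,
                    "Type": "CNAME",
                    "TTL": 600,
                    "ResourceRecords": [{"Value": target}],
                },
            }
            for name in names[i : i + batch_size]
        ]
        client.change_resource_record_sets(
            ChangeBatch={"Changes": changes}, HostedZoneId=zone_id
        )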
diff --git a/press/press/doctype/root_domain/test_root_domain.py b/press/press/doctype/root_domain/test_root_domain.py
index b2d20526365..9043305894e 100644
--- a/press/press/doctype/root_domain/test_root_domain.py
+++ b/press/press/doctype/root_domain/test_root_domain.py
@@ -1,22 +1,15 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2021, Frappe and Contributors
# See license.txt
-from datetime import datetime, timedelta
-
import json
-import unittest
+from datetime import datetime, timedelta
from unittest.mock import Mock, patch
import frappe
+from frappe.tests.utils import FrappeTestCase
from press.press.doctype.agent_job.agent_job import AgentJob
-from press.press.doctype.database_server.test_database_server import (
- create_test_database_server,
-)
-from press.press.doctype.proxy_server.test_proxy_server import create_test_proxy_server
from press.press.doctype.root_domain.root_domain import RootDomain
-from press.press.doctype.server.test_server import create_test_server
@patch.object(RootDomain, "after_insert", new=Mock())
@@ -38,16 +31,21 @@ def create_test_root_domain(
@patch.object(AgentJob, "after_insert", new=Mock())
-class TestRootDomain(unittest.TestCase):
+class TestRootDomain(FrappeTestCase):
def tearDown(self):
frappe.db.rollback()
def _create_fake_rename_job(self, site_name: str, creation=None):
- if not creation:
- creation = datetime.now()
- server = create_test_server(
- create_test_proxy_server().name, create_test_database_server().name
+ from press.press.doctype.database_server.test_database_server import (
+ create_test_database_server,
)
+ from press.press.doctype.proxy_server.test_proxy_server import (
+ create_test_proxy_server,
+ )
+ from press.press.doctype.server.test_server import create_test_server
+
+ creation = creation or frappe.utils.now_datetime()
+ server = create_test_server(create_test_proxy_server().name, create_test_database_server().name)
job = frappe.get_doc(
{
diff --git a/press/press/doctype/scale_step/__init__.py b/press/press/doctype/scale_step/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/scale_step/scale_step.json b/press/press/doctype/scale_step/scale_step.json
new file mode 100644
index 00000000000..d7d525c1c3d
--- /dev/null
+++ b/press/press/doctype/scale_step/scale_step.json
@@ -0,0 +1,82 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-11-16 15:08:56.885130",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "step_name",
+ "method_name",
+ "status",
+ "job_type",
+ "job",
+ "attempt",
+ "is_waiting",
+ "output"
+ ],
+ "fields": [
+ {
+ "fieldname": "step_name",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Name"
+ },
+ {
+ "fieldname": "method_name",
+ "fieldtype": "Data",
+ "label": "Method"
+ },
+ {
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Status",
+ "options": "Pending\nRunning\nSuccess\nFailure"
+ },
+ {
+ "fieldname": "job_type",
+ "fieldtype": "Link",
+ "label": "Job Type",
+ "options": "DocType"
+ },
+ {
+ "fieldname": "job",
+ "fieldtype": "Dynamic Link",
+ "in_list_view": 1,
+ "label": "Job",
+ "options": "job_type"
+ },
+ {
+ "fieldname": "attempt",
+ "fieldtype": "Int",
+ "label": "Attempt"
+ },
+ {
+ "default": "0",
+ "fieldname": "is_waiting",
+ "fieldtype": "Check",
+ "label": "Is Waiting"
+ },
+ {
+ "fieldname": "output",
+ "fieldtype": "Long Text",
+ "label": "Output"
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2025-11-16 15:11:38.325437",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Scale Step",
+ "owner": "Administrator",
+ "permissions": [],
+ "row_format": "Dynamic",
+ "rows_threshold_for_grid_search": 20,
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/scale_step/scale_step.py b/press/press/doctype/scale_step/scale_step.py
new file mode 100644
index 00000000000..d80afb3056f
--- /dev/null
+++ b/press/press/doctype/scale_step/scale_step.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class ScaleStep(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ attempt: DF.Int
+ is_waiting: DF.Check
+ job: DF.DynamicLink | None
+ job_type: DF.Link | None
+ method_name: DF.Data | None
+ output: DF.LongText | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ status: DF.Literal["Pending", "Running", "Success", "Failure"]
+ step_name: DF.Data | None
+ # end: auto-generated types
+ pass
diff --git a/press/press/doctype/scheduled_auto_update_log/scheduled_auto_update_log.py b/press/press/doctype/scheduled_auto_update_log/scheduled_auto_update_log.py
index 8d97ca5ccd0..76b2ef87294 100644
--- a/press/press/doctype/scheduled_auto_update_log/scheduled_auto_update_log.py
+++ b/press/press/doctype/scheduled_auto_update_log/scheduled_auto_update_log.py
@@ -6,4 +6,23 @@
class ScheduledAutoUpdateLog(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ document_name: DF.DynamicLink | None
+ document_type: DF.Link | None
+ error: DF.Text | None
+ status: DF.Literal["Failed", "Success"]
+ was_scheduled_for_day: DF.Data | None
+ was_scheduled_for_frequency: DF.Data | None
+ was_scheduled_for_month_day: DF.Data | None
+ was_scheduled_for_month_end: DF.Check
+ was_scheduled_for_time: DF.Time | None
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/scheduled_auto_update_log/test_scheduled_auto_update_log.py b/press/press/doctype/scheduled_auto_update_log/test_scheduled_auto_update_log.py
index 1e6eaac2b8e..22ac2bc3c8b 100644
--- a/press/press/doctype/scheduled_auto_update_log/test_scheduled_auto_update_log.py
+++ b/press/press/doctype/scheduled_auto_update_log/test_scheduled_auto_update_log.py
@@ -2,8 +2,8 @@
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestScheduledAutoUpdateLog(unittest.TestCase):
+class TestScheduledAutoUpdateLog(FrappeTestCase):
pass
diff --git a/press/press/doctype/security_update/__init__.py b/press/press/doctype/security_update/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/security_update/security_update.js b/press/press/doctype/security_update/security_update.js
new file mode 100644
index 00000000000..e895c671095
--- /dev/null
+++ b/press/press/doctype/security_update/security_update.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2023, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Security Update", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/security_update/security_update.json b/press/press/doctype/security_update/security_update.json
new file mode 100644
index 00000000000..cd3a8cfb578
--- /dev/null
+++ b/press/press/doctype/security_update/security_update.json
@@ -0,0 +1,142 @@
+{
+ "actions": [],
+ "creation": "2023-08-03 15:58:45.992061",
+ "default_view": "List",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "server_type",
+ "server",
+ "column_break_4pwt",
+ "datetime",
+ "job_status",
+ "section_break_gadl",
+ "package",
+ "version",
+ "column_break_8rko",
+ "priority",
+ "priority_level",
+ "security_update_status",
+ "package_meta_tab",
+ "package_meta",
+ "change_log_tab",
+ "change_log"
+ ],
+ "fields": [
+ {
+ "fieldname": "server_type",
+ "fieldtype": "Select",
+ "label": "Server Type",
+ "options": "Server\nDatabase Server\nProxy Server"
+ },
+ {
+ "fieldname": "server",
+ "fieldtype": "Dynamic Link",
+ "in_list_view": 1,
+ "label": "Server",
+ "options": "server_type"
+ },
+ {
+ "fieldname": "datetime",
+ "fieldtype": "Datetime",
+ "label": "Datetime"
+ },
+ {
+ "fieldname": "column_break_4pwt",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "section_break_gadl",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "package",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Package",
+ "search_index": 1
+ },
+ {
+ "fieldname": "priority",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Priority",
+ "options": "High\nMedium\nLow"
+ },
+ {
+ "fieldname": "column_break_8rko",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "job_status",
+ "fieldtype": "Select",
+ "label": "Job Status",
+ "options": "Pending\nWIP\nCompleted",
+ "read_only": 1
+ },
+ {
+ "fieldname": "version",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Version",
+ "search_index": 1
+ },
+ {
+ "fieldname": "security_update_status",
+ "fieldtype": "Select",
+ "label": "Security Update Status",
+ "options": "Pending\nImplemented"
+ },
+ {
+ "fieldname": "change_log",
+ "fieldtype": "Code",
+ "label": "Change Log"
+ },
+ {
+ "fieldname": "change_log_tab",
+ "fieldtype": "Tab Break",
+ "label": "Change Log"
+ },
+ {
+ "fieldname": "package_meta_tab",
+ "fieldtype": "Tab Break",
+ "label": "Package Meta"
+ },
+ {
+ "fieldname": "package_meta",
+ "fieldtype": "Code",
+ "label": "Package Meta"
+ },
+ {
+ "fieldname": "priority_level",
+ "fieldtype": "Select",
+ "label": "Priority Level",
+ "options": "1\n2\n3"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2023-08-09 14:06:04.009765",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Security Update",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/security_update/security_update.py b/press/press/doctype/security_update/security_update.py
new file mode 100644
index 00000000000..0ec23850842
--- /dev/null
+++ b/press/press/doctype/security_update/security_update.py
@@ -0,0 +1,168 @@
+# Copyright (c) 2023, Frappe and contributors
+# For license information, please see license.txt
+
+import re
+
+import frappe
+from frappe.model.document import Document
+from frappe.utils import now_datetime
+
+from press.runner import Ansible
+from press.utils import log_error
+
+
+class SecurityUpdate(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ change_log: DF.Code | None
+ datetime: DF.Datetime | None
+ job_status: DF.Literal["Pending", "WIP", "Completed"]
+ package: DF.Data | None
+ package_meta: DF.Code | None
+ priority: DF.Literal["High", "Medium", "Low"]
+ priority_level: DF.Literal["1", "2", "3"]
+ security_update_status: DF.Literal["Pending", "Implemented"]
+ server: DF.DynamicLink | None
+ server_type: DF.Literal["Server", "Database Server", "Proxy Server"]
+ version: DF.Data | None
+ # end: auto-generated types
+
+ @staticmethod
+ def fetch_security_updates(server_obj):
+ """Fetch security updates"""
+ try:
+ ansible = Ansible(
+ playbook="security_update.yml",
+ server=server_obj,
+ user=server_obj._ssh_user(),
+ port=server_obj._ssh_port(),
+ )
+ play = ansible.run()
+ if play.status == "Success":
+ package_list = SecurityUpdate._prepare_package_list(ansible.play)
+ SecurityUpdate._fetch_package_meta(package_list, server_obj)
+
+ except Exception:
+ log_error("Fetch security updates exception", server=server_obj.as_dict())
+
+ @staticmethod
+ def _prepare_package_list(play):
+ packages = []
+ filters = {"task": "Fetch packages due for security updates", "play": play}
+ packages_str = frappe.db.get_value("Ansible Task", filters, "output")
+
+ if packages_str:
+ for package_string in packages_str.split("\n"):
+ package_name = package_string.split("/")[0]
+
+ if package_name == "Listing...":
+ continue
+
+ packages.append(package_name)
+
+ return packages
+
+ @staticmethod
+ def _fetch_package_meta(package_list, server_obj):
+ package_list = package_list[:6]
+
+ for package in package_list:
+ try:
+ ansible = Ansible(
+ playbook="security_update.yml",
+ server=server_obj,
+ user=server_obj._ssh_user(),
+ port=server_obj._ssh_port(),
+ variables={"fetch_package_meta": True, "package": package},
+ )
+ play = ansible.run()
+ if play.status == "Success":
+ SecurityUpdate._create_security_update(package, ansible.play, server_obj)
+ except Exception:
+ log_error("Fetch package meta exception", server=server_obj.as_dict())
+
+ @staticmethod
+ def _create_security_update(package, play, server_obj):
+ package_meta = SecurityUpdate.get_package_meta_from_log(play)
+ package_change_log = SecurityUpdate.get_package_change_log(play)
+ version = SecurityUpdate.get_package_version(package_meta)
+ priority, level = SecurityUpdate.get_package_priority_and_level(package_meta)
+
+ if frappe.db.exists(
+ "Security Update",
+ {"package": package, "server": server_obj.name, "version": version},
+ ):
+ return
+
+ try:
+ security_update = frappe.new_doc("Security Update")
+ security_update.update(
+ {
+ "package": package,
+ "server_type": server_obj.doctype,
+ "server": server_obj.name,
+ "package_meta": package_meta,
+ "change_log": package_change_log,
+ "version": version,
+ "priority": priority,
+ "datetime": now_datetime(),
+ "priority_level": level,
+ }
+ )
+ security_update.insert(ignore_permissions=True)
+ frappe.db.commit()
+ except Exception:
+ log_error("Create security update exception", server=server_obj.as_dict())
+
+ @staticmethod
+ def get_package_meta_from_log(play):
+ filters = {"task": "Fetch package meta", "play": play}
+ package_meta_str = frappe.db.get_value("Ansible Task", filters, "output")
+
+ if package_meta_str:
+ return package_meta_str
+
+ return None
+
+ @staticmethod
+ def get_package_change_log(play):
+ filters = {"task": "Fetch package change log", "play": play}
+ package_change_log = frappe.db.get_value("Ansible Task", filters, "output")
+
+ if package_change_log:
+ return package_change_log
+
+ return None
+
+ @staticmethod
+ def get_package_version(package_meta):
+ try:
+ # do the search inside the try: package_meta may be None
+ return re.search("Version:(.*)", package_meta).group(1)
+ except Exception:
+ pass
+
+ return None
+
+ @staticmethod
+ def get_package_priority_and_level(package_meta):
+ priority_mapper = {"required": "High", "standard": "Medium", "optional": "Low"}
+ priority_level_mapper = {"High": 1, "Medium": 2, "Low": 3}
+ try:
+ # do the search inside the try: package_meta may be None
+ match = re.search("Priority:(.*)", package_meta)
+ priority = priority_mapper.get(match.group(1).strip(), "Low")
+ priority_level = priority_level_mapper.get(priority, 3)
+
+ return priority, priority_level
+ except Exception:
+ pass
+
+ return "Low", 3
diff --git a/press/press/doctype/security_update/test_security_update.py b/press/press/doctype/security_update/test_security_update.py
new file mode 100644
index 00000000000..9d7291b88a9
--- /dev/null
+++ b/press/press/doctype/security_update/test_security_update.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2023, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class TestSecurityUpdate(FrappeTestCase):
+ pass
diff --git a/press/press/doctype/security_update_check/security_update_check.py b/press/press/doctype/security_update_check/security_update_check.py
index b0c14c26d96..18b0d4b7abb 100644
--- a/press/press/doctype/security_update_check/security_update_check.py
+++ b/press/press/doctype/security_update_check/security_update_check.py
@@ -1,15 +1,35 @@
# Copyright (c) 2022, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
import frappe
from frappe.model.document import Document
-from press.telegram_utils import Telegram
+from press.press.doctype.telegram_message.telegram_message import TelegramMessage
from press.runner import Ansible
from press.utils import log_error
+if TYPE_CHECKING:
+ from press.press.doctype.server.server import Server
+
class SecurityUpdateCheck(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ play: DF.Link | None
+ server: DF.DynamicLink
+ server_type: DF.Link
+ status: DF.Literal["Pending", "Running", "Success", "Failure"]
+ # end: auto-generated types
+
def after_insert(self):
self.start()
@@ -22,9 +42,13 @@ def start(self):
def _start(self):
try:
+ server: Server = frappe.get_doc(self.server_type, self.server)
ansible = Ansible(
- playbook="security_update_check.yml",
- server=frappe.get_doc(self.server_type, self.server),
+ playbook="security_update.yml",
+ server=server,
+ user=server.ssh_user or "root",
+ port=server.ssh_port or 22,
+ variables={"validate_pending_security_updates": True},
)
self.reload()
self.play = ansible.play
@@ -52,5 +76,4 @@ def fail(self):
[Security Update Check]({domain}{self.get_url()})
"""
- telegram = Telegram()
- telegram.send(message)
+ TelegramMessage.enqueue(message=message)
diff --git a/press/press/doctype/self_hosted_server/self_hosted_server.js b/press/press/doctype/self_hosted_server/self_hosted_server.js
index e26a88d48a9..09269ff82c5 100644
--- a/press/press/doctype/self_hosted_server/self_hosted_server.js
+++ b/press/press/doctype/self_hosted_server/self_hosted_server.js
@@ -9,17 +9,29 @@ frappe.ui.form.on('Self Hosted Server', {
);
[
[__('Ping Server'), 'ping_ansible', false],
- [__('Fetch Ram'), 'fetch_system_ram', false, !frm.doc.ram],
- [__('Setup Nginx'), '_setup_nginx', false],
- [__('Update TLS'), 'update_tls', false],
- [__('Create Proxy Server'), 'create_proxy_server', false],
+ [
+ __('Create Proxy Server'),
+ 'create_proxy_server',
+ false,
+ !frm.doc.proxy_created && frm.doc.dedicated_proxy,
+ ],
[
__('Create Database Server'),
- 'create_db_server',
+ 'create_database_server',
false,
- frm.doc.proxy_created,
+ frm.doc.proxy_created &&
+ frm.doc.different_database_server &&
+ !frm.doc.database_setup,
],
- [__('Create App Server'), 'create_server', false, frm.doc.database_setup],
+ [
+ __('Create App Server'),
+ 'create_application_server',
+ false,
+ frm.doc.database_setup && !frm.doc.server_created,
+ ],
+ [__('Setup Nginx'), '_setup_nginx', false],
+ [__('Create TLS Certificate'), 'create_tls_certs', true],
+ [__('Update TLS'), 'update_tls', false],
[
__('Restore Files from Existing Sites'),
'restore_files',
@@ -44,7 +56,10 @@ frappe.ui.form.on('Self Hosted Server', {
true,
frm.doc.existing_bench_present && frm.doc.release_group,
],
- ,
+ [__('Fetch System Details'), 'fetch_system_specifications', false],
+ [__('Fetch Ram'), 'fetch_system_ram', false, !frm.doc.ram],
+ [__('Fetch Private IP'), 'fetch_private_ip', false, !frm.doc.private_ip],
].forEach(([label, method, confirm, condition]) => {
if (typeof condition === 'undefined' || condition) {
frm.add_custom_button(
diff --git a/press/press/doctype/self_hosted_server/self_hosted_server.json b/press/press/doctype/self_hosted_server/self_hosted_server.json
index 75470d748c6..8a3d213bb1d 100644
--- a/press/press/doctype/self_hosted_server/self_hosted_server.json
+++ b/press/press/doctype/self_hosted_server/self_hosted_server.json
@@ -1,5 +1,6 @@
{
"actions": [],
+ "allow_rename": 1,
"creation": "2023-03-07 11:56:33.641999",
"default_view": "List",
"doctype": "DocType",
@@ -7,46 +8,69 @@
"engine": "InnoDB",
"field_order": [
"title",
+ "new_server",
+ "different_database_server",
+ "server_url",
"hostname",
"domain",
- "server_url",
- "ip",
- "proxy_created",
- "database_setup",
- "server_created",
"column_break_3eap",
"status",
"cluster",
"team",
- "plan",
"release_group",
- "private_ip",
- "different_database_server",
- "existing_bench_present",
- "new_server",
- "section_break_0fky",
- "proxy_server",
- "column_break_bcsw",
- "agent_password",
"server_section",
+ "server_created",
"server",
+ "ip",
"ssh_user",
"column_break_smwr",
+ "existing_bench_present",
+ "plan",
+ "private_ip",
"ssh_port",
- "ram",
+ "section_break_0fky",
+ "dedicated_proxy",
+ "proxy_public_ip",
+ "proxy_created",
+ "proxy_server",
+ "column_break_bcsw",
+ "proxy_private_ip",
+ "agent_password",
"database_section",
- "database_server",
+ "is_managed_database",
+ "database_setup",
"mariadb_ip",
- "column_break_qvmo",
"mariadb_root_user",
+ "database_server",
+ "database_service",
+ "column_break_qvmo",
+ "mariadb_private_ip",
"mariadb_root_password",
+ "database_plan",
"existing_bench_tab",
"bench_directory",
"column_break_yb8y",
"frappe_version",
"section_break_33uh",
"apps",
- "sites"
+ "sites",
+ "system_information_tab",
+ "application_server_section",
+ "vendor",
+ "vcpus",
+ "ram",
+ "total_storage",
+ "swap_total",
+ "column_break_ep4j",
+ "instance_type",
+ "architecture",
+ "processor",
+ "distribution",
+ "database_server_section",
+ "db_vcpus",
+ "db_total_storage",
+ "column_break_krqz",
+ "db_ram"
],
"fields": [
{
@@ -54,7 +78,8 @@
"fieldname": "server",
"fieldtype": "Link",
"label": "Server",
- "options": "Server"
+ "options": "Server",
+ "search_index": 1
},
{
"fieldname": "bench_directory",
@@ -82,25 +107,30 @@
"fieldtype": "Column Break"
},
{
+ "collapsible": 1,
+ "collapsible_depends_on": "eval:doc.different_database_server",
"fieldname": "database_section",
"fieldtype": "Section Break",
"label": "Database"
},
{
+ "depends_on": "eval:!doc.is_managed_database",
"fieldname": "mariadb_root_user",
"fieldtype": "Data",
"label": "MariaDB Root User"
},
{
+ "depends_on": "eval:doc.different_database_server && !doc.is_managed_database",
"fieldname": "mariadb_ip",
"fieldtype": "Data",
- "label": "MariaDB IP"
+ "label": "MariaDB Public IP"
},
{
"fieldname": "column_break_qvmo",
"fieldtype": "Column Break"
},
{
+ "depends_on": "eval:!doc.is_managed_database",
"description": "This will be the MariaDB Root password if you're setting up a new server. If you're server already has a MariaDB root password, please put in the same password",
"fieldname": "mariadb_root_password",
"fieldtype": "Password",
@@ -113,8 +143,7 @@
"fieldname": "private_ip",
"fieldtype": "Data",
"in_list_view": 1,
- "label": "Private IP",
- "reqd": 1
+ "label": "Private IP"
},
{
"fetch_from": "server.team",
@@ -203,11 +232,12 @@
"label": "Database Setup"
},
{
- "depends_on": "eval:doc.database_setup",
+ "depends_on": "eval:doc.database_setup && !doc.is_managed_database",
"fieldname": "database_server",
"fieldtype": "Link",
"label": "Database Server",
- "options": "Database Server"
+ "options": "Database Server",
+ "search_index": 1
},
{
"fieldname": "release_group",
@@ -239,6 +269,8 @@
"label": "New Server"
},
{
+ "collapsible": 1,
+ "collapsible_depends_on": "eval:doc.dedicated_proxy",
"fieldname": "section_break_0fky",
"fieldtype": "Section Break",
"label": "Proxy"
@@ -248,7 +280,8 @@
"fieldtype": "Link",
"label": "Proxy Server",
"mandatory_depends_on": "eval:doc.proxy_created",
- "options": "Proxy Server"
+ "options": "Proxy Server",
+ "search_index": 1
},
{
"fieldname": "column_break_bcsw",
@@ -277,14 +310,13 @@
{
"fieldname": "server_url",
"fieldtype": "Data",
- "label": "Server URL",
- "reqd": 1
+ "label": "Server URL"
},
{
"fieldname": "plan",
"fieldtype": "Link",
"label": "Plan",
- "options": "Plan"
+ "options": "Server Plan"
},
{
"default": "0",
@@ -296,10 +328,130 @@
"fieldname": "ram",
"fieldtype": "Data",
"label": "RAM"
+ },
+ {
+ "fieldname": "system_information_tab",
+ "fieldtype": "Tab Break",
+ "label": "System Information"
+ },
+ {
+ "fieldname": "vcpus",
+ "fieldtype": "Data",
+ "label": "vCPUs"
+ },
+ {
+ "fieldname": "total_storage",
+ "fieldtype": "Data",
+ "label": "Total Storage"
+ },
+ {
+ "fieldname": "swap_total",
+ "fieldtype": "Data",
+ "label": "Swap Total"
+ },
+ {
+ "fieldname": "column_break_ep4j",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "instance_type",
+ "fieldtype": "Data",
+ "label": "Instance Type"
+ },
+ {
+ "fieldname": "architecture",
+ "fieldtype": "Data",
+ "label": "Architecture"
+ },
+ {
+ "fieldname": "processor",
+ "fieldtype": "Data",
+ "label": "Processor"
+ },
+ {
+ "fieldname": "distribution",
+ "fieldtype": "Data",
+ "label": "Distribution"
+ },
+ {
+ "fieldname": "vendor",
+ "fieldtype": "Data",
+ "label": "Vendor"
+ },
+ {
+ "fieldname": "proxy_public_ip",
+ "fieldtype": "Data",
+ "label": "Proxy Public IP"
+ },
+ {
+ "fieldname": "proxy_private_ip",
+ "fieldtype": "Data",
+ "label": "Proxy Private IP"
+ },
+ {
+ "depends_on": "eval:doc.different_database_server && !doc.is_managed_database",
+ "fieldname": "mariadb_private_ip",
+ "fieldtype": "Data",
+ "label": "MariaDB Private IP"
+ },
+ {
+ "fieldname": "database_plan",
+ "fieldtype": "Link",
+ "label": "Database Plan",
+ "options": "Server Plan"
+ },
+ {
+ "default": "0",
+ "fieldname": "dedicated_proxy",
+ "fieldtype": "Check",
+ "label": "Dedicated Proxy"
+ },
+ {
+ "fieldname": "application_server_section",
+ "fieldtype": "Section Break",
+ "label": "Application Server"
+ },
+ {
+ "fieldname": "database_server_section",
+ "fieldtype": "Section Break",
+ "label": "Database Server"
+ },
+ {
+ "fieldname": "db_vcpus",
+ "fieldtype": "Data",
+ "label": "vCPUs"
+ },
+ {
+ "fieldname": "db_total_storage",
+ "fieldtype": "Data",
+ "label": "Total Storage"
+ },
+ {
+ "fieldname": "column_break_krqz",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "db_ram",
+ "fieldtype": "Data",
+ "label": "RAM"
+ },
+ {
+ "default": "0",
+ "fieldname": "is_managed_database",
+ "fieldtype": "Check",
+ "label": "Is Managed Database"
+ },
+ {
+ "default": "AWS - RDS",
+ "depends_on": "eval:doc.is_managed_database",
+ "fieldname": "database_service",
+ "fieldtype": "Select",
+ "label": "Database Service",
+ "options": "AWS - RDS"
}
],
"links": [],
- "modified": "2023-06-26 15:28:02.524473",
+ "modified": "2024-05-29 11:41:41.304954",
"modified_by": "Administrator",
"module": "Press",
"name": "Self Hosted Server",
diff --git a/press/press/doctype/self_hosted_server/self_hosted_server.py b/press/press/doctype/self_hosted_server/self_hosted_server.py
index 4627c3db42a..327cc87daa0 100644
--- a/press/press/doctype/self_hosted_server/self_hosted_server.py
+++ b/press/press/doctype/self_hosted_server/self_hosted_server.py
@@ -1,34 +1,131 @@
# Copyright (c) 2023, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
+
import json
import frappe
from frappe.model.document import Document
+from frappe.model.naming import make_autoname
+
from press.runner import Ansible
from press.utils import log_error
-from tldextract import extract as sdext
-import time
+# from tldextract import extract as sdext
class SelfHostedServer(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.self_hosted_site_apps.self_hosted_site_apps import (
+ SelfHostedSiteApps,
+ )
+ from press.press.doctype.site_analytics_app.site_analytics_app import SiteAnalyticsApp
+
+ agent_password: DF.Password | None
+ apps: DF.Table[SiteAnalyticsApp]
+ architecture: DF.Data | None
+ bench_directory: DF.Data | None
+ cluster: DF.Link | None
+ database_plan: DF.Link | None
+ database_server: DF.Link | None
+ database_service: DF.Literal["AWS - RDS"]
+ database_setup: DF.Check
+ db_ram: DF.Data | None
+ db_total_storage: DF.Data | None
+ db_vcpus: DF.Data | None
+ dedicated_proxy: DF.Check
+ different_database_server: DF.Check
+ distribution: DF.Data | None
+ domain: DF.Data | None
+ existing_bench_present: DF.Check
+ frappe_version: DF.Data | None
+ hostname: DF.Data | None
+ instance_type: DF.Data | None
+ ip: DF.Data
+ is_managed_database: DF.Check
+ mariadb_ip: DF.Data | None
+ mariadb_private_ip: DF.Data | None
+ mariadb_root_password: DF.Password
+ mariadb_root_user: DF.Data | None
+ new_server: DF.Check
+ plan: DF.Link | None
+ private_ip: DF.Data | None
+ processor: DF.Data | None
+ proxy_created: DF.Check
+ proxy_private_ip: DF.Data | None
+ proxy_public_ip: DF.Data | None
+ proxy_server: DF.Link | None
+ ram: DF.Data | None
+ release_group: DF.Link | None
+ server: DF.Link | None
+ server_created: DF.Check
+ server_url: DF.Data | None
+ sites: DF.Table[SelfHostedSiteApps]
+ ssh_port: DF.Int
+ ssh_user: DF.Data | None
+ status: DF.Literal["Active", "Pending", "Broken", "Archived", "Unreachable"]
+ swap_total: DF.Data | None
+ team: DF.Link
+ title: DF.Data | None
+ total_storage: DF.Data | None
+ vcpus: DF.Data | None
+ vendor: DF.Data | None
+ # end: auto-generated types
+
+ def before_insert(self):
+ self.validate_is_duplicate()
+
def autoname(self):
- self.name = sdext(self.server_url).fqdn
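+        # e.g. series "SHS-00042" with a hybrid domain of "fc.dev" (illustrative)
+        # yields the document name "SHS-00042.fc.dev"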
+ series = make_autoname("SHS-.#####")
+ self.name = f"{series}.{self.hybrid_domain}"
+
+ self.hostname = series
+ self.domain = self.hybrid_domain
def validate(self):
+ self.set_proxy_details()
+ self.set_mariadb_config()
+ self.set_database_plan()
+
+ if not self.agent_password:
+ self.agent_password = frappe.generate_hash(length=32)
+
+ def validate_is_duplicate(self):
+ filters = {
+ "ip": self.ip,
+ "private_ip": self.private_ip,
+ "mariadb_ip": self.mariadb_ip,
+ "mariadb_private_ip": self.mariadb_private_ip,
+ "status": ("not in", ["Archived", "Broken"]),
+ }
+ duplicate_server = frappe.db.get_value("Self Hosted Server", filters, pluck="name")
+
+ if duplicate_server:
+ raise frappe.DuplicateEntryError(self.doctype, duplicate_server)
+
+ def set_proxy_details(self):
+ if self.proxy_created or self.proxy_server:
+ self.proxy_public_ip, self.proxy_private_ip = frappe.db.get_value(
+ "Proxy Server", self.proxy_server, ["ip", "private_ip"]
+ )
+
+ def set_mariadb_config(self):
if not self.mariadb_ip:
- self.mariadb_ip = self.private_ip
+ self.mariadb_ip = self.ip
+ if not self.mariadb_private_ip:
+ self.mariadb_private_ip = self.private_ip
if not self.mariadb_root_user:
self.mariadb_root_user = "root"
if not self.mariadb_root_password:
self.mariadb_root_password = frappe.generate_hash(length=32)
- if not self.agent_password:
- self.agent_password = frappe.generate_hash(length=32)
- if not self.hostname or not self.domain:
- extracted_url = sdext(self.server_url)
- self.hostname = extracted_url.subdomain
- self.domain = extracted_url.registered_domain
@frappe.whitelist()
def fetch_apps_and_sites(self):
@@ -61,7 +158,7 @@ def _get_sites(self):
playbook="get_sites.yml",
server=self,
user=self.ssh_user or "root",
- port=self.ssh_port or "22",
+ port=self.ssh_port or 22,
variables={"bench_path": self.bench_directory},
)
play = ansible.run()
@@ -174,7 +271,7 @@ def create_new_rg(self):
try:
for app in task_result:
branches.append(app["branch"])
- if not frappe.db.exists("App Source", {"app": app["app"], "branch": app["branch"]}):
+ if not frappe.db.exists("App Source", {"app": app["app"], "branch": app["branch"]}): # noqa: SIM102
if not frappe.db.exists("App", {"_newname": app["app"]}):
app_doc = frappe.get_doc(
{
@@ -224,29 +321,61 @@ def create_new_rg(self):
self.status = "Active"
self.save()
+ def set_database_plan(self):
+ if self.database_plan:
+ return
+
+ if not self.different_database_server and not frappe.db.exists("Server Plan", "Unlimited"):
+ self._create_server_plan("Unlimited")
+ self.database_plan = "Unlimited"
+
+ def _create_server_plan(self, plan_name):
+ plan = frappe.new_doc("Server Plan")
+ plan.name = plan_name
+ plan.title = plan_name
+ plan.price_inr = 0
+ plan.price_usd = 0
+ plan.save()
+
@frappe.whitelist()
- def create_db_server(self):
+ def create_database_server(self):
try:
- db_server = frappe.new_doc("Database Server")
- db_server.hostname = self.hostname
- db_server.title = self.title
- db_server.is_self_hosted = True
- db_server.self_hosted_server_domain = self.domain
- db_server.ip = self.ip
- db_server.private_ip = self.private_ip
- db_server.team = self.team
- db_server.ssh_user = self.ssh_user
- db_server.ssh_port = self.ssh_port
- db_server.mariadb_root_password = self.get_password("mariadb_root_password")
- db_server.cluster = self.cluster
- db_server.agent_password = self.get_password("agent_password")
- db_server.is_server_setup = False if self.new_server else True
- _db = db_server.insert()
- _db.create_subscription("Unlimited")
+ if not self.mariadb_ip:
+ frappe.throw("Public IP for MariaDB not found")
+
+ db_server = frappe.new_doc(
+ "Database Server",
+ **{
+ "hostname": self.get_hostname("Database Server"),
+ "title": f"{self.title} Database",
+ "is_self_hosted": True,
+ "domain": self.hybrid_domain,
+ "self_hosted_server_domain": self.hybrid_domain,
+ "ip": self.mariadb_ip,
+ "private_ip": self.mariadb_private_ip,
+ "team": self.team,
+ "ssh_user": self.ssh_user,
+ "ssh_port": self.ssh_port,
+ "mariadb_root_password": self.get_password("mariadb_root_password"),
+ "cluster": self.cluster,
+ "agent_password": self.get_password("agent_password"),
+ "is_server_setup": not self.new_server,
+ "plan": self.database_plan,
+ },
+ ).insert()
+
+ db_server.create_subscription(self.database_plan)
self.database_setup = True
- self.database_server = _db.name
+ self.database_server = db_server.name
self.status = "Active"
self.save()
+
+ if not frappe.flags.in_test:
+ db_server.create_dns_record()
+
+ frappe.db.commit()
+
+ frappe.msgprint(f"Database server record {db_server.name} created")
except Exception:
frappe.throw("Adding Server to Database Server Doctype failed")
self.status = "Broken"
@@ -264,10 +393,7 @@ def append_site_configs(self, play_name):
"output",
)
task_result = json.loads(
- ansible_task_op.replace("'", '"')
- .replace('"{', "{")
- .replace('}"', "}")
- .replace("\\n", "")
+ ansible_task_op.replace("'", '"').replace('"{', "{").replace('}"', "}").replace("\\n", "")
)
self.status = "Pending"
for site in task_result:
@@ -284,39 +410,54 @@ def append_site_configs(self, play_name):
self.save()
@frappe.whitelist()
- def create_server(self):
+ def create_application_server(self):
"""
Add a new record to the Server doctype
"""
+
try:
- server = frappe.new_doc("Server")
- server.hostname = self.hostname
- server.title = self.title
- server.is_self_hosted = True
- server.self_hosted_server_domain = self.domain
- server.self_hosted_mariadb_server = self.private_ip
- server.team = self.team
- server.ip = self.ip
- server.private_ip = self.private_ip
- server.ssh_user = self.ssh_user
- server.ssh_port = self.ssh_port
- server.proxy_server = self.proxy_server
- server.database_server = self.database_server
- server.cluster = self.cluster
- server.agent_password = self.get_password("agent_password")
- server.self_hosted_mariadb_root_password = self.get_password("mariadb_root_password")
- server.ram = self.ram
- server.new_worker_allocation = True
- new_server = server.insert()
- new_server.create_subscription("Unlimited")
- self.server = new_server.name
+ server = frappe.new_doc(
+ "Server",
+ **{
+ "hostname": self.get_hostname("Server"),
+ "title": f"{self.title} Application",
+ "is_self_hosted": True,
+ "domain": self.hybrid_domain,
+ "self_hosted_server_domain": self.hybrid_domain,
+ "team": self.team,
+ "ip": self.ip,
+ "private_ip": self.private_ip,
+ "ssh_user": self.ssh_user,
+ "ssh_port": self.ssh_port,
+ "proxy_server": self.proxy_server,
+ "database_server": self.database_server,
+ "cluster": self.cluster,
+ "agent_password": self.get_password("agent_password"),
+ "self_hosted_mariadb_root_password": self.get_password("mariadb_root_password"),
+ "ram": self.ram,
+ "new_worker_allocation": True,
+ "plan": self.plan,
+ },
+ ).insert()
+
+ self.server = server.name
self.status = "Active"
self.server_created = True
+
+ if not frappe.flags.in_test:
+ server.create_dns_record()
+
+ frappe.db.commit()
+
except Exception as e:
self.status = "Broken"
frappe.throw("Server Creation Error", exc=e)
+
self.save()
+ frappe.msgprint(f"Server record {server.name} created")
+ return server
+
@frappe.whitelist()
def create_new_sites(self):
"""
@@ -357,9 +498,7 @@ def create_new_sites(self):
@frappe.whitelist()
def restore_files(self):
- frappe.enqueue_doc(
- self.doctype, self.name, "_restore_files", queue="long", timeout=2400
- )
+ frappe.enqueue_doc(self.doctype, self.name, "_restore_files", queue="long", timeout=2400)
def _restore_files(self):
"""
@@ -396,70 +535,90 @@ def _restore_files(self):
log_error("Self Hosted Restore error", server=self.name)
self.save()
+ def get_hostname(self, server_type):
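+        # e.g. server_type "Server" in cluster "Default" produces a hostname
+        # like "hybrid-f-00001-default" (the index comes from the naming series)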
+ symbolic_name = get_symbolic_name(server_type)
+ series = f"{symbolic_name}-{self.cluster}.#####"
+
+ index = make_autoname(series)[-5:]
+
+ return f"{symbolic_name}-{index}-{self.cluster}".lower()
+
+ @property
+ def hybrid_domain(self):
+ return frappe.db.get_single_value("Press Settings", "hybrid_domain")
+
@frappe.whitelist()
def create_proxy_server(self):
"""
Add a new record to the Proxy Server doctype
"""
try:
- server = frappe.new_doc("Proxy Server")
- server.hostname = self.hostname
- server.title = self.title
- server.is_self_hosted = True
- server.domain = self.domain
- server.self_hosted_server_domain = self.domain
- server.team = self.team
- server.ip = self.ip
- server.private_ip = self.private_ip
- server.ssh_user = self.ssh_user
- server.is_primary = True
- server.cluster = self.cluster
- server.ssh_port = self.ssh_port
- new_server = server.insert()
- self.agent_password = new_server.get_password("agent_password")
- self.proxy_server = new_server.name
- self.proxy_server_ip = self.private_ip
+ proxy_server = frappe.new_doc(
+ "Proxy Server",
+ **{
+ "hostname": self.get_hostname("Proxy Server"),
+ "title": self.title,
+ "is_self_hosted": True,
+ "domain": self.hybrid_domain,
+ "self_hosted_server_domain": self.hybrid_domain,
+ "team": self.team,
+ "ip": self.proxy_public_ip,
+ "private_ip": self.proxy_private_ip,
+ "is_primary": True,
+ "cluster": self.cluster,
+ "ssh_user": self.ssh_user,
+ "ssh_port": self.ssh_port,
+ },
+ ).insert()
+
+ self.agent_password = proxy_server.get_password("agent_password")
+ self.proxy_server = proxy_server.name
self.status = "Active"
self.proxy_created = True
except Exception as e:
self.status = "Broken"
- frappe.throw("Server Creation Error", exc=e)
+ frappe.throw("Self Hosted Proxy Server Creation Error", exc=e)
self.save()
+ frappe.msgprint(f"Proxy server record {proxy_server.name} created")
+
@frappe.whitelist()
- def create_tls_certs(self):
+ def create_tls_certs(self, domain):
try:
- tls_cert = frappe.get_doc(
- {
- "doctype": "TLS Certificate",
- "domain": self.name,
- "team": self.team,
- "wildcard": False,
- }
- ).insert()
- return tls_cert.name
+            tls_cert = frappe.db.get_value("TLS Certificate", {"domain": domain})
+
+ if not tls_cert:
+ tls_cert = frappe.new_doc(
+ "TLS Certificate",
+ **{
+ "domain": domain,
+ "team": self.team,
+ "wildcard": False,
+ },
+ ).insert()
+ tls_cert = tls_cert.name
+
+ return tls_cert
except Exception:
log_error("TLS Certificate(SelfHosted) Creation Error")
- @frappe.whitelist()
- def _setup_nginx(self):
- frappe.enqueue_doc(self.doctype, self.name, "setup_nginx", queue="long")
-
- @frappe.whitelist()
- def setup_nginx(self):
+ def setup_nginx(self, server):
try:
ansible = Ansible(
playbook="self_hosted_nginx.yml",
- server=self,
- user=self.ssh_user or "root",
- port=self.ssh_port or "22",
- variables={"domain": self.name},
+ server=server,
+ user=server.ssh_user or "root",
+ port=server.ssh_port or 22,
+ variables={
+ "domain": self.name,
+ "press_domain": frappe.db.get_single_value("Press Settings", "domain"), # for ssl renewal
+ },
)
play = ansible.run()
if play.status == "Success":
return True
except Exception:
- log_error("TLS Cert Generation Failed", server=self.as_dict())
+ log_error("Nginx setup failed for self hosted server", server=self.as_dict())
return False
@frappe.whitelist()
@@ -468,52 +627,29 @@ def update_tls(self):
update_server_tls_certifcate,
)
- cert = frappe.get_last_doc(
- "TLS Certificate", {"domain": self.name, "status": "Active"}
- )
+ try:
+ cert = frappe.get_last_doc("TLS Certificate", {"domain": self.server, "status": "Active"})
+ except frappe.DoesNotExistError:
+ cert = frappe.get_last_doc("TLS Certificate", {"domain": self.name, "status": "Active"})
+
update_server_tls_certifcate(self, cert)
def process_tls_cert_update(self):
- server = frappe.get_doc("Server", self.name)
- db_server = frappe.get_doc("Database Server", self.name)
- if not (server.is_server_setup and db_server.is_server_setup):
- db_server.setup_server()
- time.sleep(60)
- server.setup_server()
- else:
- self.update_tls()
-
- def create_subscription(self):
- frappe.get_doc(
- {
- "doctype": "Plan Change",
- "document_type": self.doctype,
- "document_name": self.name,
- "from_plan": "",
- "to_plan": self.plan,
- "type": "Initial Plan",
- "timestamp": self.creation,
- }
- ).insert(ignore_permissions=True)
+ self.update_tls()
- @frappe.whitelist()
- def fetch_system_ram(self, play_id=None):
- """
- Fetch the RAM from the Ping Ansible Play
- """
- if not play_id:
- play_id = frappe.get_last_doc(
- "Ansible Play", {"server": self.name, "play": "Ping Server"}
- ).name
- play = frappe.get_doc(
- "Ansible Task", {"status": "Success", "play": play_id, "task": "Gather Facts"}
- )
- try:
- result = json.loads(play.result)
- self.ram = result["ansible_facts"]["memtotal_mb"]
- self.save()
- except Exception:
- log_error("Fetching RAM failed", server=self.as_dict())
+ def setup_server(self):
+ self._setup_db_server()
+
+ if self.different_database_server:
+ self._setup_app_server()
+
+ def _setup_db_server(self):
+ db_server = frappe.get_doc("Database Server", self.database_server)
+ db_server.setup_server()
+
+ def _setup_app_server(self):
+ app_server = frappe.get_doc("Server", self.server)
+ app_server.setup_server()
@property
def subscription(self):
@@ -528,3 +664,174 @@ def can_charge_for_subscription(self, subscription=None):
and self.team
and self.team != "Administrator"
)
+
+ def _get_play_id(self):
+ try:
+ play_id = frappe.get_last_doc("Ansible Play", {"server": self.server, "play": "Ping Server"}).name
+ except frappe.DoesNotExistError:
+ play_id = frappe.get_last_doc("Ansible Play", {"server": self.name, "play": "Ping Server"}).name
+
+ return play_id
+
+ def _get_play(self, play_id):
+ play = frappe.get_doc("Ansible Task", {"status": "Success", "play": play_id, "task": "Gather Facts"})
+
+ return json.loads(play.result)
+
+ @frappe.whitelist()
+ def fetch_system_ram(self, play_id=None, server_type="app"):
+ """
+ Fetch the RAM from the Ping Ansible Play
+ """
+ if not play_id:
+ play_id = self._get_play_id()
+
+ try:
+            result = self._get_play(play_id)
+
+ if server_type == "app":
+ self.ram = result["ansible_facts"]["memtotal_mb"]
+ else:
+ self.db_ram = result["ansible_facts"]["memtotal_mb"]
+
+ self.save()
+ except Exception:
+ log_error("Fetching RAM failed", server=self.as_dict())
+
+ def validate_private_ip(self, play_id=None, server_type="app"):
+ if not play_id:
+ play_id = self._get_play_id()
+
+ all_ipv4_addresses = []
+ result = self._get_play(play_id)
+
+ try:
+ all_ipv4_addresses = result["ansible_facts"]["all_ipv4_addresses"]
+ except Exception:
+ log_error("Fetching Private IP failed", server=self.as_dict())
+ return
+
+ private_ip = self.private_ip
+ public_ip = self.ip
+ if server_type == "db":
+ private_ip = self.mariadb_private_ip
+ public_ip = self.mariadb_ip
+
+ if private_ip not in all_ipv4_addresses:
+            frappe.throw(f"Private IP {private_ip} is not associated with the server having public IP {public_ip}")
+
+ @frappe.whitelist()
+ def fetch_private_ip(self, play_id=None, server_type="app"):
+ """
+ Fetch the Private IP from the Ping Ansible Play
+ """
+ if not play_id:
+ play_id = self._get_play_id()
+
+ try:
+ result = self._get_play(play_id)
+
+ if server_type == "app":
+ self.private_ip = fetch_private_ip_based_on_vendor(result)
+ else:
+ self.mariadb_private_ip = fetch_private_ip_based_on_vendor(result)
+
+ self.save()
+ except Exception:
+ log_error("Fetching Private IP failed", server=self.as_dict())
+
+ @frappe.whitelist()
+ def fetch_system_specifications(self, play_id=None, server_type="app"):
+ """
+        Fetch the system specifications from the Ping Ansible Play
+ """
+ if not play_id:
+ play_id = self._get_play_id()
+
+ try:
+ result = self._get_play(play_id)
+ if server_type == "app":
+ self.vendor = result["ansible_facts"]["system_vendor"]
+ self.ram = result["ansible_facts"]["memtotal_mb"]
+ self.vcpus = result["ansible_facts"]["processor_vcpus"]
+ self.swap_total = result["ansible_facts"]["swaptotal_mb"]
+ self.architecture = result["ansible_facts"]["architecture"]
+ self.instance_type = result["ansible_facts"]["product_name"]
+ self.processor = result["ansible_facts"]["processor"][2]
+ self.distribution = result["ansible_facts"]["lsb"]["description"]
+ self.total_storage = self._get_total_storage(result)
+
+ else:
+ self.db_ram = result["ansible_facts"]["memtotal_mb"]
+ self.db_vcpus = result["ansible_facts"]["processor_vcpus"]
+ self.db_total_storage = self._get_total_storage(result)
+
+ self.save()
+ except Exception:
+ log_error("Fetching System Details Failed", server=self.as_dict())
+
+ def _get_total_storage(self, result):
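+        # The root block device name differs by vendor: vda on DigitalOcean
+        # droplets, nvme0n1 on EC2 Nitro instances, sda otherwise.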
+ match self.vendor:
+ case "DigitalOcean":
+ total_storage = result["ansible_facts"]["devices"]["vda"]["size"]
+ case "Amazon EC2":
+ total_storage = result["ansible_facts"]["devices"]["nvme0n1"]["size"]
+ case _:
+ total_storage = result["ansible_facts"]["devices"]["sda"]["size"]
+
+ return total_storage
+
+ def check_minimum_specs(self):
+ """
+ Check if the server meets the minimum requirements
+        i.e. RAM >= 4 GB, vCPUs >= 2, Storage >= 40 GB
+ """
+
+        if round(int(self.ram), -3) < 4000:  # round to the nearest thousand
+            frappe.throw(f"Minimum RAM requirement not met: minimum is 4 GB and available is {self.ram} MB")
+        if int(self.vcpus) < 2:
+            frappe.throw(
+                f"Minimum vCPU requirement not met: minimum is 2 cores and available is {self.vcpus}"
+            )
+
+ self._validate_disk()
+
+ return True
+
+ def _validate_disk(self):
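+        # total_storage is a human-readable string such as "25 GB";
+        # split it into the numeric value and the unit.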
+ disk_size = self.total_storage.split()[0]
+ disk_storage_unit = self.total_storage.split()[1]
+
+ if disk_storage_unit.upper() == "TB":
+ return True
+
+ if disk_storage_unit.upper() in ["GB", "MB"] and round(int(float(disk_size)), -1) < 40:
+ frappe.throw(
+                f"Minimum storage requirement not met: minimum is 40 GB and available is {self.total_storage}"
+ )
+ return None
+
+
+def fetch_private_ip_based_on_vendor(play_result: dict):
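+    # Which ansible fact holds the private address is a per-vendor heuristic;
+    # unknown vendors fall back to default_ipv4.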
+ vendor = play_result["ansible_facts"]["system_vendor"]
+ match vendor:
+ case "DigitalOcean":
+ return play_result["ansible_facts"]["all_ipv4_addresses"][1]
+ case "Hetzner":
+ return play_result["ansible_facts"]["all_ipv4_addresses"][1]
+ case "Amazon EC2":
+ return play_result["ansible_facts"]["default_ipv4"]["address"]
+ case "Microsoft Corporation":
+ return play_result["ansible_facts"]["all_ipv4_addresses"][0]
+ case "Google":
+ return play_result["ansible_facts"]["default_ipv4"]["address"]
+ case _:
+ return play_result["ansible_facts"]["default_ipv4"]["address"]
+
+
+def get_symbolic_name(server_type):
+ return {
+ "Proxy Server": "hybrid-n",
+ "Server": "hybrid-f",
+ "Database Server": "hybrid-m",
+ }.get(server_type, "hybrid-f")
diff --git a/press/press/doctype/self_hosted_server/test_self_hosted_server.py b/press/press/doctype/self_hosted_server/test_self_hosted_server.py
index 0bb9d0fcebf..f9e378d9539 100644
--- a/press/press/doctype/self_hosted_server/test_self_hosted_server.py
+++ b/press/press/doctype/self_hosted_server/test_self_hosted_server.py
@@ -2,70 +2,322 @@
# See license.txt
-from unittest.mock import Mock, patch
-from press.press.doctype.ansible_play.test_ansible_play import create_test_ansible_play
+import json
+from unittest.mock import patch
-from press.press.doctype.self_hosted_server.self_hosted_server import SelfHostedServer
import frappe
-from press.runner import Ansible
+from frappe.tests.utils import FrappeTestCase, change_settings
+
+from press.api.tests.test_server import create_test_server_plan
+from press.press.doctype.ansible_play.test_ansible_play import create_test_ansible_play
+from press.press.doctype.press_settings.test_press_settings import (
+ create_test_press_settings,
+)
+from press.press.doctype.self_hosted_server.self_hosted_server import SelfHostedServer
from press.press.doctype.team.test_team import create_test_team
-from frappe.tests.utils import FrappeTestCase
class TestSelfHostedServer(FrappeTestCase):
+ def setUp(self):
+ super().setUp()
+
+ create_test_press_settings()
+
def tearDown(self):
frappe.db.rollback()
- def test_autoname_to_fqdn(self):
- hostnames = ["a1", "a1.b1", "waaaaaaawwwaawwa", "1234561234"]
- for host in hostnames:
- server = create_test_self_hosted_server(host)
- self.assertEqual(server.name, f"{host}.fc.dev")
+ # def test_autoname_to_fqdn(self):
+ # hostnames = ["a1", "a1.b1", "waaaaaaawwwaawwa", "1234561234"]
+ # for host in hostnames:
+ # server = create_test_self_hosted_server(host)
+ # self.assertEqual(server.name, f"{host}.fc.dev")
- @patch(
- "press.press.doctype.self_hosted_server.self_hosted_server.Ansible",
- wraps=Ansible,
- )
- @patch.object(Ansible, "run", new=Mock())
- def test_setup_nginx_triggers_nginx_ssl_playbook(self, Mock_Ansible: Mock):
- server = create_test_self_hosted_server("ssl")
- server.setup_nginx()
- Mock_Ansible.assert_called_with(
- playbook="self_hosted_nginx.yml",
- server=server,
- user=server.ssh_user or "root",
- port=server.ssh_port or "22",
- variables={"domain": server.name},
- )
+ def test_successful_ping_ansible_sets_status_to_pending(self):
+ server = create_test_self_hosted_server("pinger")
+ with patch(
+ "press.press.doctype.self_hosted_server.self_hosted_server.Ansible.run",
+ new=lambda x: create_test_ansible_play(
+ "Ping Server",
+ "ping.yml",
+ server.doctype,
+ server.name,
+ {"server": server.name},
+ ),
+ ):
+ server.ping_ansible()
+ self.assertEqual(server.status, "Pending")
- def test_setup_nginx_creates_tls_certificate_post_success(self):
- server = create_test_self_hosted_server("ssl")
- pre_setup_count = frappe.db.count("TLS Certificate")
+ def test_failed_ping_ansible_sets_status_to_unreachable(self):
+ server = create_test_self_hosted_server("pinger")
with patch(
"press.press.doctype.self_hosted_server.self_hosted_server.Ansible.run",
new=lambda x: create_test_ansible_play(
- "Setup Self Hosted Nginx",
- "self_hosted_nginx.yml",
+ "Ping Server",
+ "ping.yml",
server.doctype,
server.name,
- {"server": "ssl.fc.dev"},
+ {"server": server.name},
+ "Failure",
+ ),
+ ):
+ server.ping_ansible()
+ self.assertEqual(server.status, "Unreachable")
+
+ def test_get_apps_populates_apps_child_table(self):
+ server = create_test_self_hosted_server("apps")
+ with patch(
+ "press.press.doctype.self_hosted_server.self_hosted_server.Ansible.run",
+ new=lambda x: _create_test_ansible_play_and_task(
+ server=server,
+ playbook="get_apps.yml",
+ _play="Get Bench data from Self Hosted Server",
+ task_1="Get Versions from Current Bench",
+ task_1_output=json.dumps(
+ [
+ {
+ "commit": "3672c9f",
+ "app": "frappe",
+ "branch": "version-14",
+ "version": "14.30.0",
+ }
+ ]
+ ),
+ task_1_result="",
),
):
- server.create_tls_certs()
- post_setup_count = frappe.db.count("TLS Certificate")
- self.assertEqual(pre_setup_count, post_setup_count - 1)
+ server._get_apps()
+ server.reload()
+ self.assertTrue(server.apps)
+ self.assertEqual(len(server.apps), 1)
+ self.assertEqual(server.apps[0].app_name, "frappe")
+ self.assertEqual(server.apps[0].branch, "version-14")
+ self.assertEqual(server.apps[0].version, "14.30.0")
+
+ def test_get_sites_populates_site_table_with_config(self):
+ server = create_test_self_hosted_server("sites")
+ server.bench_path = "/home/frappe/frappe-bench"
+ with patch(
+ "press.press.doctype.self_hosted_server.self_hosted_server.Ansible.run",
+ new=lambda x: _create_test_ansible_play_and_task(
+ server=server,
+ playbook="get_sites.yml",
+ _play="Sites from Current Bench",
+ task_1="Get Sites from Current Bench",
+ task_1_output=json.dumps({"site1.local": ["frappe", "erpnext"]}),
+ task_1_result="",
+ task_2="Get Site Configs from Existing Sites",
+ task_2_output=json.dumps(
+ [
+ {
+ "site": "site1.local",
+ "config": {
+ "activations_last_sync_date": "2023-05-07 00:00:49.152290",
+ "always_use_account_email_id_as_sender": 1,
+ },
+ }
+ ]
+ ),
+ task_2_result="",
+ ),
+ ):
+ server._get_sites()
+ server.reload()
+ self.assertTrue(server.sites)
+ self.assertTrue(server.sites[0].site_config)
+ self.assertEqual(len(server.sites), 1)
+ self.assertEqual(
+ server.sites[0].site_config,
+ json.dumps(
+ {
+ "activations_last_sync_date": "2023-05-07 00:00:49.152290",
+ "always_use_account_email_id_as_sender": 1,
+ }
+ ),
+ )
+ self.assertEqual(server.sites[0].apps, "frappe,erpnext")
+
+ def test_fetch_system_ram_from_ansible_and_update_ram_field(self):
+ server = create_test_self_hosted_server("ram")
+ _create_test_ansible_play_and_task(
+ server=server,
+ playbook="ping.yml",
+ _play="Ping Server",
+ task_1="Gather Facts",
+ task_1_output="",
+ task_1_result='{"ansible_facts": {"memtotal_mb": 16384}}',
+ )
+ server.fetch_system_ram()
+ server.reload()
+ self.assertEqual(server.ram, "16384")
+
+ def test_fetch_system_specifications_and_populate_fields_in_doc(self):
+ server = create_test_self_hosted_server("tester")
+ _create_test_ansible_play_and_task(
+ server=server,
+ playbook="ping.yml",
+ _play="Ping Server",
+ task_1="Gather Facts",
+ task_1_output="",
+ task_1_result="""{"ansible_facts": {"memtotal_mb": 16384,"system_vendor":"Amazon EC2","processor_vcpus":2,"swaptotal_mb":1024,"architecture":"x86_64","product_name":"c5a.6xLarge","processor":["0","GenuineIntel","Intel(R) Xeon(R) CPU @ 2.20GHz","1","GenuineIntel","Intel(R) Xeon(R) CPU @ 2.20GHz"],"lsb":{"description":"Debian GNU/Linux 11 (bullseye)"},"devices":{"nvme0n1":{"size":"25 GB"}}}}""",
+ )
+ server.fetch_system_specifications()
+ server.reload()
+ self.assertEqual(server.vendor, "Amazon EC2")
+ self.assertEqual(server.ram, "16384")
+ self.assertEqual(server.vcpus, "2")
+ self.assertEqual(server.processor, "Intel(R) Xeon(R) CPU @ 2.20GHz")
+ self.assertEqual(server.swap_total, "1024")
+ self.assertEqual(server.architecture, "x86_64")
+ self.assertEqual(server.instance_type, "c5a.6xLarge")
+ self.assertEqual(server.distribution, "Debian GNU/Linux 11 (bullseye)")
+ self.assertEqual(server.total_storage, "25 GB")
+
+ def test_fetch_private_ip_from_ansible_ping_and_populate_field(self):
+ server = create_test_self_hosted_server("tester")
+ _create_test_ansible_play_and_task(
+ server=server,
+ playbook="ping.yml",
+ _play="Ping Server",
+ task_1="Gather Facts",
+ task_1_output="",
+ task_1_result="""{"ansible_facts":{"default_ipv4":{"address":"192.168.1.1"},"system_vendor":"AWS EC2"}}""",
+ )
+ server.fetch_private_ip()
+ server.reload()
+ self.assertEqual(server.private_ip, "192.168.1.1")
+
+ @change_settings("Press Settings", {"hybrid_domain": "fc.dev"})
+ def test_create_server_and_check_total_records(self):
+ from press.press.doctype.cluster.test_cluster import create_test_cluster
+ from press.press.doctype.proxy_server.test_proxy_server import (
+ create_test_proxy_server,
+ )
+
+ create_test_cluster(name="Default", hybrid=True)
+ create_test_proxy_server()
+ plan = create_test_server_plan(document_type="Self Hosted Server")
+ pre_server_count = frappe.db.count("Server")
+
+ server = create_test_self_hosted_server("tester", plan=plan.name)
+ server.create_application_server()
+ server.reload()
+
+ post_server_count = frappe.db.count("Server")
+ new_server = frappe.get_last_doc("Server")
+ self.assertEqual(pre_server_count, post_server_count - 1)
+ self.assertEqual("hybrid-f-00001-default.fc.dev", new_server.name)
+
+ @change_settings("Press Settings", {"hybrid_domain": "fc.dev"})
+ def test_create_db_server_and_check_total_records(self):
+ from press.press.doctype.cluster.test_cluster import create_test_cluster
+ from press.press.doctype.proxy_server.test_proxy_server import (
+ create_test_proxy_server,
+ )
+
+ plan = create_test_server_plan(document_type="Database Server")
+ create_test_cluster(name="Default", hybrid=True)
+ create_test_proxy_server()
+ pre_server_count = frappe.db.count("Database Server")
+
+ server = create_test_self_hosted_server("tester", database_plan=plan.name)
+ server.create_database_server()
+ server.reload()
+ post_server_count = frappe.db.count("Database Server")
+ new_server = frappe.get_last_doc("Database Server")
+ self.assertEqual(pre_server_count, post_server_count - 1)
+ self.assertEqual("hybrid-m-00001-default.fc.dev", new_server.name)
-def create_test_self_hosted_server(host) -> SelfHostedServer:
+ def test_check_minimum_specs(self):
+ server = create_test_self_hosted_server("tester")
+ server.ram = 2500
+ with self.assertRaises(frappe.exceptions.ValidationError):
+ server.check_minimum_specs()
+ server.ram = 3853
+ server.vcpus = 1
+ server.total_storage = "100 GB"
+ with self.assertRaises(frappe.exceptions.ValidationError):
+ server.check_minimum_specs()
+ server.vcpus = 2
+ server.total_storage = "20 GB"
+ with self.assertRaises(frappe.exceptions.ValidationError):
+ server.check_minimum_specs()
+ server.total_storage = "100 GB"
+ self.assertTrue(server.check_minimum_specs())
+
+ def test_create_subscription_add_plan_change_and_check_for_new_subscription(self):
+ app_plan = create_test_server_plan("Self Hosted Server")
+ database_plan = create_test_server_plan(document_type="Database Server")
+
+ pre_plan_change_count = frappe.db.count("Plan Change")
+ pre_subscription_count = frappe.db.count("Subscription")
+
+ server = create_test_self_hosted_server(
+ "tester", database_plan=database_plan.name, plan=app_plan.name
+ )
+ server.create_application_server()
+ server.create_database_server()
+
+ post_plan_change_count = frappe.db.count("Plan Change")
+ post_subscription_count = frappe.db.count("Subscription")
+
+ self.assertEqual(pre_plan_change_count, post_plan_change_count - 2)
+ self.assertEqual(pre_subscription_count, post_subscription_count - 2)
+
+
+def create_test_self_hosted_server(host, database_plan=None, plan=None) -> SelfHostedServer:
+ """
+    plan: name of the application server's subscription plan
+    database_plan: name of the database server's subscription plan
+ """
server = frappe.get_doc(
{
"doctype": "Self Hosted Server",
- "status": "Active",
"ip": frappe.mock("ipv4"),
- "private_ip": frappe.mock("ipv4_private"),
+ "private_ip": "192.168.1.1",
+ "mariadb_ip": frappe.mock("ipv4"),
+ "mariadb_private_ip": "192.168.1.2",
"server_url": f"https://{host}.fc.dev",
"team": create_test_team().name,
+ "cluster": "Default",
}
- ).insert(ignore_if_duplicate=True)
+ )
+
+ if database_plan:
+ server.database_plan = database_plan
+ if plan:
+ server.plan = plan
+
+ server.insert(ignore_if_duplicate=True)
server.reload()
return server
+
+
+def _create_test_ansible_play_and_task(
+ server: SelfHostedServer, playbook: str, _play: str, **kwargs
+): # TODO: Move to AnsiblePlay and Make a generic one for AnsibleTask
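+    # kwargs convention: task_<n>, task_<n>_output and task_<n>_result
+    # describe the nth Ansible Task to attach to the play.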
+ play = create_test_ansible_play(
+ _play,
+ playbook,
+ server.doctype,
+ server.name,
+ {"server": server.name},
+ )
+
+ for i, _ in enumerate(kwargs):
+ try:
+ task = frappe.get_doc(
+ {
+ "doctype": "Ansible Task",
+ "status": "Success",
+ "play": play.name,
+ "role": play.playbook.split(".")[0],
+ "task": kwargs.get("task_" + str(i + 1)),
+ "output": kwargs.get("task_" + str(i + 1) + "_output"),
+ "result": kwargs.get("task_" + str(i + 1) + "_result"),
+ }
+ )
+ task.insert()
+ except Exception:
+ pass
+ return play
diff --git a/press/press/doctype/self_hosted_site_apps/self_hosted_site_apps.json b/press/press/doctype/self_hosted_site_apps/self_hosted_site_apps.json
index 875c28e309c..db28a99891f 100644
--- a/press/press/doctype/self_hosted_site_apps/self_hosted_site_apps.json
+++ b/press/press/doctype/self_hosted_site_apps/self_hosted_site_apps.json
@@ -32,13 +32,14 @@
"fieldname": "site",
"fieldtype": "Link",
"label": "Site",
- "options": "Site"
+ "options": "Site",
+ "search_index": 1
}
],
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
- "modified": "2023-03-28 12:24:09.561279",
+ "modified": "2025-03-18 10:09:56.500243",
"modified_by": "Administrator",
"module": "Press",
"name": "Self Hosted Site Apps",
diff --git a/press/press/doctype/self_hosted_site_apps/self_hosted_site_apps.py b/press/press/doctype/self_hosted_site_apps/self_hosted_site_apps.py
index 9e99eb17c4f..c0889af54f4 100644
--- a/press/press/doctype/self_hosted_site_apps/self_hosted_site_apps.py
+++ b/press/press/doctype/self_hosted_site_apps/self_hosted_site_apps.py
@@ -6,4 +6,22 @@
class SelfHostedSiteApps(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ apps: DF.Data | None
+ name: DF.Int | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ site: DF.Link | None
+ site_config: DF.Code | None
+ site_name: DF.Data | None
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/serial_console_log/__init__.py b/press/press/doctype/serial_console_log/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/serial_console_log/serial_console_log.js b/press/press/doctype/serial_console_log/serial_console_log.js
new file mode 100644
index 00000000000..65a63962826
--- /dev/null
+++ b/press/press/doctype/serial_console_log/serial_console_log.js
@@ -0,0 +1,13 @@
+// Copyright (c) 2023, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('Serial Console Log', {
+ refresh(frm) {
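+		// Unbind before binding so repeated form refreshes don't stack
+		// duplicate realtime handlers.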
+ frappe.realtime.off('serial_console_log_update');
+ frappe.realtime.on('serial_console_log_update', (message) => {
+ if (message.name == frm.doc.name) {
+ frm.set_value('output', message.output);
+ }
+ });
+ },
+});
diff --git a/press/press/doctype/serial_console_log/serial_console_log.json b/press/press/doctype/serial_console_log/serial_console_log.json
new file mode 100644
index 00000000000..70cf6ae77b8
--- /dev/null
+++ b/press/press/doctype/serial_console_log/serial_console_log.json
@@ -0,0 +1,116 @@
+{
+ "actions": [
+ {
+ "action": "press.press.doctype.serial_console_log.serial_console_log.run_sysrq",
+ "action_type": "Server Action",
+ "group": "Actions",
+ "label": "Run SysRQ Command"
+ }
+ ],
+ "allow_rename": 1,
+ "creation": "2023-12-29 13:02:05.574172",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "server_type",
+ "server",
+ "virtual_machine",
+ "column_break_oypq",
+ "action",
+ "command",
+ "message",
+ "section_break_vvrg",
+ "output"
+ ],
+ "fields": [
+ {
+ "fieldname": "server_type",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Server Type",
+ "link_filters": "[[{\"fieldname\":\"server_type\",\"field_option\":\"DocType\"},\"name\",\"like\",\"%Server\"]]",
+ "options": "DocType",
+ "reqd": 1,
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "server",
+ "fieldtype": "Dynamic Link",
+ "in_list_view": 1,
+ "label": "Server",
+ "options": "server_type",
+ "reqd": 1,
+ "set_only_once": 1
+ },
+ {
+ "fetch_from": "server.virtual_machine",
+ "fetch_if_empty": 1,
+ "fieldname": "virtual_machine",
+ "fieldtype": "Link",
+ "label": "Virtual Machine",
+ "options": "Virtual Machine",
+ "reqd": 1,
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "column_break_oypq",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "section_break_vvrg",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "output",
+ "fieldtype": "Code",
+ "label": "Output",
+ "read_only": 1
+ },
+ {
+ "fieldname": "action",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Action",
+ "options": "help\nreboot\ncrash\nsync\nshow-all-timers\nunmount\nshow-all-locks\nshow-backtrace-all-active-cpus\nshow-registers\nshow-task-states\nshow-blocked-tasks\ndump-ftrace-buffer\nshow-memory-usage\nterminate-all-tasks\nmemory-full-oom-kill\nthaw-filesystems\nkill-all-tasks\nnice-all-RT-tasks\nreplay-kernel-logs",
+ "reqd": 1,
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "command",
+ "fieldtype": "Data",
+ "label": "Command",
+ "read_only": 1
+ },
+ {
+ "fieldname": "message",
+ "fieldtype": "Data",
+ "label": "Message",
+ "read_only": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2024-06-12 20:07:15.077206",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Serial Console Log",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/serial_console_log/serial_console_log.py b/press/press/doctype/serial_console_log/serial_console_log.py
new file mode 100644
index 00000000000..94967d1d338
--- /dev/null
+++ b/press/press/doctype/serial_console_log/serial_console_log.py
@@ -0,0 +1,162 @@
+# Copyright (c) 2023, Frappe and contributors
+# For license information, please see license.txt
+
+from __future__ import annotations
+
+import time
+from io import StringIO
+
+import frappe
+import pexpect
+from frappe.model.document import Document
+
+from press.press.doctype.deploy_candidate.deploy_candidate import ansi_escape
+
+
+class SerialConsoleLog(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ action: DF.Literal[
+ "help",
+ "reboot",
+ "crash",
+ "sync",
+ "show-all-timers",
+ "unmount",
+ "show-all-locks",
+ "show-backtrace-all-active-cpus",
+ "show-registers",
+ "show-task-states",
+ "show-blocked-tasks",
+ "dump-ftrace-buffer",
+ "show-memory-usage",
+ "terminate-all-tasks",
+ "memory-full-oom-kill",
+ "thaw-filesystems",
+ "kill-all-tasks",
+ "nice-all-RT-tasks",
+ "replay-kernel-logs",
+ ]
+ command: DF.Data | None
+ message: DF.Data | None
+ output: DF.Code | None
+ server: DF.DynamicLink
+ server_type: DF.Link
+ virtual_machine: DF.Link
+ # end: auto-generated types
+
+ def validate(self):
+ self.command, self.message = SYSRQ_COMMANDS.get(self.action, ("h", "HELP"))
+
+ @frappe.whitelist()
+ def run_sysrq(self):
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ method="_run_sysrq",
+ queue="long",
+ enqueue_after_commit=True,
+ at_front=True,
+ )
+ frappe.db.commit()
+
+ def _run_sysrq(self):
+ credentials = frappe.get_doc("Virtual Machine", self.virtual_machine).get_serial_console_credentials()
+ ssh = pexpect.spawn(credentials["command"], encoding="utf-8")
+ ssh.logfile = FakeIO(self)
+
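+        # The first connection may prompt for host key verification;
+        # accept it when the expected fingerprint appears.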
+ index = ssh.expect([credentials["fingerprint"], pexpect.TIMEOUT], timeout=3)
+ if index == 0:
+ ssh.expect("Are you sure you want to continue")
+ ssh.sendline("yes")
+
+ # Send a newline and wait for login prompt
+ # We don't want to send break too soon
+ time.sleep(0.5)
+ ssh.sendline("")
+ ssh.expect(["login:", "Password:"])
+
+ # Send ~B and expect SysRq help message
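+        # "~B" is the SSH escape sequence that sends a serial BREAK;
+        # the kernel interprets BREAK followed by a key as a SysRq command.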
+ time.sleep(0.5)
+ ssh.send("~B")
+ time.sleep(0.1)
+ ssh.send("h")
+ ssh.expect(["sysrq: HELP", pexpect.TIMEOUT], timeout=1)
+
+ break_attempt = 0
+ while True:
+ break_attempt += 1
+
+ # Send ~B and then b for reboot
+ time.sleep(0.5)
+ ssh.sendline("")
+ ssh.send("~B")
+ time.sleep(0.1)
+ ssh.send(self.command)
+
+ # Wait for reboot
+ index = ssh.expect([f"sysrq: {self.message}", pexpect.TIMEOUT], timeout=1)
+ if index == 0 or break_attempt > 10:
+ break
+
+ # Wait for login prompt
+ ssh.expect("login:", timeout=300)
+
+
+class FakeIO(StringIO):
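+    # Used as a pexpect logfile: every flush persists the captured console
+    # output on the document and streams it to watchers over realtime.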
+ def __init__(self, serial_console_log, *args, **kwargs):
+ self.console = serial_console_log.name
+ super().__init__(*args, **kwargs)
+
+ def flush(self):
+ super().flush()
+ output = ansi_escape(self.getvalue())
+ frappe.db.set_value("Serial Console Log", self.console, "output", output, update_modified=False)
+
+ message = {"name": self.console, "output": output}
+ frappe.publish_realtime(
+ event="serial_console_log_update",
+ doctype="Serial Console Log",
+ docname=self.console,
+ user=frappe.session.user,
+ message=message,
+ )
+
+ frappe.db.commit()
+
+
+SYSRQ_COMMANDS = {
+ "crash": ("c", "Trigger a crash"),
+ "reboot": ("b", "Resetting"),
+ "sync": ("s", "Emergency Sync"),
+ "help": ("h", "HELP"),
+ "show-all-timers": ("q", "Show clockevent devices & pending hrtimers (no others)"),
+ "unmount": ("u", "Emergency Remount R/O"),
+ "show-all-locks": ("d", "Show Locks Held"),
+ "show-backtrace-all-active-cpus": ("l", "Show backtrace of all active CPUs"),
+ "show-registers": ("p", "Show Regs"),
+ "show-task-states": ("t", "Show State"),
+ "show-blocked-tasks": ("w", "Show Blocked State"),
+ "dump-ftrace-buffer": ("z", "Dump ftrace buffer"),
+ "show-memory-usage": ("m", "Show Memory"),
+ "terminate-all-tasks": ("e", "Terminate All Tasks"),
+ "memory-full-oom-kill": ("f", "Manual OOM execution"),
+ "thaw-filesystems": ("j", "Emergency Thaw of all frozen filesystems"),
+ "kill-all-tasks": ("i", "Kill All Tasks"),
+ "nice-all-RT-tasks": ("n", "Nice All RT Tasks"),
+ "replay-kernel-logs": ("R", "Replay kernel logs on consoles"),
+}
+
+
+@frappe.whitelist()
+def run_sysrq(doc):
+ frappe.only_for("System Manager")
+ parsed_doc = frappe.parse_json(doc)
+ frappe.get_doc(parsed_doc.doctype, parsed_doc.name).run_sysrq()
+ return doc
diff --git a/press/press/doctype/serial_console_log/test_serial_console_log.py b/press/press/doctype/serial_console_log/test_serial_console_log.py
new file mode 100644
index 00000000000..4d2c14bc176
--- /dev/null
+++ b/press/press/doctype/serial_console_log/test_serial_console_log.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2023, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class TestSerialConsoleLog(FrappeTestCase):
+ pass
diff --git a/press/press/doctype/server/patches/set_bench_memory_limits.py b/press/press/doctype/server/patches/set_bench_memory_limits.py
new file mode 100644
index 00000000000..33e870b657d
--- /dev/null
+++ b/press/press/doctype/server/patches/set_bench_memory_limits.py
@@ -0,0 +1,8 @@
+# -*- coding: utf-8 -*- # noqa: UP009
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+import frappe
+
+
+def execute():
+    frappe.db.set_value("Server", {"status": "Active"}, "set_bench_memory_limits", True)
diff --git a/press/press/doctype/server/patches/set_is_provisioning_press_job_completed.py b/press/press/doctype/server/patches/set_is_provisioning_press_job_completed.py
new file mode 100644
index 00000000000..c7cc8d95ac5
--- /dev/null
+++ b/press/press/doctype/server/patches/set_is_provisioning_press_job_completed.py
@@ -0,0 +1,18 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+import frappe
+
+
+def execute():
+ frappe.db.set_value(
+ "Server",
+ {"is_provisioning_press_job_completed": 0},
+ "is_provisioning_press_job_completed",
+ 1,
+ )
+ frappe.db.set_value(
+ "Database Server",
+ {"is_provisioning_press_job_completed": 0},
+ "is_provisioning_press_job_completed",
+ 1,
+ )
diff --git a/press/press/doctype/server/patches/set_plan_and_subscription.py b/press/press/doctype/server/patches/set_plan_and_subscription.py
new file mode 100644
index 00000000000..cf7063af418
--- /dev/null
+++ b/press/press/doctype/server/patches/set_plan_and_subscription.py
@@ -0,0 +1,83 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+import frappe
+
+
+def execute():
+ DOCTYPES = ["Server", "Database Server"]
+ for doctype in DOCTYPES:
+ server_names = frappe.get_all(
+ doctype,
+ {"status": ("!=", "Archived"), "virtual_machine": ("is", "set")},
+ pluck="name",
+ )
+ for server_name in server_names:
+ server = frappe.get_doc(doctype, server_name)
+
+ subscription = frappe.get_all(
+ "Subscription", ["name", "plan"], {"enabled": True, "document_name": server.name}
+ )
+
+ if subscription and server.plan:
+ # Plan is set and an active subscription exists
+ # Nothing to do here
+ continue
+ if subscription and not server.plan:
+ # Subscription exists but plan is not set
+ # Set Server.plan to the plan of the subscription
+            print(
+                f"Subscription exists, but plan isn't set for {doctype} {server.name}; setting plan to {subscription[0].plan}"
+            )
+ server.plan = subscription[0].plan
+ server.save()
+ if not subscription and server.plan:
+ # Plan is set but no subscription exists
+ # Create a subscription
+            print(
+                f"Plan is set but no subscription exists for {doctype} {server.name}; creating subscription for {server.plan}"
+            )
+ server.create_subscription(server.plan)
+ if not subscription and not server.plan:
+ # Plan is not set and no subscription exists
+ # Find a plan based on the server's instance type
+ instance_type = frappe.db.get_value(
+ "Virtual Machine", server.virtual_machine, "machine_type"
+ )
+ plan = frappe.get_all(
+ "Server Plan",
+ {
+ "enabled": True,
+ "server_type": doctype,
+ "cluster": server.cluster,
+ "instance_type": instance_type,
+ "premium": False,
+ },
+ )
+ if plan:
+                print(
+                    f"Found plan for {doctype} {server.name} based on instance_type {instance_type}; setting plan to {plan[0].name}"
+                )
+ server.plan = plan[0].name
+ server.save()
+ server.create_subscription(server.plan)
+ else:
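+                # No plan matches this exact instance type; retry with the
+                # equivalent previous-generation type, e.g. "m7i.large" ->
+                # "m6i.large" or "m5.large" -> "m6i.large" (illustrative).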
+ instance_type = instance_type.replace("7", "6")
+ instance_type = instance_type.replace("5", "6i")
+ plan = frappe.get_all(
+ "Server Plan",
+ {
+ "enabled": True,
+ "server_type": doctype,
+ "cluster": server.cluster,
+ "instance_type": instance_type,
+ "premium": False,
+ },
+ )
+
+                print(
+                    f"No exact match plan found for {doctype} {server.name} based on instance_type {instance_type}; found next best plan {plan[0].name}"
+                )
+ server.plan = plan[0].name
+ server.save()
+ server.create_subscription(server.plan)
diff --git a/press/press/doctype/server/patches/unset_bench_memory_limits_on_dedicated_servers.py b/press/press/doctype/server/patches/unset_bench_memory_limits_on_dedicated_servers.py
new file mode 100644
index 00000000000..e4319fbee18
--- /dev/null
+++ b/press/press/doctype/server/patches/unset_bench_memory_limits_on_dedicated_servers.py
@@ -0,0 +1,24 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+import frappe
+from tqdm import tqdm
+
+LIMIT_MULTIPLIER = 10
+# Multiplier for the existing memory limits. Nothing special about 10;
+# just a number high enough that the new limits should never be reached.
+
+
+def execute():
+ servers = frappe.get_all(
+ "Server", filters={"status": "Active", "set_bench_memory_limits": True, "public": False}, pluck="name"
+ )
+ for server in tqdm(servers):
+ frappe.db.set_value("Server", server, "set_bench_memory_limits", False)
+ benches = frappe.get_all("Bench", filters={"server": server, "status": "Active"}, pluck="name")
+ for bench in benches:
+ bench = frappe.get_doc("Bench", bench)
+ bench.memory_max = LIMIT_MULTIPLIER * bench.memory_max
+ bench.memory_swap = LIMIT_MULTIPLIER * bench.memory_swap
+ bench.memory_high = LIMIT_MULTIPLIER * bench.memory_high
+ bench.save()
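+        # Commit per server so progress is preserved if the patch is interrupted.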
+ frappe.db.commit()
diff --git a/press/press/doctype/server/scaleway-metal-build-server-raid.json b/press/press/doctype/server/scaleway-metal-build-server-raid.json
new file mode 100644
index 00000000000..a66a86d03e6
--- /dev/null
+++ b/press/press/doctype/server/scaleway-metal-build-server-raid.json
@@ -0,0 +1,87 @@
+{
+ "disks": [
+ {
+ "device": "/dev/nvme0n1",
+ "partitions": [
+ {
+ "label": "uefi",
+ "number": 1,
+ "size": 536870912
+ },
+ {
+ "label": "swap",
+ "number": 2,
+ "size": 4294967296
+ },
+ {
+ "label": "boot",
+ "number": 3,
+ "size": 536870912
+ },
+ {
+ "label": "root",
+ "number": 4,
+ "size": 924347006976
+ }
+ ]
+ },
+ {
+ "device": "/dev/nvme1n1",
+ "partitions": [
+ {
+ "label": "swap",
+ "number": 1,
+ "size": 4294967296
+ },
+ {
+ "label": "boot",
+ "number": 2,
+ "size": 536870912
+ },
+ {
+ "label": "root",
+ "number": 3,
+ "size": 924347006976
+ }
+ ]
+ }
+ ],
+ "filesystems": [
+ {
+ "device": "/dev/nvme0n1p1",
+ "format": "fat32",
+ "mountpoint": "/boot/efi"
+ },
+ {
+ "device": "/dev/md0",
+ "format": "ext4",
+ "mountpoint": "/boot"
+ },
+ {
+ "device": "/dev/md1",
+ "format": "ext4",
+ "mountpoint": "/"
+ }
+ ],
+ "raids": [
+ {
+ "devices": [
+ "/dev/nvme0n1p3",
+ "/dev/nvme1n1p2"
+ ],
+ "level": "raid_level_1",
+ "name": "/dev/md0"
+ },
+ {
+ "devices": [
+ "/dev/nvme0n1p4",
+ "/dev/nvme1n1p3"
+ ],
+ "level": "raid_level_0",
+ "name": "/dev/md1"
+ }
+ ],
+ "zfs": {
+ "pools": []
+ }
+}
diff --git a/press/press/doctype/server/server.js b/press/press/doctype/server/server.js
index afa5a21ede1..773bc05bb2e 100644
--- a/press/press/doctype/server/server.js
+++ b/press/press/doctype/server/server.js
@@ -7,8 +7,15 @@ frappe.ui.form.on('Server', {
`/dashboard/servers/${frm.doc.name}`,
__('Visit Dashboard'),
);
- [
+
+ const ping_actions = [
[__('Ping Agent'), 'ping_agent', false, frm.doc.is_server_setup],
+ [
+ __('Ping Agent (Job)'),
+ 'ping_agent_job',
+ false,
+ frm.doc.is_server_setup,
+ ],
[__('Ping Ansible'), 'ping_ansible', true, !frm.doc.is_server_prepared],
[
__('Ping Ansible Unprepared'),
@@ -16,13 +23,59 @@ frappe.ui.form.on('Server', {
true,
!frm.doc.is_server_prepared,
],
+ ];
+
+ for (const [label, method, confirm, condition] of ping_actions) {
+			if (!condition) {
+ continue;
+ }
+
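+			// Build one closure per action so label/method/confirm are
+			// captured for that button's handler.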
+ async function callback() {
+ if (confirm && !(await frappe_confirm(label))) {
+ return;
+ }
+
+ const res = await frm.call(method);
+ if (res.message && method == 'ping_agent_job') {
+ frappe.msgprint(
+					`Agent Job ${res?.message} created.`,
+ );
+ } else if (res.message) {
+ frappe.msgprint(res.message);
+ } else {
+ frm.refresh();
+ }
+ }
+
+ frm.add_custom_button(label, callback, __('Ping'));
+ }
+
+ [
[__('Update Agent'), 'update_agent', true, frm.doc.is_server_setup],
+ [
+ __('Install Filebeat'),
+ 'install_filebeat',
+ true,
+ frm.doc.is_server_setup,
+ ],
[
__('Update Agent Ansible'),
'update_agent_ansible',
true,
frm.doc.is_server_setup,
],
+ [
+ __('Get AWS Static IP'),
+ 'get_aws_static_ip',
+ false,
+ frm.doc.provider === 'AWS EC2',
+ ],
+ [
+ __('Setup PySpy'),
+ 'setup_pyspy',
+ false,
+ frm.doc.is_server_setup && !frm.doc.is_pyspy_setup,
+ ],
[
__('Prepare Server'),
'prepare_server',
@@ -30,7 +83,12 @@ frappe.ui.form.on('Server', {
!frm.doc.is_server_prepared,
],
[__('Setup Server'), 'setup_server', true, !frm.doc.is_server_setup],
- [__('Add 8GB Swap'), 'increase_swap', true, frm.doc.is_server_setup],
+ [
+ __('Setup Unified Server'),
+ 'setup_unified_server',
+ true,
+ frm.doc.is_unified_server,
+ ],
[
__('Add to Proxy'),
'add_upstream_to_proxy',
@@ -77,7 +135,12 @@ frappe.ui.form.on('Server', {
],
[__('Create Image'), 'create_image', true, frm.doc.status == 'Active'],
[__('Archive'), 'archive', true, frm.doc.status !== 'Archived'],
- [__('Setup Fail2ban'), 'setup_fail2ban', true, frm.doc.is_server_setup],
+ [
+ __('Setup MySQLdump'),
+ 'setup_mysqldump',
+ true,
+ frm.doc.is_server_setup && frm.doc.status == 'Active',
+ ],
[
__('Whitelist Server'),
'whitelist_ipaddress',
@@ -90,12 +153,30 @@ frappe.ui.form.on('Server', {
false,
frm.doc.is_server_setup,
],
+ [
+ __('Setup Agent Sentry'),
+ 'setup_agent_sentry',
+ false,
+ frm.doc.is_server_setup,
+ ],
+ [
+ __('Start Active Benches'),
+ 'start_active_benches',
+ true,
+ frm.doc.is_server_setup,
+ ],
[
__('Show Agent Password'),
'show_agent_password',
false,
frm.doc.is_server_setup,
],
+ [
+ __('Show Agent Version'),
+ 'show_agent_version',
+ false,
+ frm.doc.is_server_setup,
+ ],
[
__('Setup Standalone'),
'setup_standalone',
@@ -104,6 +185,83 @@ frappe.ui.form.on('Server', {
frm.doc.is_standalone &&
!frm.doc.is_standalone_setup,
],
+ [
+ __('Fetch Security Updates'),
+ 'fetch_security_updates',
+ false,
+ frm.doc.is_server_setup,
+ ],
+ [
+ __('Configure SSH logging'),
+ 'configure_ssh_logging',
+ false,
+ frm.doc.is_server_setup,
+ ],
+ [
+ __('Reset Usage for all sites'),
+ 'reset_sites_usage',
+ true,
+ frm.doc.is_server_setup,
+ ],
+ [
+ __('Reboot with serial console'),
+ 'reboot_with_serial_console',
+ true,
+ frm.doc.provider === 'AWS EC2',
+ ],
+ [
+ __('Enable Public Bench and Site Creation'),
+ 'enable_for_new_benches_and_sites',
+ true,
+ frm.doc.virtual_machine,
+ ],
+ [
+ __('Disable Public Bench and Site Creation'),
+ 'disable_for_new_benches_and_sites',
+ true,
+ frm.doc.virtual_machine,
+ ],
+ [
+ __('Set Swappiness and SysRq'),
+ 'set_swappiness',
+ false,
+ frm.doc.is_server_setup,
+ ],
+ [
+ __('Mount Volumes'),
+ 'mount_volumes',
+ true,
+ frm.doc.virtual_machine && frm.doc.mounts,
+ ],
+ [
+ __('Collect ARM Images'),
+ 'collect_arm_images',
+ true,
+ frm.doc.virtual_machine &&
+ frm.doc.status === 'Active' &&
+ frm.doc.platform === 'x86_64',
+ ],
+ [__('Scale Up'), 'scale_up', true, !frm.doc.scaled_up],
+ [__('Scale Down'), 'scale_down', true, frm.doc.scaled_up],
+ [__('Setup Firewall'), 'setup_firewall', true, frm.doc.is_server_setup],
+ [
+ __('Teardown Firewall'),
+ 'teardown_firewall',
+ true,
+ frm.doc.is_server_setup,
+ ],
+ [
+ __('Install Wazuh Agent'),
+ 'install_wazuh_agent',
+ true,
+ frm.doc.is_server_setup,
+ ],
+ [
+ __('Uninstall Wazuh Agent'),
+ 'uninstall_wazuh_agent',
+ true,
+ frm.doc.is_server_setup,
+ ],
].forEach(([label, method, confirm, condition]) => {
if (typeof condition === 'undefined' || condition) {
frm.add_custom_button(
@@ -135,5 +293,133 @@ frappe.ui.form.on('Server', {
);
}
});
+
+		if (frm.doc.is_server_setup && frm.doc.is_primary) {
+ frm.add_custom_button(
+ 'Setup Secondary Server',
+ () => {
+ frappe.prompt(
+ [
+ {
+ fieldtype: 'Link',
+ fieldname: 'server_plan',
+ label: __('Server Plan'),
+ options: 'Server Plan',
+ reqd: 1,
+ },
+ ],
+ ({ server_plan }) => {
+ frm
+ .call('setup_secondary_server', {
+ server_plan: server_plan,
+ })
+ .then((r) => {
+ frm.refresh();
+ });
+ },
+ );
+ },
+ __('Actions'),
+ );
+ }
+
+ if (frm.doc.is_server_setup) {
+ frm.add_custom_button(
+ __('Increase Swap'),
+ () => {
+ const dialog = new frappe.ui.Dialog({
+ title: __('Increase Swap'),
+ fields: [
+ {
+ fieldtype: 'Int',
+ label: __('Swap Size (GB)'),
+ description: __('Add additional swap'),
+ fieldname: 'swap_size',
+ default: 4,
+ },
+ ],
+ });
+
+ dialog.set_primary_action(__('Increase Swap'), (args) => {
+ frm.call('increase_swap', args).then(() => {
+ dialog.hide();
+ frm.refresh();
+ });
+ });
+ dialog.show();
+ },
+ __('Actions'),
+ );
+ frm.add_custom_button(
+ __('Reset Swap'),
+ () => {
+ const dialog = new frappe.ui.Dialog({
+ title: __('Swap Size'),
+ fields: [
+ {
+ fieldtype: 'Int',
+ label: __('Swap Size (GB)'),
+ description: __(
+ 'This will reset swap space to specified size. 0 or empty to remove all.',
+ ),
+ fieldname: 'swap_size',
+ default: 1,
+ },
+ ],
+ });
+
+ dialog.set_primary_action(__('Reset Swap'), (args) => {
+ frm.call('reset_swap', args).then(() => {
+ dialog.hide();
+ frm.refresh();
+ });
+ });
+ dialog.show();
+ },
+ __('Actions'),
+ );
+
+ frm.add_custom_button(
+ __('Snapshot Both Servers'),
+ () => {
+ const dialog = new frappe.ui.Dialog({
+ title: __('Snapshot Both Servers'),
+ fields: [
+ {
+ fieldtype: 'Check',
+ label: 'Consistent Snapshot',
+ description:
+ 'This will stop the running services during snapshot creation.',
+ fieldname: 'consistent',
+ default: 1,
+ },
+ ],
+ });
+
+ dialog.set_primary_action(__('Submit'), (args) => {
+ frm.call('create_snapshot', args).then(() => {
+ dialog.hide();
+ frm.refresh();
+ });
+ });
+ dialog.show();
+ },
+ __('Actions'),
+ );
+ }
+ },
+
+ hostname: function (frm) {
+ press.set_hostname_abbreviation(frm);
},
});
+
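+// Promise wrapper around frappe.confirm so callers can await the user's choice.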
+async function frappe_confirm(label) {
+ return new Promise((r) => {
+ frappe.confirm(
+ `Are you sure you want to ${label.toLowerCase()}?`,
+ () => r(true),
+ () => r(false),
+ );
+ });
+}
diff --git a/press/press/doctype/server/server.json b/press/press/doctype/server/server.json
index 991d42ee643..3dc00aea17d 100644
--- a/press/press/doctype/server/server.json
+++ b/press/press/doctype/server/server.json
@@ -5,31 +5,53 @@
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
+ "title",
"status",
"hostname",
+ "hostname_abbreviation",
"domain",
"self_hosted_server_domain",
- "title",
+ "tls_certificate_renewal_failed",
+ "is_unified_server",
"column_break_4",
"cluster",
"provider",
"virtual_machine",
+ "ignore_incidents_till",
+ "section_break_mequ",
"is_server_setup",
- "is_self_hosted",
"is_server_prepared",
"is_server_renamed",
+ "is_provisioning_press_job_completed",
+ "is_self_hosted",
+ "keep_files_on_server_in_offsite_backup",
+ "public",
+ "column_break_laiq",
+ "use_agent_job_callbacks",
+ "is_pyspy_setup",
+ "halt_agent_jobs",
+ "stop_deployments",
+ "is_for_recovery",
+ "is_monitoring_disabled",
"billing_section",
"team",
- "column_break_11",
"plan",
+ "column_break_11",
+ "auto_increase_storage",
+ "auto_add_storage_min",
+ "auto_add_storage_max",
"networking_section",
"ip",
+ "is_static_ip",
+ "ipv6",
"column_break_3",
"private_ip",
"private_mac_address",
"private_vlan_id",
"agent_section",
"agent_password",
+ "column_break_pdbx",
+ "disable_agent_job_auto_retry",
"reverse_proxy_section",
"proxy_server",
"column_break_12",
@@ -37,32 +59,54 @@
"database_section",
"database_server",
"self_hosted_mariadb_server",
+ "is_managed_database",
+ "enable_logical_replication_during_site_update",
"column_break_jdiy",
"self_hosted_mariadb_root_password",
+ "managed_database_service",
"replication",
"is_primary",
"is_replication_setup",
"column_break_24",
"primary",
+ "auto_scale_section",
+ "secondary_server",
+ "is_secondary",
+ "benches_on_shared_volume",
+ "scaled_up",
+ "column_break_ywnx",
+ "auto_scale_trigger",
"ssh_section",
"ssh_user",
"ssh_port",
"frappe_user_password",
"frappe_public_key",
"column_break_20",
+ "bastion_server",
"root_public_key",
"section_break_22",
"use_for_new_benches",
"use_for_new_sites",
"staging",
+ "use_for_build",
+ "platform",
+ "column_break_ktkv",
"new_worker_allocation",
+ "set_bench_memory_limits",
"ram",
+ "backups_section",
+ "skip_scheduled_backups",
"standalone_section",
"is_standalone",
"column_break_edyf",
"is_standalone_setup",
"tags_section",
- "tags"
+ "tags",
+ "mounts_section",
+ "has_data_volume",
+ "mounts",
+ "notifications_section",
+ "communication_infos"
],
"fields": [
{
@@ -70,8 +114,7 @@
"fieldname": "ip",
"fieldtype": "Data",
"in_list_view": 1,
- "label": "IP",
- "set_only_once": 1
+ "label": "IP"
},
{
"fieldname": "proxy_server",
@@ -89,9 +132,10 @@
"fieldname": "agent_password",
"fieldtype": "Password",
"label": "Agent Password",
- "set_only_once": 1
+ "read_only": 1
},
{
+ "collapsible": 1,
"fieldname": "agent_section",
"fieldtype": "Section Break",
"label": "Agent"
@@ -100,7 +144,7 @@
"default": "0",
"fieldname": "is_server_setup",
"fieldtype": "Check",
- "label": "Server Setup",
+ "label": "Is Server Setup",
"read_only": 1
},
{
@@ -130,6 +174,7 @@
"fieldtype": "Column Break"
},
{
+ "collapsible": 1,
"fieldname": "reverse_proxy_section",
"fieldtype": "Section Break",
"label": "Reverse Proxy"
@@ -140,12 +185,14 @@
"label": "Database"
},
{
+ "depends_on": "eval:!doc.is_managed_database",
"fieldname": "database_server",
"fieldtype": "Link",
"label": "Database Server",
"options": "Database Server"
},
{
+ "collapsible": 1,
"fieldname": "ssh_section",
"fieldtype": "Section Break",
"label": "SSH"
@@ -174,7 +221,8 @@
"default": "0",
"fieldname": "use_for_new_benches",
"fieldtype": "Check",
- "label": "Use For New Benches"
+ "label": "Use For New Benches",
+ "read_only": 1
},
{
"fieldname": "hostname",
@@ -195,7 +243,8 @@
"default": "0",
"fieldname": "use_for_new_sites",
"fieldtype": "Check",
- "label": "Use For New Sites"
+ "label": "Use For New Sites",
+ "read_only": 1
},
{
"fieldname": "cluster",
@@ -211,6 +260,7 @@
"fieldtype": "Column Break"
},
{
+ "collapsible": 1,
"fieldname": "networking_section",
"fieldtype": "Section Break",
"label": "Networking"
@@ -236,7 +286,7 @@
"fieldname": "provider",
"fieldtype": "Select",
"label": "Provider",
- "options": "Generic\nScaleway\nAWS EC2",
+ "options": "Generic\nScaleway\nAWS EC2\nOCI\nHetzner\nVodacom\nDigitalOcean",
"set_only_once": 1
},
{
@@ -246,6 +296,7 @@
"set_only_once": 1
},
{
+ "collapsible": 1,
"fieldname": "replication",
"fieldtype": "Section Break",
"label": "Replication"
@@ -283,15 +334,15 @@
"label": "Staging"
},
{
- "depends_on": "eval:doc.provider == \"AWS EC2\"",
+ "depends_on": "eval:[\"AWS EC2\", \"OCI\", \"Hetzner\", \"DigitalOcean\"].includes(doc.provider)",
"fieldname": "virtual_machine",
"fieldtype": "Link",
"label": "Virtual Machine",
- "mandatory_depends_on": "eval:doc.provider == \"AWS EC2\"",
+ "mandatory_depends_on": "eval:[\"AWS EC2\", \"OCI\"].includes(doc.provider)",
"options": "Virtual Machine"
},
{
- "default": "0",
+ "default": "1",
"fieldname": "new_worker_allocation",
"fieldtype": "Check",
"label": "New Worker Allocation"
@@ -320,7 +371,7 @@
"fieldname": "plan",
"fieldtype": "Link",
"label": "Plan",
- "options": "Plan"
+ "options": "Server Plan"
},
{
"default": "0",
@@ -349,13 +400,12 @@
},
{
"default": "root",
- "depends_on": "eval:doc.is_self_hosted",
"fieldname": "ssh_user",
"fieldtype": "Data",
"label": "SSH User"
},
{
- "depends_on": "eval:doc.is_self_hosted==true",
+ "depends_on": "eval:doc.is_self_hosted==true && !doc.is_managed_database",
"fieldname": "self_hosted_mariadb_server",
"fieldtype": "Data",
"label": "Self Hosted MariaDB Server IP"
@@ -366,7 +416,7 @@
"fieldtype": "Column Break"
},
{
- "depends_on": "eval:doc.is_self_hosted==true",
+ "depends_on": "eval:doc.is_self_hosted==true && !doc.is_managed_database",
"fieldname": "self_hosted_mariadb_root_password",
"fieldtype": "Password",
"label": "Self Hosted MariaDB Root Password"
@@ -416,10 +466,290 @@
"fieldtype": "Table",
"label": "Tags",
"options": "Resource Tag"
+ },
+ {
+ "fieldname": "column_break_ktkv",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "set_bench_memory_limits",
+ "fieldtype": "Check",
+ "label": "Set Bench Memory Limits"
+ },
+ {
+ "fieldname": "hostname_abbreviation",
+ "fieldtype": "Data",
+ "label": "Hostname Abbreviation"
+ },
+ {
+ "collapsible": 1,
+ "fieldname": "backups_section",
+ "fieldtype": "Section Break",
+ "label": "Backups"
+ },
+ {
+ "default": "0",
+ "fieldname": "skip_scheduled_backups",
+ "fieldtype": "Check",
+ "label": "Skip Scheduled Backups"
+ },
+ {
+ "fieldname": "column_break_pdbx",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "disable_agent_job_auto_retry",
+ "fieldtype": "Check",
+ "label": "Disable Agent Job Auto Retry"
+ },
+ {
+ "default": "0",
+ "description": "If user opts DBaaS eg. RDS",
+ "fieldname": "is_managed_database",
+ "fieldtype": "Check",
+ "label": "Is Managed Database"
+ },
+ {
+ "fieldname": "managed_database_service",
+ "fieldtype": "Link",
+ "label": "Managed Database Service",
+ "options": "Managed Database Service"
+ },
+ {
+ "default": "0",
+ "description": "Public release groups will be deployed here",
+ "fieldname": "public",
+ "fieldtype": "Check",
+ "label": "Public"
+ },
+ {
+ "default": "0",
+ "description": "If checked, server will be used to run Docker builds.",
+ "fieldname": "use_for_build",
+ "fieldtype": "Check",
+ "label": "Use For Build",
+ "search_index": 1
+ },
+ {
+ "default": "25",
+ "description": "Minimum storage to add automatically each time",
+ "fieldname": "auto_add_storage_min",
+ "fieldtype": "Int",
+ "label": "Auto Add Storage Min",
+ "non_negative": 1
+ },
+ {
+ "default": "250",
+ "description": "Maximum storage to add automatically each time",
+ "fieldname": "auto_add_storage_max",
+ "fieldtype": "Int",
+ "label": "Auto Add Storage Max",
+ "non_negative": 1
+ },
+ {
+ "fieldname": "mounts_section",
+ "fieldtype": "Section Break",
+ "label": "Mounts"
+ },
+ {
+ "fieldname": "mounts",
+ "fieldtype": "Table",
+ "label": "Mounts",
+ "options": "Server Mount"
+ },
+ {
+ "default": "0",
+ "fetch_from": "virtual_machine.has_data_volume",
+ "fieldname": "has_data_volume",
+ "fieldtype": "Check",
+ "label": "Has Data Volume",
+ "read_only": 1
+ },
+ {
+ "fieldname": "ipv6",
+ "fieldtype": "Data",
+ "label": "IPv6"
+ },
+ {
+ "default": "0",
+ "fieldname": "use_agent_job_callbacks",
+ "fieldtype": "Check",
+ "label": "Use Agent Job Callbacks"
+ },
+ {
+ "default": "0",
+ "fieldname": "is_pyspy_setup",
+ "fieldtype": "Check",
+ "label": "Is PySpy Setup",
+ "read_only": 1
+ },
+ {
+ "fieldname": "section_break_mequ",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "column_break_laiq",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "description": "Stop polling and queuing agent jobs",
+ "fieldname": "halt_agent_jobs",
+ "fieldtype": "Check",
+ "label": "Halt Agent Jobs"
+ },
+ {
+ "default": "x86_64",
+ "fieldname": "platform",
+ "fieldtype": "Select",
+ "label": "Platform",
+ "options": "x86_64\narm64"
+ },
+ {
+ "default": "1",
+ "fieldname": "auto_increase_storage",
+ "fieldtype": "Check",
+ "label": "Auto Increase Storage"
+ },
+ {
+ "default": "0",
+ "description": "Stop all deployments on this server.",
+ "fieldname": "stop_deployments",
+ "fieldtype": "Check",
+ "label": "Stop Deployments"
+ },
+ {
+ "default": "0",
+ "fieldname": "keep_files_on_server_in_offsite_backup",
+ "fieldtype": "Check",
+ "label": "Keep Backup Files Onsite"
+ },
+ {
+ "default": "0",
+ "fieldname": "is_for_recovery",
+ "fieldtype": "Check",
+ "label": "Is for Recovery"
+ },
+ {
+ "default": "0",
+ "fieldname": "enable_logical_replication_during_site_update",
+ "fieldtype": "Check",
+ "label": "Enable Logical Replication During Site Update"
+ },
+ {
+ "fieldname": "ignore_incidents_till",
+ "fieldtype": "Datetime",
+ "label": "Ignore Incidents Till"
+ },
+ {
+ "default": "0",
+ "fieldname": "tls_certificate_renewal_failed",
+ "fieldtype": "Check",
+ "label": "TLS Certificate Renewal Failed",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "is_static_ip",
+ "fieldtype": "Check",
+ "label": "Is Static IP",
+ "read_only": 1
+ },
+ {
+ "fieldname": "notifications_section",
+ "fieldtype": "Section Break",
+ "label": "Notifications"
+ },
+ {
+ "fieldname": "communication_infos",
+ "fieldtype": "Table",
+ "label": "Communication Infos",
+ "options": "Communication Info"
+ },
+ {
+ "fieldname": "bastion_server",
+ "fieldtype": "Link",
+ "label": "Bastion Server",
+ "options": "Bastion Server"
+ },
+ {
+ "description": "Used during horizontal scaling.",
+ "fieldname": "secondary_server",
+ "fieldtype": "Link",
+ "label": "Secondary Server",
+ "options": "Server",
+ "read_only": 1
+ },
+ {
+ "collapsible": 1,
+ "fieldname": "auto_scale_section",
+ "fieldtype": "Section Break",
+ "label": "Auto Scale"
+ },
+ {
+ "default": "0",
+ "fieldname": "is_monitoring_disabled",
+ "fieldtype": "Check",
+ "label": "Is Monitoring Disabled",
+ "search_index": 1
+ },
+ {
+ "default": "0",
+ "description": "Is this a secondary server",
+ "fieldname": "is_secondary",
+ "fieldtype": "Check",
+ "label": "Is Secondary",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "description": "Are the benches running on a shared volume",
+ "fieldname": "benches_on_shared_volume",
+ "fieldtype": "Check",
+ "label": "Benches on Shared Volume",
+ "read_only": 1
+ },
+ {
+ "fieldname": "auto_scale_trigger",
+ "fieldtype": "Table",
+ "label": "Auto Scale Trigger",
+ "options": "Auto Scale Trigger"
+ },
+ {
+ "default": "0",
+ "description": "Check if the benches are running on the secondary server",
+ "fieldname": "scaled_up",
+ "fieldtype": "Check",
+ "label": "Scaled Up",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_ywnx",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "description": "Checked if database and app are hosted on the server server",
+ "fieldname": "is_unified_server",
+ "fieldtype": "Check",
+ "label": "Is Unified Server"
+ },
+ {
+ "default": "0",
+ "fieldname": "is_provisioning_press_job_completed",
+ "fieldtype": "Check",
+ "label": "Is Provisioning Job Completed"
+ }
+ ],
+ "links": [
+ {
+ "link_doctype": "Auto Scale Record",
+ "link_fieldname": "primary_server"
}
],
- "links": [],
- "modified": "2023-07-02 14:59:31.104759",
+ "modified": "2026-01-23 10:19:12.514309",
"modified_by": "Administrator",
"module": "Press",
"name": "Server",
@@ -450,8 +780,11 @@
"write": 1
}
],
+ "row_format": "Dynamic",
+ "rows_threshold_for_grid_search": 20,
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
+ "title_field": "title",
"track_changes": 1
}
\ No newline at end of file
diff --git a/press/press/doctype/server/server.py b/press/press/doctype/server/server.py
index 9f33f053ab0..999ef6e2bff 100644
--- a/press/press/doctype/server/server.py
+++ b/press/press/doctype/server/server.py
@@ -1,29 +1,513 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
+import datetime
+import ipaddress
+import json
+import shlex
+import typing
+from contextlib import suppress
+from datetime import timedelta
+from functools import cached_property
+
+import boto3
import frappe
+import semantic_version
from frappe import _
+from frappe.core.utils import find, find_all
+from frappe.installer import subprocess
from frappe.model.document import Document
+from frappe.utils.password import get_decrypted_password
+from frappe.utils.synchronization import filelock
+from frappe.utils.user import is_system_user
+
from press.agent import Agent
-from press.runner import Ansible
-from press.utils import log_error
-from frappe.core.utils import find
+from press.api.client import dashboard_whitelist
+from press.exceptions import VolumeResizeLimitError
+from press.guards import role_guard
from press.overrides import get_permission_query_conditions_for_doctype
-from frappe.utils.user import is_system_user
+from press.press.doctype.add_on_storage_log.add_on_storage_log import (
+ insert_addon_storage_log,
+)
+from press.press.doctype.ansible_console.ansible_console import AnsibleAdHoc
+from press.press.doctype.auto_scale_record.auto_scale_record import (
+ create_prometheus_rule_for_scaling,
+ is_secondary_ready_for_scale_down,
+ update_or_delete_prometheus_rule_for_scaling,
+)
+from press.press.doctype.communication_info.communication_info import get_communication_info
+from press.press.doctype.resource_tag.tag_helpers import TagHelpers
+from press.press.doctype.server_activity.server_activity import log_server_activity
+from press.press.doctype.telegram_message.telegram_message import TelegramMessage
+from press.runner import Ansible
+from press.utils import fmt_timedelta, log_error
+
+if typing.TYPE_CHECKING:
+ from press.infrastructure.doctype.arm_build_record.arm_build_record import ARMBuildRecord
+ from press.press.doctype.agent_job.agent_job import AgentJob
+ from press.press.doctype.ansible_play.ansible_play import AnsiblePlay
+ from press.press.doctype.auto_scale_record.auto_scale_record import AutoScaleRecord
+ from press.press.doctype.bench.bench import Bench
+ from press.press.doctype.cluster.cluster import Cluster
+ from press.press.doctype.database_server.database_server import DatabaseServer
+ from press.press.doctype.mariadb_variable.mariadb_variable import MariaDBVariable
+ from press.press.doctype.nfs_volume_detachment.nfs_volume_detachment import NFSVolumeDetachment
+ from press.press.doctype.press_job.press_job import PressJob
+ from press.press.doctype.release_group.release_group import ReleaseGroup
+ from press.press.doctype.server_mount.server_mount import ServerMount
+ from press.press.doctype.server_plan.server_plan import ServerPlan
+ from press.press.doctype.virtual_machine.virtual_machine import VirtualMachine
+ from press.press.doctype.virtual_machine_volume.virtual_machine_volume import VirtualMachineVolume
+
+
+from typing import Literal, TypedDict
+
+
+class BenchInfoType(TypedDict):
+ name: str
+ build: str
+ candidate: str
+
+
+class ARMDockerImageType(TypedDict):
+ build: str | None
+ status: Literal["Pending", "Preparing", "Running", "Failure", "Success"]
+ bench: str
+
+
+class AutoScaleTriggerRow(TypedDict):
+ metric: Literal["CPU", "Memory"]
+ action: Literal["Scale Up", "Scale Down"]
+
+
+PUBLIC_SERVER_AUTO_ADD_STORAGE_MIN = 50
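+# Data-volume mount points used when a server has a separate data disk.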
+MARIADB_DATA_MNT_POINT = "/opt/volumes/mariadb"
+BENCH_DATA_MNT_POINT = "/opt/volumes/benches"
+
+
+class BaseServer(Document, TagHelpers):
+ dashboard_fields = (
+ "title",
+ "plan",
+ "cluster",
+ "provider",
+ "status",
+ "team",
+ "database_server",
+ "is_self_hosted",
+ "auto_add_storage_min",
+ "auto_add_storage_max",
+ "auto_increase_storage",
+ "is_monitoring_disabled",
+ "auto_purge_binlog_based_on_size",
+ "binlog_max_disk_usage_percent",
+ "is_monitoring_disabled",
+ "is_provisioning_press_job_completed",
+ "is_unified_server",
+ )
+
+ @staticmethod
+ def get_list_query(query, filters=None, **list_args):
+ Server = frappe.qb.DocType("Server")
+
+ status = filters.get("status")
+ if status == "Archived":
+ query = query.where(Server.status == status)
+ else:
+ # Show only Active and Installing servers ignore pending (secondary server)
+ query = query.where(
+ Server.status.isin(["Active", "Installing", "Broken"])
+ | ((Server.status == "Pending") & (Server.is_secondary != 1))
+ )
-from typing import List, Union
-import boto3
-import json
+ query = query.where(Server.is_for_recovery != 1).where(Server.team == frappe.local.team().name)
+ results = query.run(as_dict=True)
+
+ for result in results:
+ db_plan_name = frappe.db.get_value("Database Server", result.database_server, "plan")
+ result.db_plan = (
+ frappe.db.get_value(
+ "Server Plan", db_plan_name, ["title", "price_inr", "price_usd"], as_dict=True
+ )
+ if db_plan_name
+ else None
+ )
+
+ return results
+
+ @property
+ def _series(self):
+ return self.name[0]
+
+ def create_log(self, action: str, reason: str):
+ """Helper to log server activity"""
+ log_server_activity(self._series, self.name, action, reason)
+
+ def get_doc(self, doc): # noqa: C901
+ from press.api.client import get
+ from press.api.server import usage
+
+ warn_at_storage_percentage = 0.8
+
+ if doc.status in ("Active", "Pending") and not doc.is_provisioning_press_job_completed:
+ doc.status = "Installing"
+
+ if doc.database_server:
+ data = frappe.get_value(
+ "Database Server",
+ doc.database_server,
+ ["status", "is_provisioning_press_job_completed"],
+ as_dict=True,
+ )
+ if data and data.status in ("Active", "Pending") and not data.is_provisioning_press_job_completed:
+ doc.status = "Installing"
+
+ if self.plan:
+ doc.current_plan = get("Server Plan", self.plan)
+ if doc.current_plan and not doc.current_plan.get("plan_type"):
+ doc.current_plan["plan_type"] = frappe.db.get_single_value(
+ "Press Settings", "default_server_plan_type"
+ )
+ else:
+ if virtual_machine := frappe.db.get_value(
+ "Virtual Machine", self.virtual_machine, ["vcpu", "ram", "disk_size"], as_dict=True
+ ):
+ doc.current_plan = {
+ "vcpu": virtual_machine.vcpu,
+ "memory": virtual_machine.ram,
+ "disk": virtual_machine.disk_size,
+ }
+
+ doc.storage_plan = frappe.db.get_value(
+ "Server Storage Plan",
+ {"enabled": 1},
+ ["price_inr", "price_usd"],
+ as_dict=True,
+ )
+ doc.usage = usage(self.name)
+ doc.actions = self.get_actions()
+
+ if not self.is_self_hosted:
+ doc.disk_size = self.get_data_disk_size()
+
+ doc.communication_infos = self.get_communication_infos()
+
+ try:
+ doc.recommended_storage_increment = (
+ self.size_to_increase_by_for_20_percent_available(
+ mountpoint=self.guess_data_disk_mountpoint()
+ )
+ if (doc.usage.get("disk", 0) >= warn_at_storage_percentage * doc.disk_size)
+ else 0
+ )
+ except TypeError:
+ doc.recommended_storage_increment = 0
+
+ doc.replication_server = frappe.db.get_value(
+ "Database Server",
+ {"primary": doc.database_server, "is_replication_setup": 1},
+ "name",
+ )
+ doc.owner_email = frappe.db.get_value("Team", self.team, "user")
+
+ if self.doctype == "Server":
+ doc.secondary_server = self.secondary_server
+ doc.scaled_up = self.scaled_up
+
+ return doc
+
+ @dashboard_whitelist()
+ def get_communication_infos(self):
+ return (
+ [{"channel": c.channel, "type": c.type, "value": c.value} for c in self.communication_infos]
+ if hasattr(self, "communication_infos")
+ else []
+ )
+
+ @dashboard_whitelist()
+ def update_communication_infos(self, values: list[dict]):
+ if self.doctype != "Server":
+ frappe.throw("Setting up communication info is only allowed for App Server")
+ return
+
+ from press.press.doctype.communication_info.communication_info import (
+ update_communication_infos as update_infos,
+ )
+
+ update_infos("Server", self.name, values)
+
+ @dashboard_whitelist()
+ def get_storage_usage(self):
+ """Get storage usage of the application server"""
+ try:
+ return self.agent.get("/server/storage-breakdown")
+ except Exception:
+ frappe.throw("Failed to fetch storage usage. Try again later.")
+
+ @dashboard_whitelist()
+ def increase_disk_size_for_server(
+ self,
+ server: str | Server | DatabaseServer,
+ increment: int,
+ mountpoint: str | None = None,
+ is_auto_triggered: bool = False,
+ current_disk_usage: int | None = None,
+ ) -> None:
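+		# Record an Add On Storage Log entry, then grow the disk on either
+		# this server or the linked database server.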
+ add_on_storage_log = None
+ storage_parameters = {
+ "doctype": "Add On Storage Log",
+ "adding_storage": increment,
+			"is_auto_triggered": is_auto_triggered,
+ }
+
+ if not isinstance(server, str):
+ server = server.name
+
+ storage_parameters.update({"database_server" if server[0] == "m" else "server": server})
+
+ if server == self.name:
+ mountpoint = mountpoint or self.guess_data_disk_mountpoint()
+ storage_parameters.update(
+ {
+ "available_disk_space": round((self.disk_capacity(mountpoint) / 1024 / 1024 / 1024), 2),
+ "current_disk_usage": current_disk_usage
+ or round(
+ (self.disk_capacity(mountpoint) - self.free_space(mountpoint)) / 1024 / 1024 / 1024, 2
+ ),
+ "mountpoint": mountpoint,
+ }
+ )
+ if increment:
+ add_on_storage_log = insert_addon_storage_log(
+ adding_storage=increment,
+ available_disk_space=round((self.disk_capacity(mountpoint) / 1024 / 1024 / 1024), 2),
+ current_disk_usage=current_disk_usage
+ or round(
+ (self.disk_capacity(mountpoint) - self.free_space(mountpoint)) / 1024 / 1024 / 1024, 2
+ ),
+ mountpoint=mountpoint,
+ is_auto_triggered=is_auto_triggered,
+ is_warning=False,
+ server=storage_parameters.get("server"),
+ database_server=storage_parameters.get("database_server"),
+ )
+
+ self.increase_disk_size(
+ increment=increment,
+ mountpoint=mountpoint,
+ log=add_on_storage_log.name if add_on_storage_log else None,
+ )
+ else:
+ server_doc: DatabaseServer = frappe.get_doc("Database Server", server)
+ mountpoint = (
+ mountpoint or server_doc.guess_data_disk_mountpoint()
+			)  # server refers to a Database Server here (names start with "m")
+ storage_parameters.update(
+ {
+ "available_disk_space": round((self.disk_capacity(mountpoint) / 1024 / 1024 / 1024), 2),
+ "current_disk_usage": current_disk_usage
+ or round(
+ (self.disk_capacity(mountpoint) - self.free_space(mountpoint)) / 1024 / 1024 / 1024, 2
+ ),
+ "mountpoint": mountpoint,
+ }
+ )
+ if increment:
+ add_on_storage_log = insert_addon_storage_log(
+ adding_storage=increment,
+ available_disk_space=round((self.disk_capacity(mountpoint) / 1024 / 1024 / 1024), 2),
+ current_disk_usage=current_disk_usage
+ or round(
+ (self.disk_capacity(mountpoint) - self.free_space(mountpoint)) / 1024 / 1024 / 1024, 2
+ ),
+ mountpoint=mountpoint,
+ is_auto_triggered=is_auto_triggered,
+ is_warning=False,
+ server=storage_parameters.get("server"),
+ database_server=storage_parameters.get("database_server"),
+ )
+
+ server_doc.increase_disk_size(
+ increment=increment,
+ mountpoint=mountpoint,
+ log=add_on_storage_log.name if add_on_storage_log else None,
+ )
+
+ @dashboard_whitelist()
+ def configure_auto_add_storage(self, server: str, enabled: bool, min: int = 0, max: int = 0) -> None:
+ if not enabled:
+ frappe.db.set_value(self.doctype, self.name, "auto_increase_storage", False)
+ return
+
+ if min < 0 or max < 0:
+ frappe.throw(_("Minimum and maximum storage sizes must be positive"))
+ if min > max:
+ frappe.throw(_("Minimum storage size must be less than the maximum storage size"))
+
+ if server == self.name:
+ self.auto_increase_storage = True
+ self.auto_add_storage_min = min
+ self.auto_add_storage_max = max
+ self.save()
+ else:
+ server_doc = frappe.get_doc("Database Server", server)
+ server_doc.auto_increase_storage = True
+ server_doc.auto_add_storage_min = min
+ server_doc.auto_add_storage_max = max
+ server_doc.save()
+
+ @staticmethod
+ def on_not_found(name):
+ # If name is of a db server then redirect to the app server
+ app_server = frappe.db.get_value("Server", {"database_server": name}, "name")
+ if app_server:
+ frappe.response.message = {
+ "redirect": f"/dashboard/servers/{app_server}",
+ }
+ raise
+
+ def _get_clusters_with_autoscale_support(self) -> list[str]:
+ """Get clusters which have autoscaling enabled"""
+ return frappe.db.get_all("Cluster", {"enable_autoscaling": 1}, pluck="name")
+
+ def get_actions(self):
+ server_type = ""
+ if self.doctype == "Server":
+ server_type = "application server" if not getattr(self, "is_unified_server", False) else "server"
+ elif self.doctype == "Database Server":
+ if self.is_replication_setup:
+ server_type = "replication server"
+ else:
+ server_type = (
+ "database server" if not getattr(self, "is_unified_server", False) else "database"
+ )
+
+ actions = [
+ {
+ "action": "Rename server",
+ "description": f"Rename the {server_type}",
+ "button_label": "Rename",
+ "condition": self.status == "Active",
+ "doc_method": "rename",
+ "group": f"{server_type.title()} Actions",
+ },
+ {
+ "action": "Reboot server",
+ "description": f"Reboot the {server_type}",
+ "button_label": "Reboot",
+ "condition": self.should_show_reboot(),
+ "doc_method": "reboot",
+ "group": f"{server_type.title()} Actions",
+ },
+ {
+ "action": "Cleanup Server",
+ "description": f"Cleanup unused files on the {server_type}",
+ "button_label": "Cleanup",
+ "condition": self.status == "Active" and self.doctype == "Server",
+ "doc_method": "cleanup_unused_files",
+ "group": f"{server_type.title()} Actions",
+ },
+ {
+ "action": "Enable Autoscale",
+ "description": "Setup a secondary application server to autoscale to during high loads",
+ "button_label": "Enable",
+ "condition": self.status == "Active"
+ and self.doctype == "Server"
+ and not self.secondary_server
+ and not getattr(self, "is_unified_server", False)
+ and self.cluster in self._get_clusters_with_autoscale_support(),
+ "group": "Application Server Actions",
+ },
+ {
+ "action": "Disable Autoscale",
+ "description": "Turn off autoscaling and remove the secondary application server.",
+ "button_label": "Disable",
+ "condition": (
+ self.status == "Active"
+ and self.doctype == "Server"
+ # Only applicable for primary application servers
+ and self.secondary_server
+ and self.benches_on_shared_volume
+ ),
+ "group": "Application Server Actions",
+ },
+ {
+ "action": "Drop server",
+ "description": "Drop both the application and database servers"
+ if not getattr(self, "is_unified_server", False)
+ else "Drop the unifed server",
+ "button_label": "Drop",
+ "condition": self.status == "Active" and self.doctype == "Server",
+ "doc_method": "drop_server",
+ "group": "Dangerous Actions",
+ },
+ ]
+
+ for action in actions:
+ action["server_doctype"] = self.doctype
+ action["server_name"] = self.name
+
+ return [action for action in actions if action.get("condition", True)]
+
+ def should_show_reboot(self) -> bool:
+ if self.doctype == "Server":
+ return True
+
+ if self.doctype == "Database Server":
+ return bool(not getattr(self, "is_unified_server", False))
+
+ return False
+
+ def get_data_disk_size(self) -> int:
+ """Get servers data disk size"""
+ mountpoint = self.guess_data_disk_mountpoint()
+ volume = self.find_mountpoint_volume(mountpoint)
+
+ if not volume: # Volume might not be attached as soon as the server is created
+ return 0
+
+ return frappe.db.get_value(
+ "Virtual Machine Volume", {"volume_id": volume.volume_id, "parent": volume.parent}, "size"
+ )
+
+ def _get_app_and_database_servers(self) -> tuple[Server, DatabaseServer]:
+ if self.doctype == "Database Server":
+ app_server_name = frappe.db.get_value("Server", {"database_server": self.name}, "name")
+ app_server = frappe.get_doc("Server", app_server_name)
+ return app_server, self
+
+ db_server = frappe.get_doc("Database Server", self.database_server)
+ return self, db_server
+
+ @dashboard_whitelist()
+ def drop_server(self):
+ app_server, db_server = self._get_app_and_database_servers()
+ app_server.archive()
+
+ # Don't need to archive db server explicitly if it's a unified server
+ if app_server.is_unified_server:
+ return
+
+ db_server.archive()
+
+	@dashboard_whitelist()
+ def toggle_auto_increase_storage(self, enable: bool):
+ """Toggle auto disk increase."""
+ app_server, database_server = self._get_app_and_database_servers()
+
+ app_server.auto_increase_storage = enable
+ database_server.auto_increase_storage = enable
+
+ app_server.save()
+ database_server.save()
-class BaseServer(Document):
def autoname(self):
if not self.domain:
self.domain = frappe.db.get_single_value("Press Settings", "domain")
self.name = f"{self.hostname}.{self.domain}"
- if self.is_self_hosted:
+ if self.doctype in ["Database Server", "Server", "Proxy Server"] and self.is_self_hosted:
self.name = f"{self.hostname}.{self.self_hosted_server_domain}"
def validate(self):
@@ -32,26 +516,40 @@ def validate(self):
if self.doctype == "Database Server" and not self.self_hosted_mariadb_server:
self.self_hosted_mariadb_server = self.private_ip
+ if not self.hostname_abbreviation:
+ self._set_hostname_abbreviation()
+
+ self.validate_mounts()
+
+ def _set_hostname_abbreviation(self):
+ self.hostname_abbreviation = get_hostname_abbreviation(self.hostname)
+
def after_insert(self):
- if self.ip and not self.is_self_hosted:
+ if self.ip and (
+ self.doctype not in ["Database Server", "Server", "Proxy Server"] or not self.is_self_hosted
+ ):
self.create_dns_record()
self.update_virtual_machine_name()
+ @frappe.whitelist()
def create_dns_record(self):
try:
domain = frappe.get_doc("Root Domain", self.domain)
+
+ if domain.generic_dns_provider:
+ return
+
client = boto3.client(
"route53",
aws_access_key_id=domain.aws_access_key_id,
aws_secret_access_key=domain.get_password("aws_secret_access_key"),
+ region_name=domain.aws_region,
)
zones = client.list_hosted_zones_by_name()["HostedZones"]
# list_hosted_zones_by_name returns a lexicographically ordered list of zones
# i.e. x.example.com comes after example.com
# Name field has a trailing dot
- hosted_zone = find(reversed(zones), lambda x: domain.name.endswith(x["Name"][:-1]))[
- "Id"
- ]
+ hosted_zone = find(reversed(zones), lambda x: domain.name.endswith(x["Name"][:-1]))["Id"]
client.change_resource_record_sets(
ChangeBatch={
"Changes": [
@@ -71,6 +569,71 @@ def create_dns_record(self):
except Exception:
log_error("Route 53 Record Creation Error", domain=domain.name, server=self.name)
+ def add_to_public_groups(self):
+ groups = frappe.get_all("Release Group", {"public": True, "enabled": True}, "name")
+ for group_name in groups:
+ group: ReleaseGroup = frappe.get_doc("Release Group", group_name)
+ with suppress(frappe.ValidationError):
+ group.add_server(str(self.name), deploy=True)
+
+ @frappe.whitelist()
+ def enable_for_new_benches_and_sites(self):
+ if not self.public:
+ frappe.throw("Action only allowed for public servers")
+
+ server = self.get_server_enabled_for_new_benches_and_sites()
+ self.add_to_public_groups()
+ if server:
+ frappe.msgprint(_("Server {0} is already enabled for new benches and sites").format(server))
+
+ else:
+ self.use_for_new_benches = True
+ self.use_for_new_sites = True
+ self.save()
+
+ def get_server_enabled_for_new_benches_and_sites(self):
+ return frappe.db.get_value(
+ "Server",
+ {
+ "name": ("!=", self.name),
+ "is_primary": True,
+ "status": "Active",
+ "use_for_new_benches": True,
+ "use_for_new_sites": True,
+ "public": True,
+ "cluster": self.cluster,
+ },
+ pluck=True,
+ )
+
+ @frappe.whitelist()
+ def disable_for_new_benches_and_sites(self):
+ self.use_for_new_benches = False
+ self.use_for_new_sites = False
+ self.save()
+
+ def remove_from_public_groups(self, force=False):
+ groups: list[str] = frappe.get_all(
+ "Release Group",
+ {
+ "public": True,
+ "enabled": True,
+ },
+ pluck="name",
+ )
+ active_benches_groups: list[str] = frappe.get_all(
+ "Bench", {"status": "Active", "group": ("in", groups), "server": self.name}, pluck="group"
+ )
+ parent_filter = {"parent": ("in", groups)}
+ if not force:
+ parent_filter = {"parent": ("in", set(groups) - set(active_benches_groups))}
+
+ frappe.db.delete(
+ "Release Group Server",
+ {"server": self.name, **parent_filter},
+ pluck="parent",
+ )
+
def validate_cluster(self):
if not self.cluster:
self.cluster = frappe.db.get_value("Root Domain", self.domain, "default_cluster")
@@ -78,20 +641,37 @@ def validate_cluster(self):
frappe.throw("Default Cluster not found", frappe.ValidationError)
def validate_agent_password(self):
+ # In case of unified servers the agent password is set during creation of the virtual machine
if not self.agent_password:
self.agent_password = frappe.generate_hash(length=32)
def get_agent_repository_url(self):
settings = frappe.get_single("Press Settings")
repository_owner = settings.agent_repository_owner or "frappe"
- url = f"https://github.com/{repository_owner}/agent"
- return url
+ return f"https://github.com/{repository_owner}/agent"
+
+ def get_agent_repository_branch(self):
+ settings = frappe.get_single("Press Settings")
+ return settings.branch or "master"
@frappe.whitelist()
def ping_agent(self):
agent = Agent(self.name, self.doctype)
return agent.ping()
+ @frappe.whitelist()
+ def ping_mariadb(self) -> bool:
+ try:
+ agent = Agent(self.name, self.doctype)
+ return agent.ping_database(self).get("reachable")
+ except Exception:
+ return False
+
+ @frappe.whitelist()
+ def ping_agent_job(self):
+ agent = Agent(self.name, self.doctype)
+ return agent.create_agent_job("Ping Job", "ping_job").name
+
@frappe.whitelist()
def update_agent(self):
agent = Agent(self.name, self.doctype)
@@ -99,12 +679,15 @@ def update_agent(self):
@frappe.whitelist()
def prepare_server(self):
- frappe.enqueue_doc(
- self.doctype, self.name, "_prepare_server", queue="long", timeout=2400
- )
+ if self.provider == "Generic":
+ self._prepare_server()
+ else:
+ frappe.enqueue_doc(self.doctype, self.name, "_prepare_server", queue="long", timeout=2400)
def _prepare_server(self):
try:
+ ansible = None
+
if self.provider == "Scaleway":
ansible = Ansible(
playbook="scaleway.yml",
@@ -118,35 +701,109 @@ def _prepare_server(self):
)
elif self.provider == "AWS EC2":
ansible = Ansible(playbook="aws.yml", server=self, user="ubuntu")
+ elif self.provider == "OCI":
+ ansible = Ansible(playbook="oci.yml", server=self, user="ubuntu")
+ elif self.provider == "Vodacom":
+ ansible = Ansible(playbook="vodacom.yml", server=self, user="ubuntu")
+
+ if self.provider != "Generic" and ansible:
+ ansible.run()
- ansible.run()
self.reload()
self.is_server_prepared = True
self.save()
except Exception:
log_error("Server Preparation Exception", server=self.as_dict())
+ @frappe.whitelist()
+ def setup_unified_server(self):
+ """Setup both the application server and its associated database server (unified plays on vm)."""
+ frappe.enqueue_doc(self.doctype, self.name, "_setup_unified_server", queue="long", timeout=2400)
+
+ def _setup_unified_server(self):
+ agent_password = self.get_password("agent_password")
+ agent_repository_url = self.get_agent_repository_url()
+ agent_branch = self.get_agent_repository_branch()
+ certificate = self.get_certificate()
+ log_server, kibana_password = self.get_log_server()
+ agent_sentry_dsn = frappe.db.get_single_value("Press Settings", "agent_sentry_dsn")
+ database_server: DatabaseServer = frappe.get_doc("Database Server", self.database_server)
+ database_server_config = database_server._get_config()
+
+ self.status = "Installing"
+ database_server.status = "Installing"
+ self.save()
+ database_server.save()
+
+ try:
+ ansible = Ansible(
+ playbook="unified_server.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ variables={
+ "server": self.name,
+ "private_ip": self.private_ip,
+ "proxy_ip": self.get_proxy_ip(),
+ "workers": "2",
+ "agent_password": agent_password,
+ "agent_repository_url": agent_repository_url,
+ "agent_branch": agent_branch,
+ "agent_sentry_dsn": agent_sentry_dsn,
+ "monitoring_password": self.get_monitoring_password(),
+ "log_server": log_server,
+ "kibana_password": kibana_password,
+ "certificate_private_key": certificate.private_key,
+ "certificate_full_chain": certificate.full_chain,
+ "certificate_intermediate_chain": certificate.intermediate_chain,
+ "docker_depends_on_mounts": self.docker_depends_on_mounts,
+ "db_port": database_server.db_port,
+ "agent_repository_branch_or_commit_ref": self.get_agent_repository_branch(),
+ "agent_update_args": " --skip-repo-setup=true",
+ "server_id": database_server.server_id,
+ "allocator": database_server.memory_allocator.lower(),
+ "mariadb_root_password": database_server_config.mariadb_root_password,
+ "mariadb_depends_on_mounts": database_server_config.mariadb_depends_on_mounts,
+ **self.get_mount_variables(), # Currently same as database server since no volumes
+ },
+ )
+ play = ansible.run()
+ self.reload()
+ database_server = database_server.reload()
+
+ if play.status == "Success":
+ self.status = "Active"
+ database_server.status = "Active"
+ else:
+ self.status = "Broken"
+ database_server.status = "Broken"
+ except Exception:
+ self.status = "Broken"
+ database_server.status = "Broken"
+ log_error("Unified Server Setup Exception", server=self.as_dict())
+
+ self.save()
+ database_server.save()
+
@frappe.whitelist()
def setup_server(self):
self.status = "Installing"
self.save()
- frappe.enqueue_doc(
- self.doctype, self.name, "_setup_server", queue="long", timeout=2400
- )
+ frappe.enqueue_doc(self.doctype, self.name, "_setup_server", queue="long", timeout=2400)
@frappe.whitelist()
def install_nginx(self):
self.status = "Installing"
self.save()
- frappe.enqueue_doc(
- self.doctype, self.name, "_install_nginx", queue="long", timeout=1200
- )
+ frappe.enqueue_doc(self.doctype, self.name, "_install_nginx", queue="long", timeout=1200)
def _install_nginx(self):
try:
ansible = Ansible(
playbook="nginx.yml",
server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
)
play = ansible.run()
self.reload()
@@ -161,16 +818,12 @@ def _install_nginx(self):
@frappe.whitelist()
def install_filebeat(self):
- frappe.enqueue_doc(
- self.doctype, self.name, "_install_filebeat", queue="long", timeout=1200
- )
+ frappe.enqueue_doc(self.doctype, self.name, "_install_filebeat", queue="long", timeout=1200)
def _install_filebeat(self):
log_server = frappe.db.get_single_value("Press Settings", "log_server")
if log_server:
- kibana_password = frappe.get_doc("Log Server", log_server).get_password(
- "kibana_password"
- )
+ kibana_password = frappe.get_doc("Log Server", log_server).get_password("kibana_password")
else:
kibana_password = None
@@ -178,9 +831,10 @@ def _install_filebeat(self):
ansible = Ansible(
playbook="filebeat.yml",
server=self,
- user=self.ssh_user or "root",
- port=self.ssh_port or 22,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
variables={
+ "server_type": self.doctype,
"server": self.name,
"log_server": log_server,
"kibana_password": kibana_password,
@@ -192,9 +846,7 @@ def _install_filebeat(self):
@frappe.whitelist()
def install_exporters(self):
- frappe.enqueue_doc(
- self.doctype, self.name, "_install_exporters", queue="long", timeout=1200
- )
+ frappe.enqueue_doc(self.doctype, self.name, "_install_exporters", queue="long", timeout=1200)
@frappe.whitelist()
def ping_ansible(self):
@@ -202,8 +854,8 @@ def ping_ansible(self):
ansible = Ansible(
playbook="ping.yml",
server=self,
- user=self.ssh_user or "root",
- port=self.ssh_port or 22,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
)
ansible.run()
except Exception:
@@ -215,12 +867,21 @@ def update_agent_ansible(self):
def _update_agent_ansible(self):
try:
+ agent_branch = frappe.get_value("Press Settings", "Press Settings", "branch")
+ if not agent_branch:
+ agent_branch = "upstream/master"
+ else:
+ agent_branch = f"upstream/{agent_branch}"
ansible = Ansible(
playbook="update_agent.yml",
- variables={"agent_repository_url": self.get_agent_repository_url()},
+ variables={
+ "agent_repository_url": self.get_agent_repository_url(),
+ "agent_repository_branch_or_commit_ref": agent_branch,
+ "agent_update_args": " --skip-repo-setup=true",
+ },
server=self,
- user=self.ssh_user or "root",
- port=self.ssh_port or 22,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
)
ansible.run()
except Exception:
@@ -237,54 +898,226 @@ def fetch_keys(self):
@frappe.whitelist()
def ping_ansible_unprepared(self):
try:
- if self.provider == "Scaleway":
+ if self.provider == "Scaleway" or self.provider in ("AWS EC2", "OCI"):
ansible = Ansible(
playbook="ping.yml",
server=self,
user="ubuntu",
)
- elif self.provider == "AWS EC2":
- ansible = Ansible(playbook="ping.yml", server=self, user="ubuntu")
- ansible.run()
+ ansible.run()
except Exception:
log_error("Unprepared Server Ping Exception", server=self.as_dict())
+ @dashboard_whitelist()
@frappe.whitelist()
- def cleanup_unused_files(self):
- frappe.enqueue_doc(
- self.doctype, self.name, "_cleanup_unused_files", queue="long", timeout=2400
+ def cleanup_unused_files(self, force: bool = False):
+ if self.is_build_server():
+ return
+
+ with suppress(frappe.DoesNotExistError):
+ cleanup_job: "AgentJob" = frappe.get_last_doc(
+ "Agent Job", {"server": self.name, "job_type": "Cleanup Unused Files"}
+ )
+ if cleanup_job.status in ["Running", "Pending"]:
+ frappe.throw("Cleanup job is already running")
+
+ self._cleanup_unused_files(force=force)
+
+ def is_build_server(self) -> bool:
+ # Not a field in all subclasses
+ if getattr(self, "use_for_build", False):
+ return True
+
+ name = frappe.db.get_single_value("Press Settings", "build_server")
+ if name == self.name:
+ return True
+
+ # Whether build_server explicitly set on Release Group
+ count = frappe.db.count(
+ "Release Group",
+ {
+ "enabled": True,
+ "build_server": self.name,
+ },
)
+ if isinstance(count, (int, float)):
+ return count > 0
+ return False
- def _cleanup_unused_files(self):
+ def _cleanup_unused_files(self, force: bool = False):
agent = Agent(self.name, self.doctype)
- agent.cleanup_unused_files()
+ if agent.should_skip_requests():
+ return
+ agent.cleanup_unused_files(force)
def on_trash(self):
plays = frappe.get_all("Ansible Play", filters={"server": self.name})
for play in plays:
frappe.delete_doc("Ansible Play", play.name)
+ def break_glass(self):
+ """
+		Remove the glass file via a simple SSH command to free up space
+
+ Space is required for playbooks to run, growpart command, etc.
+ """
+ try:
+ subprocess.check_output(
+ shlex.split(
+ f"ssh -o BatchMode=yes -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null root@{self.ip} -t rm /root/glass"
+ ),
+ stderr=subprocess.STDOUT,
+ )
+ except subprocess.CalledProcessError as e:
+ frappe.log_error(
+ title="Error removing glassfile",
+ message=e.output.decode(),
+ reference_doctype=self.doctype,
+ reference_name=self.name,
+ )
+
+ def get_server_from_device(self, device: str) -> "BaseServer":
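+		# Map a block-device path to its volume ID, then to the server that
+		# owns that volume (either self or an NFS Server).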
+ if self.provider == "Hetzner":
+ volume_id = device.removeprefix("/dev/disk/by-id/scsi-0HC_Volume_")
+ else:
+ volume_id = device.removeprefix("/dev/disk/by-id/nvme-Amazon_Elastic_Block_Store_")
+ if not volume_id.startswith("vol-"):
+ volume_id = volume_id.replace("vol", "vol-", 1)
+
+ virtual_machine = frappe.get_value("Virtual Machine Volume", {"volume_id": volume_id}, "parent")
+
+ if virtual_machine == self.virtual_machine:
+ return self
+
+ nfs_server_name = frappe.get_value("NFS Server", {"virtual_machine": virtual_machine}, "name")
+ return frappe.get_doc("NFS Server", nfs_server_name)
+
@frappe.whitelist()
- def extend_ec2_volume(self):
- if self.provider != "AWS EC2":
+ def extend_ec2_volume(self, device=None, log: str | None = None):
+ if self.provider not in ("AWS EC2", "OCI"):
return
+ # Restart MariaDB if MariaDB disk is full
+ mountpoint = self.guess_data_disk_mountpoint()
+ restart_mariadb = self.doctype == "Database Server" and self.is_disk_full(
+ mountpoint
+ ) # check before breaking glass to ensure state of mariadb
+ self.break_glass()
+ if not device:
+			# Best guess: try extending the data volume
+ volume = self.find_mountpoint_volume(mountpoint)
+ assert volume is not None, "Volume not found"
+ assert volume.volume_id is not None, "Volume ID not found"
+ device = self.get_device_from_volume_id(volume.volume_id)
+
+ server = self.get_server_from_device(device)
+
try:
- ansible = Ansible(playbook="extend_ec2_volume.yml", server=self)
- ansible.run()
+ ansible = Ansible(
+ playbook="extend_ec2_volume.yml",
+ server=server,
+ user=server._ssh_user(),
+ port=server._ssh_port(),
+ variables={"restart_mariadb": restart_mariadb, "device": device},
+ )
+ play = ansible.run()
+ if log:
+ frappe.db.set_value("Add On Storage Log", log, "extend_ec2_play", play.name)
+ frappe.db.commit()
except Exception:
- log_error("EC2 Volume Extend Exception", server=self.as_dict())
+ log_error("EC2 Volume Extend Exception", server=server.as_dict())
- @frappe.whitelist()
- def increase_disk_size(self, increment=50):
+ def enqueue_extend_ec2_volume(self, device, log):
+ frappe.enqueue_doc(
+ self.doctype, self.name, "extend_ec2_volume", device=device, log=log, at_front=True, queue="long"
+ )
+
+ @cached_property
+ def time_to_wait_before_updating_volume(self) -> timedelta | int:
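+		# AWS limits EBS volume modifications to roughly one per volume
+		# every 6 hours, hence the cooldown below.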
if self.provider != "AWS EC2":
+ return 0
+
+ last_updated_at = frappe.get_value(
+ "Virtual Machine Volume",
+ {"parent": self.virtual_machine, "idx": 1}, # first volume is likely main
+ "last_updated_at",
+ )
+
+ if not last_updated_at:
+ return 0
+
+ diff = frappe.utils.now_datetime() - last_updated_at
+ return diff if diff < timedelta(hours=6) else 0
+
+ @frappe.whitelist()
+ def increase_disk_size(self, increment=50, mountpoint=None, log: str | None = None):
+ if self.provider not in ("AWS EC2", "OCI"):
return
- virtual_machine = frappe.get_doc("Virtual Machine", self.virtual_machine)
- virtual_machine.increase_disk_size(increment)
- self.extend_ec2_volume()
+ if self.provider == "AWS EC2" and self.time_to_wait_before_updating_volume:
+ frappe.throw(
+ f"Please wait {fmt_timedelta(self.time_to_wait_before_updating_volume)} before resizing volume",
+ VolumeResizeLimitError,
+ )
+ if not mountpoint:
+ mountpoint = self.guess_data_disk_mountpoint()
+
+ volume = self.find_mountpoint_volume(mountpoint)
+ assert volume is not None, f"Volume not found for mountpoint {mountpoint}"
+ # Get the parent of the volume directly instead of guessing.
+ assert volume.parent is not None, "Virtual Machine not found for volume"
+ assert volume.volume_id is not None, "Volume ID not found"
+ virtual_machine: "VirtualMachine" = frappe.get_doc("Virtual Machine", volume.parent)
+ virtual_machine.increase_disk_size(volume.volume_id, increment)
+ if self.provider == "AWS EC2":
+ device = self.get_device_from_volume_id(volume.volume_id)
+ self.enqueue_extend_ec2_volume(device, log)
+ elif self.provider == "OCI":
+ # TODO: Add support for volumes on OCI
+ # Non-boot volumes might not need resize
+ self.break_glass()
+ self.reboot()
+
+ def guess_data_disk_mountpoint(self) -> str:
+ if not hasattr(self, "has_data_volume") or not self.has_data_volume:
+ return "/"
+
+ volumes = self.get_volume_mounts()
+ if volumes or self.has_data_volume:
+ # Adding this condition since this method is called from both server and database server doctypes
+ if self.doctype == "Server":
+ mountpoint = BENCH_DATA_MNT_POINT
+ elif self.doctype == "Database Server":
+ mountpoint = MARIADB_DATA_MNT_POINT
+ else:
+ mountpoint = "/"
+ return mountpoint
+
+ def find_mountpoint_volume(self, mountpoint) -> "VirtualMachineVolume" | None:
+ volume_id = None
+ if self.provider == "Generic":
+ return None
+
+ machine: "VirtualMachine" = frappe.get_doc("Virtual Machine", self.virtual_machine)
+
+ if volume_id:
+ # Return the volume doc immediately
+ return find(machine.volumes, lambda x: x.volume_id == volume_id)
+
+ if len(machine.volumes) == 1:
+ # If there is only one volume,
+ # then all mountpoints are on the same volume
+ return machine.volumes[0]
+
+ volumes = self.get_volume_mounts()
+ volume = find(volumes, lambda x: x.mount_point == mountpoint)
+ if volume:
+ # If the volume is in `mounts`, that means it's a data volume
+ return volume
+ # Otherwise it's a root volume
+ return find(machine.volumes, lambda v: v.device == "/dev/sda1")
def update_virtual_machine_name(self):
- if self.provider != "AWS EC2":
- return
+ if self.provider not in ("AWS EC2", "OCI"):
+ return None
virtual_machine = frappe.get_doc("Virtual Machine", self.virtual_machine)
return virtual_machine.update_name_tag(self.name)
@@ -307,7 +1140,25 @@ def _create_initial_plan_change(self, plan):
@property
def subscription(self):
name = frappe.db.get_value(
- "Subscription", {"document_type": self.doctype, "document_name": self.name}
+ "Subscription",
+ {
+ "document_type": self.doctype,
+ "document_name": self.name,
+ "plan_type": "Server Plan",
+ "plan": self.plan,
+ },
+ )
+ return frappe.get_doc("Subscription", name) if name else None
+
+ @property
+ def add_on_storage_subscription(self):
+ name = frappe.db.get_value(
+ "Subscription",
+ {
+ "document_type": self.doctype,
+ "document_name": self.name,
+ "plan_type": "Server Storage Plan",
+ },
)
return frappe.get_doc("Subscription", name) if name else None
@@ -315,28 +1166,62 @@ def subscription(self):
def rename_server(self):
self.status = "Installing"
self.save()
- frappe.enqueue_doc(
- self.doctype, self.name, "_rename_server", queue="long", timeout=2400
- )
+ frappe.enqueue_doc(self.doctype, self.name, "_rename_server", queue="long", timeout=2400)
@frappe.whitelist()
- def archive(self):
+ def archive(self): # noqa: C901
+ if frappe.db.exists(
+ "Press Job",
+ {
+ "job_type": "Archive Server",
+ "server": self.name,
+ "server_type": self.doctype,
+ "status": "Success",
+ },
+ ):
+ if self.status != "Archived":
+ self.status = "Archived"
+ self.save()
+
+ frappe.msgprint(_("Server {0} has already been archived.").format(self.name))
+ return
+
+ if self.virtual_machine:
+ vm_status = frappe.db.get_value("Virtual Machine", self.virtual_machine, "status")
+ if vm_status == "Terminated":
+ self.status = "Archived"
+ self.save()
+ return
+
if frappe.get_all(
"Site",
filters={"server": self.name, "status": ("!=", "Archived")},
ignore_ifnull=True,
):
- frappe.throw(_("Cannot archive server with sites"))
+ frappe.throw(
+ _("Cannot archive server with sites. Please drop them from their respective dashboards.")
+ )
if frappe.get_all(
"Bench",
filters={"server": self.name, "status": ("!=", "Archived")},
ignore_ifnull=True,
):
- frappe.throw(_("Cannot archive server with benches"))
+ frappe.throw(
+ _("Cannot archive server with benches. Please drop them from their respective dashboards.")
+ )
+
self.status = "Pending"
self.save()
- frappe.enqueue_doc(self.doctype, self.name, "_archive", queue="long")
- self.disable_subscription()
+ if self.is_self_hosted:
+ self.status = "Archived"
+ self.save()
+
+ if self.doctype == "Server":
+ frappe.db.set_value("Self Hosted Server", {"server": self.name}, "status", "Archived")
+
+ else:
+ frappe.enqueue_doc(self.doctype, self.name, "_archive", queue="long")
+ self.disable_subscription()
def _archive(self):
self.run_press_job("Archive Server")
@@ -346,7 +1231,14 @@ def disable_subscription(self):
if subscription:
subscription.disable()
- def can_change_plan(self, ignore_card_setup):
+ # disable add-on storage subscription
+ add_on_storage_subscription = self.add_on_storage_subscription
+ if add_on_storage_subscription:
+ add_on_storage_subscription.disable()
+
+ def can_change_plan( # noqa: C901
+ self, ignore_card_setup: bool, new_plan: ServerPlan, upgrade_disk: bool = False
+ ) -> None:
if is_system_user(frappe.session.user):
return
@@ -355,25 +1247,40 @@ def can_change_plan(self, ignore_card_setup):
team = frappe.get_doc("Team", self.team)
- if team.is_defaulter():
- frappe.throw("Cannot change plan because you have unpaid invoices")
+ if team.parent_team:
+ team = frappe.get_doc("Team", team.parent_team)
- if team.payment_mode == "Partner Credits" and (
- not team.get_available_partner_credits() > 0
- ):
- frappe.throw("Cannot change plan because you don't have sufficient partner credits")
+ if team.payment_mode == "Paid By Partner" and team.billing_team:
+ team = frappe.get_doc("Team", team.billing_team)
- if team.payment_mode != "Partner Credits" and not (
- team.default_payment_method or team.get_balance()
- ):
+ if not (team.default_payment_method or team.get_balance()):
+ frappe.throw("Cannot change plan because you haven't added a card and not have enough balance")
+
+ cluster: Cluster = frappe.get_doc("Cluster", self.cluster)
+ if not cluster.check_machine_availability(new_plan.instance_type):
frappe.throw(
- "Cannot change plan because you haven't added a card and not have enough balance"
+ f"Cannot change plan right now since the instance type {new_plan.instance_type} is not available. Try again later."
)
- @frappe.whitelist()
- def change_plan(self, plan, ignore_card_setup=False):
- self.can_change_plan(ignore_card_setup)
- plan = frappe.get_doc("Plan", plan)
+ if self.provider == "Hetzner" and self.plan and self.plan == new_plan.name and upgrade_disk:
+ current_root_disk_size = frappe.db.get_value(
+ "Virtual Machine", self.virtual_machine, "root_disk_size"
+ )
+ if current_root_disk_size >= new_plan.disk:
+ frappe.throw(
+ "Cannot upgrade disk because the selected plan has the same or smaller disk size"
+ )
+
+ @dashboard_whitelist()
+ def change_plan(self, plan: str, ignore_card_setup=False, upgrade_disk: bool = False):
+ plan_doc: ServerPlan = frappe.get_doc("Server Plan", plan)
+ self.can_change_plan(ignore_card_setup, new_plan=plan_doc, upgrade_disk=upgrade_disk)
+ self._change_plan(plan_doc)
+ self.run_press_job(
+ "Resize Server", {"machine_type": plan_doc.instance_type, "upgrade_disk": upgrade_disk}
+ )
+
+ def _change_plan(self, plan):
self.ram = plan.memory
self.save()
self.reload()
@@ -386,13 +1293,12 @@ def change_plan(self, plan, ignore_card_setup=False):
"to_plan": plan.name,
}
).insert()
- self.run_press_job("Resize Server", {"machine_type": plan.instance_type})
@frappe.whitelist()
def create_image(self):
self.run_press_job("Create Server Snapshot")
- def run_press_job(self, job_name, arguments=None):
+ def run_press_job(self, job_name, arguments=None) -> PressJob:
if arguments is None:
arguments = {}
return frappe.get_doc(
@@ -406,125 +1312,1462 @@ def run_press_job(self, job_name, arguments=None):
}
).insert()
- def get_certificate(self):
- if self.is_self_hosted:
- certificate_name = frappe.db.get_value(
- "TLS Certificate",
- {"domain": f"{self.hostname}.{self.self_hosted_server_domain}"},
- "name",
- )
- else:
- certificate_name = frappe.db.get_value(
- "TLS Certificate", {"wildcard": True, "domain": self.domain}, "name"
- )
- return frappe.get_doc("TLS Certificate", certificate_name)
+ def get_certificate(self):
+ certificate_name = frappe.db.get_value(
+ "TLS Certificate", {"wildcard": True, "domain": self.domain}, "name"
+ )
+
+ if not certificate_name and self.is_self_hosted:
+ certificate_name = frappe.db.get_value("TLS Certificate", {"domain": f"{self.name}"}, "name")
+
+ if not certificate_name:
+ self_hosted_server = frappe.db.get_value(
+ "Self Hosted Server", {"server": self.name}, ["hostname", "domain"], as_dict=1
+ )
+
+ certificate_name = frappe.db.get_value(
+ "TLS Certificate",
+ {"domain": f"{self_hosted_server.hostname}.{self_hosted_server.domain}"},
+ "name",
+ )
+
+ return frappe.get_doc("TLS Certificate", certificate_name)
+
+ def get_log_server(self):
+ log_server = frappe.db.get_single_value("Press Settings", "log_server")
+ if log_server:
+ kibana_password = frappe.get_doc("Log Server", log_server).get_password("kibana_password")
+ else:
+ kibana_password = None
+ return log_server, kibana_password
+
+ def get_monitoring_password(self):
+ return frappe.get_doc("Cluster", self.cluster).get_password("monitoring_password")
+
+ @frappe.whitelist()
+ def setup_nfs(self):
+ """Allow nfs setup on this server"""
+ frappe.enqueue_doc(self.doctype, self.name, "_setup_nfs", queue="long", timeout=1200)
+
+ def _setup_nfs(self):
+ try:
+ ansible = Ansible(
+ playbook="nfs_server.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ )
+ ansible.run()
+ except Exception:
+ log_error("Exception while setting up NFS", doc=self)
+
+ @frappe.whitelist()
+ def increase_swap(self, swap_size=4):
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "increase_swap_locked",
+ queue="long",
+ timeout=1200,
+ **{"swap_size": swap_size},
+ )
+
+ def _increase_swap(self, swap_size=4):
+ """Increase swap by size defined"""
+ from press.api.server import calculate_swap
+
+ existing_swap_size = calculate_swap(self.name).get("swap", 0)
+ # We used to create 4 GB swap files at minimum; to avoid conflicts, number new files accordingly
+ swap_file_name = "swap" + str(int((existing_swap_size // 4) + 1))
+ try:
+ ansible = Ansible(
+ playbook="increase_swap.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ variables={
+ "swap_size": swap_size,
+ "swap_file": swap_file_name,
+ },
+ )
+ ansible.run()
+ except Exception:
+ log_error("Increase swap exception", doc=self)
+
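# Editor's note: a worked sketch of the swap-file naming used in _increase_swap
# above. Swap files were historically created in 4 GB units, so the next file
# index is derived from the existing swap size (the GB sizes are illustrative).

def next_swap_file_name(existing_swap_gb: float) -> str:
    return "swap" + str(int(existing_swap_gb // 4) + 1)

assert next_swap_file_name(0) == "swap1"  # no swap yet
assert next_swap_file_name(4) == "swap2"  # one 4 GB file already present
assert next_swap_file_name(9) == "swap3"  # roughly two files' worth of swap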
+ def increase_swap_locked(self, swap_size=4):
+ with filelock(f"{self.name}-swap-update"):
+ self._increase_swap(swap_size)
+
+ @frappe.whitelist()
+ def reset_swap(self, swap_size=1):
+ """
+ Replace existing swap files with a new swap file of the given size
+ """
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "reset_swap_locked",
+ queue="long",
+ timeout=1200,
+ **{"swap_size": swap_size},
+ )
+
+ def reset_swap_locked(self, swap_size=1):
+ with filelock(f"{self.name}-swap-update"):
+ self._reset_swap(swap_size)
+
+ def _reset_swap(self, swap_size=1):
+ """Reset swap by removing existing swap files and creating new swap"""
+ # List of swap files to remove, assuming a minimum swap size of 1 GB to be safe. Wrong names are handled in the playbook
+ swap_files_to_remove = ["swap.default", "swap"]
+ swap_files_to_remove += ["swap" + str(i) for i in range(1, 30)]
+ try:
+ ansible = Ansible(
+ playbook="reset_swap.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ variables={
+ "swap_size": swap_size,
+ "swap_file": "swap",
+ "swap_files_to_remove": swap_files_to_remove,
+ },
+ )
+ ansible.run()
+ except Exception:
+ log_error("Reset swap exception", doc=self)
+
+ def add_glass_file(self):
+ frappe.enqueue_doc(self.doctype, self.name, "_add_glass_file")
+
+ def _add_glass_file(self):
+ try:
+ ansible = Ansible(
+ playbook="glass_file.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ )
+ ansible.run()
+ except Exception:
+ log_error("Add Glass File Exception", doc=self)
+
+ @frappe.whitelist()
+ def setup_mysqldump(self):
+ frappe.enqueue_doc(self.doctype, self.name, "_setup_mysqldump")
+
+ def _setup_mysqldump(self):
+ try:
+ ansible = Ansible(
+ playbook="mysqldump.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ )
+ ansible.run()
+ except Exception:
+ log_error("MySQLdump Setup Exception", doc=self)
+
+ def setup_iptables(self):
+ frappe.enqueue_doc(self.doctype, self.name, "_setup_iptables")
+
+ def _setup_iptables(self):
+ try:
+ ansible = Ansible(
+ playbook="iptables.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ )
+ ansible.run()
+ except Exception:
+ log_error("Iptables Setup Exception", doc=self)
+
+ @frappe.whitelist()
+ def set_swappiness(self):
+ frappe.enqueue_doc(self.doctype, self.name, "_set_swappiness")
+
+ def _set_swappiness(self):
+ try:
+ ansible = Ansible(
+ playbook="swappiness.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ )
+ ansible.run()
+ except Exception:
+ log_error("Swappiness Setup Exception", doc=self)
+
+ @frappe.whitelist()
+ def update_tls_certificate(self):
+ from press.press.doctype.tls_certificate.tls_certificate import (
+ update_server_tls_certifcate,
+ )
+
+ filters = {"wildcard": True, "status": "Active", "domain": self.domain}
+
+ if (
+ hasattr(self, "is_self_hosted")
+ and self.is_self_hosted
+ and self.domain != self.self_hosted_server_domain
+ ):
+ filters["domain"] = self.name
+ del filters["wildcard"]
+
+ certificate = frappe.get_last_doc("TLS Certificate", filters)
+
+ update_server_tls_certifcate(self, certificate)
+
+ @frappe.whitelist()
+ def show_agent_version(self) -> str:
+ return self.agent.get_version()["commit"]
+
+ @frappe.whitelist()
+ def show_agent_password(self) -> str:
+ return self.get_password("agent_password")
+
+ @property
+ def agent(self):
+ return Agent(self.name, server_type=self.doctype)
+
+ @frappe.whitelist()
+ def fetch_security_updates(self):
+ from press.press.doctype.security_update.security_update import SecurityUpdate
+
+ frappe.enqueue(SecurityUpdate.fetch_security_updates, server_obj=self)
+
+ @frappe.whitelist()
+ def configure_ssh_logging(self):
+ try:
+ ansible = Ansible(
+ playbook="configure_ssh_logging.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ )
+ ansible.run()
+ except Exception:
+ log_error("Set SSH Session Logging Exception", server=self.as_dict())
+
+ @property
+ def real_ram(self):
+ """Ram detected by OS after h/w reservation"""
+ return 0.972 * self.ram - 218
+
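# Editor's note: a worked instance of the real_ram heuristic above; the
# 8192 MB plan size is purely illustrative.
assert round(0.972 * 8192 - 218) == 7745  # the OS sees ~7.7 GB of an 8 GB machine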
+ @frappe.whitelist()
+ def reboot_with_serial_console(self):
+ if self.provider != "AWS EC2":
+ raise NotImplementedError
+ console = frappe.new_doc("Serial Console Log")
+ console.server_type = self.doctype
+ console.server = self.name
+ console.virtual_machine = self.virtual_machine
+ console.action = "reboot"
+ console.save()
+ console.reload()
+ console.run_sysrq()
+
+ @dashboard_whitelist()
+ def reboot(self):
+ if self.provider not in ("AWS EC2", "OCI", "DigitalOcean", "Hetzner"):
+ raise NotImplementedError
+ virtual_machine = frappe.get_doc("Virtual Machine", self.virtual_machine)
+ virtual_machine.reboot()
+
+ @dashboard_whitelist()
+ def rename(self, title):
+ self.title = title
+ self.save()
+
+ def validate_mounts(self):
+ if not self.virtual_machine:
+ return
+ machine = frappe.get_doc("Virtual Machine", self.virtual_machine)
+ if machine.has_data_volume and len(machine.volumes) > 1 and not self.mounts:
+ self.fetch_volumes_from_virtual_machine()
+ self.set_default_mount_points()
+ self.set_mount_properties()
+
+ def fetch_volumes_from_virtual_machine(self):
+ machine = frappe.get_doc("Virtual Machine", self.virtual_machine)
+ for volume in machine.volumes:
+ if volume.device == "/dev/sda1" or (self.provider == "Hetzner" and volume.device == "/dev/sda"):
+ # Skip the root volume. /dev/sda1 is AWS-specific; other providers may use a different root device
+ continue
+ self.append("mounts", {"volume_id": volume.volume_id})
+
+ def set_default_mount_points(self):
+ first = self.mounts[0]
+ if self.doctype == "Server":
+ first.mount_point = BENCH_DATA_MNT_POINT
+ self.append(
+ "mounts",
+ {
+ "mount_type": "Bind",
+ "mount_point": "/home/frappe/benches",
+ "source": f"{BENCH_DATA_MNT_POINT}/home/frappe/benches",
+ "mount_point_owner": "frappe",
+ "mount_point_group": "frappe",
+ },
+ )
+ self.append(
+ "mounts",
+ {
+ "mount_type": "Bind",
+ "mount_point": "/var/lib/docker",
+ "source": f"{BENCH_DATA_MNT_POINT}/var/lib/docker",
+ "mount_point_owner": "root",
+ "mount_point_group": "root",
+ },
+ )
+ elif self.doctype == "Database Server":
+ first.mount_point = MARIADB_DATA_MNT_POINT
+ self.append(
+ "mounts",
+ {
+ "mount_type": "Bind",
+ "mount_point": "/var/lib/mysql",
+ "source": f"{MARIADB_DATA_MNT_POINT}/var/lib/mysql",
+ "mount_point_owner": "mysql",
+ "mount_point_group": "mysql",
+ },
+ )
+ self.append(
+ "mounts",
+ {
+ "mount_type": "Bind",
+ "mount_point": "/etc/mysql",
+ "source": f"{MARIADB_DATA_MNT_POINT}/etc/mysql",
+ "mount_point_owner": "mysql",
+ "mount_point_group": "mysql",
+ },
+ )
+
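# Editor's note: the resulting layout for a "Server" doctype, sketched with
# the mount-point constants left symbolic (their concrete paths are defined
# elsewhere in this module):
#
#   Volume  BENCH_DATA_MNT_POINT   the data disk itself
#   Bind    /home/frappe/benches   <- BENCH_DATA_MNT_POINT/home/frappe/benches
#   Bind    /var/lib/docker        <- BENCH_DATA_MNT_POINT/var/lib/docker
#
# A "Database Server" gets the analogous pair of bind mounts for
# /var/lib/mysql and /etc/mysql under MARIADB_DATA_MNT_POINT.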
+ def set_mount_properties(self):
+ for mount in self.mounts:
+ # set_defaults doesn't seem to work on children in a controller hook
+ default_fields = find_all(frappe.get_meta("Server Mount").fields, lambda x: x.default)
+ for field in default_fields:
+ fieldname = field.fieldname
+ if not mount.get(fieldname):
+ mount.set(fieldname, field.default)
+
+ mount_options = "defaults,nofail" # Set default mount options
+ if mount.mount_options:
+ mount_options = f"{mount_options},{mount.mount_options}"
+
+ mount.mount_options = mount_options
+ if mount.mount_type == "Bind":
+ mount.filesystem = "none"
+ mount.mount_options = f"{mount.mount_options},bind"
+
+ if mount.volume_id:
+ # EBS volumes are named by their volume id
+ # There's likely a better way to do this
+ # https://docs.aws.amazon.com/ebs/latest/userguide/ebs-using-volumes.html
+ stripped_id = mount.volume_id.replace("-", "")
+ mount.source = self.get_device_from_volume_id(mount.volume_id)
+ if not mount.mount_point:
+ # If we don't know where to mount, mount it in /mnt/
+ mount.mount_point = f"/mnt/{stripped_id}"
+
+ def get_device_from_volume_id(self, volume_id):
+ if self.provider == "Hetzner":
+ return f"/dev/disk/by-id/scsi-0HC_Volume_{volume_id}"
+ stripped_id = volume_id.replace("-", "")
+ return f"/dev/disk/by-id/nvme-Amazon_Elastic_Block_Store_{stripped_id}"
+
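# Editor's note: illustrative inputs/outputs for get_device_from_volume_id
# above; both volume ids are hypothetical.
#
#   Hetzner volumes are exposed by numeric id:
#     "101234567" -> "/dev/disk/by-id/scsi-0HC_Volume_101234567"
#   AWS EBS NVMe volumes drop the dash from the id:
#     "vol-0123456789abcdef0"
#       -> "/dev/disk/by-id/nvme-Amazon_Elastic_Block_Store_vol0123456789abcdef0"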
+ def get_mount_variables(self):
+ return {
+ "all_mounts_json": json.dumps([mount.as_dict() for mount in self.mounts], indent=4, default=str),
+ "volume_mounts_json": json.dumps(
+ self.get_volume_mounts(),
+ indent=4,
+ default=str,
+ ),
+ "bind_mounts_json": json.dumps(
+ [mount.as_dict() for mount in self.mounts if mount.mount_type == "Bind"],
+ indent=4,
+ default=str,
+ ),
+ }
+
+ def get_volume_mounts(self):
+ return [mount.as_dict() for mount in self.mounts if mount.mount_type == "Volume"]
+
+ def _create_arm_build(self, build: str) -> str | None:
+ from press.press.doctype.deploy_candidate_build.deploy_candidate_build import (
+ _create_arm_build as arm_build_util,
+ )
+
+ deploy_candidate = frappe.get_value("Deploy Candidate Build", build, "deploy_candidate")
+
+ try:
+ return arm_build_util(deploy_candidate)
+ except frappe.ValidationError:
+ frappe.log_error(
+ "Failed to create ARM build", message=f"Failed to create arm build for build {build}"
+ )
+ return None
+
+ def _process_bench(self, bench_info: BenchInfoType) -> ARMDockerImageType:
+ candidate = bench_info["candidate"]
+ build_id = bench_info["build"]
+
+ arm_build = frappe.get_value("Deploy Candidate", candidate, "arm_build")
+
+ if arm_build:
+ return {
+ "build": arm_build,
+ "status": frappe.get_value("Deploy Candidate Build", arm_build, "status"),
+ "bench": bench_info["name"],
+ }
+
+ new_arm_build = self._create_arm_build(build_id)
+ return {
+ "build": new_arm_build,
+ "status": "Pending",
+ "bench": bench_info["name"],
+ }
+
+ def _get_dependency_version(self, candidate: str, dependency: str) -> str:
+ return frappe.get_value(
+ "Deploy Candidate Dependency",
+ {"parent": candidate, "dependency": dependency},
+ "version",
+ )
+
+ @frappe.whitelist()
+ def collect_arm_images(self) -> str:
+ """Collect arm build images of all active benches on VM"""
+ # Need to disable all further deployments before collecting arm images.
+
+ def _parse_semantic_version(version_str: str) -> semantic_version.Version:
+ try:
+ return semantic_version.Version(version_str)
+ except ValueError:
+ return semantic_version.Version(f"{version_str}.0")
+
+ frappe.db.set_value("Server", self.name, "stop_deployments", 1)
+ frappe.db.commit()
+
+ benches = frappe.get_all(
+ "Bench",
+ {"server": self.name, "status": "Active"},
+ ["name", "build", "candidate"],
+ )
+
+ if not benches:
+ frappe.throw(f"No active benches found on Server ")
+
+ for bench in benches:
+ raw_bench_version = self._get_dependency_version(bench["candidate"], "BENCH_VERSION")
+ raw_python_version = self._get_dependency_version(bench["candidate"], "PYTHON_VERSION")
+ bench_version = _parse_semantic_version(raw_bench_version)
+ python_version = _parse_semantic_version(raw_python_version)
+
+ if python_version > semantic_version.Version(
+ "3.8.0"
+ ) and bench_version < semantic_version.Version("5.25.1"):
+ frappe.db.set_value(
+ "Deploy Candidate Dependency",
+ {"parent": bench["candidate"], "dependency": "BENCH_VERSION"},
+ "version",
+ "5.25.1",
+ )
+
+ frappe.db.commit()
+
+ arm_build_record: ARMBuildRecord = frappe.new_doc("ARM Build Record", server=self.name)
+
+ for bench_info in benches:
+ arm_build_record.append("arm_images", self._process_bench(bench_info))
+
+ arm_build_record.save()
+ return f" ARM Build Record"
+
+ @frappe.whitelist()
+ def start_active_benches(self):
+ benches = frappe.get_all("Bench", {"server": self.name, "status": "Active"}, pluck="name")
+ frappe.enqueue_doc(self.doctype, self.name, "_start_active_benches", benches=benches)
+
+ def _start_active_benches(self, benches: list[str]):
+ try:
+ ansible = Ansible(
+ playbook="start_benches.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ variables={"benches": " ".join(benches)},
+ )
+ ansible.run()
+ except Exception:
+ log_error("Start Benches Exception", server=self.as_dict())
+
+ def _stop_active_benches(self):
+ try:
+ ansible = Ansible(
+ playbook="stop_benches.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ )
+ ansible.run()
+ except Exception:
+ log_error("Start Benches Exception", server=self.as_dict())
+
+ @frappe.whitelist()
+ def mount_volumes(
+ self,
+ now: bool | None,
+ stop_docker_before_mount: bool | None = None,
+ stop_mariadb_before_mount: bool | None = None,
+ start_docker_after_mount: bool | None = None,
+ start_mariadb_after_mount: bool | None = None,
+ cleanup_db_replication_files: bool | None = None,
+ rotate_additional_volume_metadata: bool | None = None,
+ ):
+ if not cleanup_db_replication_files:
+ cleanup_db_replication_files = False
+
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "_mount_volumes",
+ queue="long",
+ timeout=7200,
+ at_front=True,
+ now=now or False,
+ stop_docker_before_mount=stop_docker_before_mount or False,
+ stop_mariadb_before_mount=stop_mariadb_before_mount or False,
+ start_docker_after_mount=start_docker_after_mount or False,
+ start_mariadb_after_mount=start_mariadb_after_mount or False,
+ cleanup_db_replication_files=cleanup_db_replication_files,
+ rotate_additional_volume_metadata=rotate_additional_volume_metadata or False,
+ )
+
+ def _mount_volumes(
+ self,
+ stop_docker_before_mount: bool = False,
+ stop_mariadb_before_mount: bool = False,
+ start_docker_after_mount: bool = False,
+ start_mariadb_after_mount: bool = False,
+ cleanup_db_replication_files: bool = False,
+ rotate_additional_volume_metadata: bool = False,
+ ):
+ try:
+ variables = {
+ "stop_docker_before_mount": self.doctype == "Server" and stop_docker_before_mount,
+ "stop_mariadb_before_mount": self.doctype == "Database Server" and stop_mariadb_before_mount,
+ "start_docker_after_mount": self.doctype == "Server" and start_docker_after_mount,
+ "start_mariadb_after_mount": self.doctype == "Database Server" and start_mariadb_after_mount,
+ # Filebeat only needs a stop/start when the other services are being stopped too
+ "stop_filebeat_before_mount": stop_docker_before_mount or stop_mariadb_before_mount,
+ "start_filebeat_after_mount": stop_docker_before_mount or stop_mariadb_before_mount,
+ "cleanup_db_replication_files": cleanup_db_replication_files,
+ "rotate_additional_volume_metadata": rotate_additional_volume_metadata,
+ "hetzner_cloud": self.provider == "Hetzner",
+ **self.get_mount_variables(),
+ }
+ if self.provider != "Generic" and (
+ self.doctype == "Database Server" or getattr(self, "has_unified_volume", False)
+ ):
+ variables["mariadb_bind_address"] = frappe.get_value(
+ "Virtual Machine", self.virtual_machine, "private_ip_address"
+ )
+
+ ansible = Ansible(
+ playbook="mount.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ variables=variables,
+ )
+ play = ansible.run()
+ self.reload()
+ if self._set_mount_status(play):
+ self.save()
+ except Exception:
+ log_error("Server Mount Exception", server=self.as_dict())
+
+ def _set_mount_status(self, play): # noqa: C901
+ tasks = frappe.get_all(
+ "Ansible Task",
+ ["result", "task"],
+ {
+ "play": play.name,
+ "status": ("in", ("Success", "Failure")),
+ "task": ("in", ("Mount Volumes", "Mount Bind Mounts", "Show Block Device UUIDs")),
+ },
+ )
+ mounts_changed = False
+ for task in tasks:
+ result = json.loads(task.result)
+ for row in result.get("results", []):
+ mount = find(self.mounts, lambda x: x.name == row.get("item", {}).get("name"))
+ if not mount:
+ mount = find(
+ self.mounts,
+ lambda x: x.name == row.get("item", {}).get("original_item", {}).get("name"),
+ )
+ if not mount:
+ mount = find(
+ self.mounts, lambda x: x.name == row.get("item", {}).get("item", {}).get("name")
+ )
+ if not mount:
+ mount = find(
+ self.mounts,
+ lambda x: x.name
+ == row.get("item", {}).get("item", {}).get("original_item", {}).get("name"),
+ )
+ if not mount:
+ continue
+ if task.task == "Show Block Device UUIDs":
+ mount.uuid = row.get("stdout", "").strip()
+ mounts_changed = True
+ else:
+ mount_status = {True: "Failure", False: "Success"}[row.get("failed", False)]
+ if mount.status != mount_status:
+ mount.status = mount_status
+ mounts_changed = True
+ return mounts_changed
+
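# Editor's note: the cascade of find(...) calls in _set_mount_status exists
# because Ansible nests per-item results differently depending on how the task
# loops (plain loop, include-with-loop, etc.). A recursive sketch of the same
# lookup, assuming row dicts shaped like Ansible's loop results:

def find_item_name(row) -> str | None:
    """Walk item/original_item nesting until a "name" key is found."""
    if not isinstance(row, dict):
        return None
    if name := row.get("name"):
        return name
    for key in ("item", "original_item"):
        if name := find_item_name(row.get(key)):
            return name
    return None

assert find_item_name({"item": {"item": {"original_item": {"name": "m1"}}}}) == "m1"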
+ def wait_for_cloud_init(self):
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "_wait_for_cloud_init",
+ queue="short",
+ )
+
+ def _wait_for_cloud_init(self):
+ try:
+ ansible = Ansible(
+ playbook="wait_for_cloud_init.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ )
+ ansible.run()
+ except Exception:
+ log_error("Cloud Init Wait Exception", server=self.as_dict())
+
+ def free_space(self, mountpoint: str) -> int:
+ from press.api.server import prometheus_query
+
+ response = prometheus_query(
+ f"""node_filesystem_avail_bytes{{instance="{self.name}", job="node", mountpoint="{mountpoint}"}}""",
+ lambda x: x["mountpoint"],
+ "Asia/Kolkata",
+ 60,
+ 60,
+ )["datasets"]
+ if response:
+ return response[0]["values"][-1]
+ return 50 * 1024 * 1024 * 1024 # Assume 50GB free space
+
+ def is_disk_full(self, mountpoint: str) -> bool:
+ return self.free_space(mountpoint) == 0
+
+ def space_available_in_6_hours(self, mountpoint: str) -> int:
+ from press.api.server import prometheus_query
+
+ response = prometheus_query(
+ f"""predict_linear(
+node_filesystem_avail_bytes{{instance="{self.name}", mountpoint="{mountpoint}"}}[3h], 6*3600
+ )""",
+ lambda x: x["mountpoint"],
+ "Asia/Kolkata",
+ 120,
+ 120,
+ )["datasets"]
+ if not response:
+ return -20 * 1024 * 1024 * 1024
+ return response[0]["values"][-1]
+
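# Editor's note: predict_linear() fits a least-squares line over the last 3h
# of free-space samples and extrapolates 6*3600 seconds ahead, so a negative
# value means the disk is on track to fill within 6 hours. When no metrics
# exist, the method above falls back to -20 GiB, which presumably makes the
# server look "almost full" so callers act conservatively.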
+ def disk_capacity(self, mountpoint: str) -> int:
+ from press.api.server import prometheus_query
+
+ response = prometheus_query(
+ f"""node_filesystem_size_bytes{{instance="{self.name}", job="node", mountpoint="{mountpoint}"}}""",
+ lambda x: x["mountpoint"],
+ "Asia/Kolkata",
+ 120,
+ 120,
+ )["datasets"]
+ if response:
+ return response[0]["values"][-1]
+ return frappe.db.get_value("Virtual Machine", self.virtual_machine, "disk_size") * 1024 * 1024 * 1024
+
+ def size_to_increase_by_for_20_percent_available(self, mountpoint: str): # min 50 GB, max 250 GB
+ projected_usage = self.disk_capacity(mountpoint) - self.space_available_in_6_hours(mountpoint) * 5
+ projected_growth_gb = abs(projected_usage) / (4 * 1024 * 1024 * 1024)
+
+ if mountpoint == "/" and self.guess_data_disk_mountpoint() != "/":
+ # Ignore the min/max limits when the mountpoint is /
+ return int(projected_growth_gb)
+
+ return int(max(self.auto_add_storage_min, min(projected_growth_gb, self.auto_add_storage_max)))
+
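# Editor's note: a worked derivation of the "* 5" and "/ 4" above. With
# capacity C and predicted free space F (both in bytes), asking for 20% free
# after adding g bytes means (F + g) / (C + g) = 0.2, which solves to
# g = (C - 5F) / 4 -- exactly projected_usage / 4, converted to GiB. A quick
# numeric check with hypothetical sizes:

GiB = 1024**3
C, F = 500 * GiB, 40 * GiB       # 500 GiB disk, 40 GiB predicted free in 6h
g = (C - 5 * F) / 4              # extra bytes needed for 20% headroom
assert g == 75 * GiB
assert (F + g) / (C + g) == 0.2  # 115 GiB free of 575 GiB total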
+ def recommend_disk_increase(self, mountpoint: str):
+ """
+ Send a disk expansion email to users who have auto add-on storage disabled, at 80% capacity.
+ The recommendation projects disk usage over a 30 hour window and takes 25 percent of that.
+ """
+ server: Server | DatabaseServer = frappe.get_doc(self.doctype, self.name) # type: ignore
+ if server.auto_increase_storage:
+ return
+
+ disk_capacity = self.disk_capacity(mountpoint)
+ current_disk_usage = disk_capacity - self.free_space(mountpoint)
+ recommended_increase = (
+ abs(self.disk_capacity(mountpoint) - self.space_available_in_6_hours(mountpoint) * 5)
+ / 4
+ / 1024
+ / 1024
+ / 1024
+ )
+
+ current_disk_usage_flt = round(current_disk_usage / 1024 / 1024 / 1024, 2)
+ disk_capacity_flt = round(disk_capacity / 1024 / 1024 / 1024, 2)
+
+ frappe.sendmail(
+ recipients=get_communication_info("Email", "Incident", self.doctype, self.name),
+ subject=f"Important: Server {server.name} has used 80% of the available space",
+ template="disabled_auto_disk_expansion",
+ args={
+ "server": server.name,
+ "current_disk_usage": f"{current_disk_usage_flt} Gib",
+ "available_disk_space": f"{disk_capacity_flt} GiB",
+ "used_storage_percentage": "80%",
+ "increase_by": f"{recommended_increase} GiB",
+ },
+ )
+
+ def calculated_increase_disk_size(
+ self,
+ mountpoint: str,
+ additional: int = 0,
+ ):
+ """
+ Calculate required disk increase for servers and handle notifications accordingly.
+ - For servers with `auto_increase_storage` enabled:
+ - Compute the required storage increase.
+ - Automatically apply the increase.
+ - Send an email notification about the auto-added storage.
+ - For servers with `auto_increase_storage` disabled:
+ - If disk usage exceeds 90%, send a warning email.
+ - Users were already emailed at 80% usage; if auto add-on is still disabled, another email is sent here.
+ - Notify the user to manually increase disk space.
+ """
+
+ buffer = self.size_to_increase_by_for_20_percent_available(mountpoint)
+ server: Server | DatabaseServer = frappe.get_doc(self.doctype, self.name)
+ disk_capacity = self.disk_capacity(mountpoint)
+
+ current_disk_usage = round((disk_capacity - self.free_space(mountpoint)) / 1024 / 1024 / 1024, 2)
+
+ if not server.auto_increase_storage and (not server.has_data_volume or mountpoint != "/"):
+ TelegramMessage.enqueue(
+ f"Not increasing disk (mount point {mountpoint}) on "
+ f"[{self.name}]({frappe.utils.get_url_to_form(self.doctype, self.name)}) "
+ f"by {buffer + additional}G as auto disk increase disabled by user",
+ "Information",
+ )
+ insert_addon_storage_log(
+ adding_storage=additional + buffer,
+ available_disk_space=round((self.disk_capacity(mountpoint) / 1024 / 1024 / 1024), 2),
+ current_disk_usage=current_disk_usage
+ or round(
+ (self.disk_capacity(mountpoint) - self.free_space(mountpoint)) / 1024 / 1024 / 1024, 2
+ ),
+ mountpoint=mountpoint or self.guess_data_disk_mountpoint(),
+ is_auto_triggered=True,
+ is_warning=True,
+ database_server=server.name if server.name[0] == "m" else None,
+ server=server.name if server.name[0] == "f" else None,
+ )
+
+ return
+
+ TelegramMessage.enqueue(
+ f"Increasing disk (mount point {mountpoint}) on "
+ f"[{self.name}]({frappe.utils.get_url_to_form(self.doctype, self.name)}) "
+ f"by {buffer + additional}G",
+ "Information",
+ )
+
+ self.increase_disk_size_for_server(
+ self.name,
+ buffer + additional,
+ mountpoint,
+ is_auto_triggered=True,
+ current_disk_usage=current_disk_usage,
+ )
+
+ def prune_docker_system(self):
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "_prune_docker_system",
+ queue="long",
+ timeout=8000,
+ )
+
+ def _prune_docker_system(self):
+ try:
+ ansible = Ansible(
+ playbook="docker_system_prune.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ )
+ ansible.run()
+ except Exception:
+ log_error("Prune Docker System Exception", doc=self)
+
+ @frappe.whitelist()
+ def reload_nginx(self):
+ agent = Agent(self.name, server_type=self.doctype)
+ agent.reload_nginx()
+
+ def _ssh_user(self):
+ if not hasattr(self, "ssh_user"):
+ return "root"
+ return self.ssh_user or "root"
+
+ def _ssh_port(self):
+ if not hasattr(self, "ssh_port"):
+ return 22
+ return self.ssh_port or 22
+
+ def get_primary_frappe_public_key(self):
+ if primary_public_key := frappe.db.get_value(self.doctype, self.primary, "frappe_public_key"):
+ return primary_public_key
+
+ primary = frappe.get_doc(self.doctype, self.primary)
+ ansible = Ansible(
+ playbook="fetch_frappe_public_key.yml",
+ server=primary,
+ user=primary._ssh_user(),
+ port=primary._ssh_port(),
+ )
+ play = ansible.run()
+ if play.status == "Success":
+ return frappe.db.get_value(self.doctype, self.primary, "frappe_public_key")
+ frappe.throw(f"Failed to fetch {primary.name}'s Frappe public key")
+ return None
+
+ def copy_files(self, source, destination, extra_options=None):
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "_copy_files",
+ source=source,
+ destination=destination,
+ extra_options=extra_options,
+ queue="long",
+ timeout=7200,
+ )
+
+ def _copy_files(self, source, destination, extra_options=None):
+ try:
+ ansible = Ansible(
+ playbook="copy.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ variables={
+ "source": source,
+ "destination": destination,
+ "extra_options": extra_options,
+ },
+ )
+ ansible.run()
+ except Exception:
+ log_error("Sever File Copy Exception", server=self.as_dict())
+
+ @frappe.whitelist()
+ def install_cadvisor(self):
+ frappe.enqueue_doc(self.doctype, self.name, "_install_cadvisor")
+
+ def _install_cadvisor(self):
+ try:
+ ansible = Ansible(
+ playbook="install_cadvisor.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ )
+ ansible.run()
+ except Exception:
+ log_error("Cadvisor Install Exception", server=self.as_dict())
+
+ def set_additional_unified_config(self):
+ """Set both `Server` and `Database Server` additional config for Unified Servers"""
+ # Common config for both Server and Database Server
+ self.set_swappiness()
+ self.add_glass_file()
+ self.install_filebeat()
+
+ # Server specific config
+ self.setup_mysqldump()
+ self.install_earlyoom()
+ self.setup_ncdu()
+ self.setup_iptables()
+ self.install_cadvisor()
+
+ # Database Server specific config
+ database_server: DatabaseServer = frappe.get_doc("Database Server", self.database_server)
+ default_variables = frappe.get_all("MariaDB Variable", {"set_on_new_servers": 1}, pluck="name")
+ for var_name in default_variables:
+ var: MariaDBVariable = frappe.get_doc("MariaDB Variable", var_name)
+ var.set_on_server(database_server)
+
+ database_server.adjust_memory_config()
+ database_server.provide_frappe_user_du_and_find_permission()
+ database_server.setup_logrotate()
+ database_server.setup_user_lingering()
+
+ self.validate_mounts()
+ self.save(ignore_permissions=True)
+
+ @frappe.whitelist()
+ def set_additional_config(self): # noqa: C901
+ """
+ Corresponds to Set additional config step in Create Server Press Job
+ """
+
+ if hasattr(self, "is_unified_server") and self.is_unified_server:
+ self.set_additional_unified_config()
+ return
+
+ if self.doctype == "Database Server":
+ default_variables = frappe.get_all("MariaDB Variable", {"set_on_new_servers": 1}, pluck="name")
+ for var_name in default_variables:
+ var: MariaDBVariable = frappe.get_doc("MariaDB Variable", var_name)
+ var.set_on_server(self)
+
+ self.set_swappiness()
+ self.add_glass_file()
+ self.install_filebeat()
+
+ if self.doctype == "Server":
+ self.install_nfs_common()
+ self.setup_mysqldump()
+ self.install_earlyoom()
+ self.setup_ncdu()
+ self.setup_iptables()
+
+ if self.has_data_volume:
+ self.setup_archived_folder()
+
+ self.install_cadvisor()
+
+ if self.is_secondary:
+ frappe.db.set_value(
+ "Server", {"secondary_server": self.name}, "status", self.status
+ ) # Update the status of the primary server
+ frappe.db.commit()
+
+ if self.doctype == "Database Server":
+ self.adjust_memory_config()
+ self.provide_frappe_user_du_and_find_permission()
+ self.setup_logrotate()
+ self.setup_user_lingering()
+
+ if self.has_data_volume:
+ self.setup_binlog_indexes_folder()
+
+ if self.doctype == "Proxy Server":
+ self.setup_wildcard_hosts()
+
+ self.validate_mounts()
+ self.save(ignore_permissions=True)
+
+ def get_wildcard_domains(self):
+ wildcard_domains = []
+ for domain in self.domains:
+ if domain.domain == self.domain and self.doctype == "Proxy Server":
+ # self.domain certs are symlinks
+ continue
+ certificate_name = frappe.db.get_value(
+ "TLS Certificate", {"wildcard": True, "domain": domain.domain}, "name"
+ )
+ certificate = frappe.get_doc("TLS Certificate", certificate_name)
+ wildcard_domains.append(
+ {
+ "domain": domain.domain,
+ "certificate": {
+ "privkey.pem": certificate.private_key,
+ "fullchain.pem": certificate.full_chain,
+ "chain.pem": certificate.intermediate_chain,
+ },
+ "code_server": domain.code_server,
+ }
+ )
+ return wildcard_domains
+
+ @frappe.whitelist()
+ def setup_wildcard_hosts(self):
+ agent = Agent(self.name, server_type=self.doctype)
+ wildcards = self.get_wildcard_domains()
+ agent.setup_wildcard_hosts(wildcards)
+
+ @property
+ def bastion_host(self):
+ if self.bastion_server:
+ return frappe.get_cached_value(
+ "Bastion Server", self.bastion_server, ["ssh_user", "ssh_port", "ip"], as_dict=True
+ )
+ return frappe._dict()
+
+ @frappe.whitelist()
+ def get_aws_static_ip(self):
+ if self.provider != "AWS EC2":
+ frappe.throw("Failed to proceed as VM is not AWS EC2")
+
+ vm_doc = frappe.get_doc("Virtual Machine", self.virtual_machine)
+
+ cluster_doc = frappe.get_doc("Cluster", self.cluster)
+ region_name = cluster_doc.region
+ aws_access_key_id = cluster_doc.aws_access_key_id
+ aws_secret_access_key = get_decrypted_password(
+ "Cluster", self.cluster, fieldname="aws_secret_access_key"
+ )
+
+ instance_id = vm_doc.instance_id
+
+ # Initialize EC2 client
+ ec2_client = boto3.client(
+ "ec2",
+ aws_access_key_id=aws_access_key_id,
+ aws_secret_access_key=aws_secret_access_key,
+ region_name=region_name,
+ )
+
+ # Allocate new Elastic IP
+ allocation = ec2_client.allocate_address(Domain="vpc")
+ allocation_id = allocation["AllocationId"]
+ public_ip = allocation["PublicIp"]
+
+ # Associate with instance
+ ec2_client.associate_address(InstanceId=instance_id, AllocationId=allocation_id)
+
+ # Trigger VM sync
+ vm_doc.sync()
+
+ return f"Static IP {public_ip} alloted to the VM (Allocation ID: {allocation_id})"
+
+
+class Server(BaseServer):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.auto_scale_trigger.auto_scale_trigger import AutoScaleTrigger
+ from press.press.doctype.communication_info.communication_info import CommunicationInfo
+ from press.press.doctype.resource_tag.resource_tag import ResourceTag
+ from press.press.doctype.server_mount.server_mount import ServerMount
+
+ agent_password: DF.Password | None
+ auto_add_storage_max: DF.Int
+ auto_add_storage_min: DF.Int
+ auto_increase_storage: DF.Check
+ auto_scale_trigger: DF.Table[AutoScaleTrigger]
+ bastion_server: DF.Link | None
+ benches_on_shared_volume: DF.Check
+ cluster: DF.Link | None
+ communication_infos: DF.Table[CommunicationInfo]
+ database_server: DF.Link | None
+ disable_agent_job_auto_retry: DF.Check
+ domain: DF.Link | None
+ enable_logical_replication_during_site_update: DF.Check
+ frappe_public_key: DF.Code | None
+ frappe_user_password: DF.Password | None
+ halt_agent_jobs: DF.Check
+ has_data_volume: DF.Check
+ hostname: DF.Data
+ hostname_abbreviation: DF.Data | None
+ ignore_incidents_till: DF.Datetime | None
+ ip: DF.Data | None
+ ipv6: DF.Data | None
+ is_for_recovery: DF.Check
+ is_managed_database: DF.Check
+ is_monitoring_disabled: DF.Check
+ is_primary: DF.Check
+ is_provisioning_press_job_completed: DF.Check
+ is_pyspy_setup: DF.Check
+ is_replication_setup: DF.Check
+ is_secondary: DF.Check
+ is_self_hosted: DF.Check
+ is_server_prepared: DF.Check
+ is_server_renamed: DF.Check
+ is_server_setup: DF.Check
+ is_standalone: DF.Check
+ is_standalone_setup: DF.Check
+ is_static_ip: DF.Check
+ is_unified_server: DF.Check
+ is_upstream_setup: DF.Check
+ keep_files_on_server_in_offsite_backup: DF.Check
+ managed_database_service: DF.Link | None
+ mounts: DF.Table[ServerMount]
+ new_worker_allocation: DF.Check
+ plan: DF.Link | None
+ platform: DF.Literal["x86_64", "arm64"]
+ primary: DF.Link | None
+ private_ip: DF.Data | None
+ private_mac_address: DF.Data | None
+ private_vlan_id: DF.Data | None
+ provider: DF.Literal["Generic", "Scaleway", "AWS EC2", "OCI", "Hetzner", "Vodacom", "DigitalOcean"]
+ proxy_server: DF.Link | None
+ public: DF.Check
+ ram: DF.Float
+ root_public_key: DF.Code | None
+ scaled_up: DF.Check
+ secondary_server: DF.Link | None
+ self_hosted_mariadb_root_password: DF.Password | None
+ self_hosted_mariadb_server: DF.Data | None
+ self_hosted_server_domain: DF.Data | None
+ set_bench_memory_limits: DF.Check
+ skip_scheduled_backups: DF.Check
+ ssh_port: DF.Int
+ ssh_user: DF.Data | None
+ staging: DF.Check
+ status: DF.Literal["Pending", "Installing", "Active", "Broken", "Archived"]
+ stop_deployments: DF.Check
+ tags: DF.Table[ResourceTag]
+ team: DF.Link | None
+ title: DF.Data | None
+ tls_certificate_renewal_failed: DF.Check
+ use_agent_job_callbacks: DF.Check
+ use_for_build: DF.Check
+ use_for_new_benches: DF.Check
+ use_for_new_sites: DF.Check
+ virtual_machine: DF.Link | None
+ # end: auto-generated types
+
+ GUNICORN_MEMORY = 150 # avg ram usage of 1 gunicorn worker
+ BACKGROUND_JOB_MEMORY = 3 * 80 # avg ram usage of 3 sets of bg workers
+
+ @role_guard.action()
+ def validate(self):
+ super().validate()
+ self.validate_managed_database_service()
+
+ def validate_managed_database_service(self):
+ if getattr(self, "is_managed_database", 0):
+ if not self.managed_database_service:
+ frappe.throw(_("Please select Managed Database Service"))
+ self.database_server = ""
+ else:
+ self.managed_database_service = ""
+
+ def on_update(self):
+ # If Database Server is changed for the server then change it for all the benches
+ if not self.is_new() and (
+ self.has_value_changed("database_server") or self.has_value_changed("managed_database_service")
+ ):
+ benches = frappe.get_all("Bench", {"server": self.name, "status": ("!=", "Archived")})
+ for bench in benches:
+ bench = frappe.get_doc("Bench", bench)
+ bench.database_server = self.database_server
+ bench.managed_database_service = self.managed_database_service
+ bench.save()
+
+ if self.database_server:
+ database_server_public = frappe.db.get_value("Database Server", self.database_server, "public")
+ if database_server_public != self.public:
+ frappe.db.set_value("Database Server", self.database_server, "public", self.public)
+
+ if not self.is_new() and self.has_value_changed("team"):
+ self.update_subscription()
+ self.update_db_server()
+
+ self.set_bench_memory_limits_if_needed(save=False)
+ if self.public:
+ self.auto_add_storage_min = max(self.auto_add_storage_min, PUBLIC_SERVER_AUTO_ADD_STORAGE_MIN)
+
+ if (
+ not self.is_new()
+ and self.has_value_changed("enable_logical_replication_during_site_update")
+ and self.enable_logical_replication_during_site_update
+ and frappe.db.count("Site", {"server": self.name, "status": ("!=", "Archived")}) > 1
+ ):
+ # Throw error if multiple sites are present on the server
+ frappe.throw(
+ "Cannot enable logical replication during site update if multiple sites are present on the server"
+ )
+
+ def update_db_server(self):
+ if not self.database_server:
+ return
+ db_server = frappe.get_doc("Database Server", self.database_server)
+ if self.team == db_server.team:
+ return
+
+ db_server.team = self.team
+ db_server.save()
+
+ def set_bench_memory_limits_if_needed(self, save: bool = False):
+ # Enable bench memory limits for public servers
+ if self.public:
+ self.set_bench_memory_limits = True
+ else:
+ self.set_bench_memory_limits = False
+
+ if save:
+ self.save()
+
+ def get_actions(self):
+ server_actions = super().get_actions()
+
+ return [
+ {
+ "action": "Notification Settings",
+ "description": "Manage notification channels",
+ "button_label": "Manage",
+ "doc_method": "dummy",
+ "group": "Application Server Actions" if not self.is_unified_server else "Server Actions",
+ "server_doctype": "Server",
+ "server_name": self.name,
+ },
+ *server_actions,
+ ]
+
+ def update_subscription(self):
+ subscription = self.subscription
+ if subscription:
+ if sub := frappe.db.get_value(
+ "Subscription",
+ {
+ "document_type": self.doctype,
+ "document_name": self.name,
+ "team": self.team,
+ "plan_type": "Server Plan",
+ "plan": self.plan,
+ },
+ ):
+ frappe.db.set_value("Subscription", sub, "enabled", 1)
+ subscription.disable()
+ else:
+ frappe.db.set_value("Subscription", subscription.name, {"team": self.team, "enabled": 1})
+ else:
+ try:
+ # create new subscription
+ self.create_subscription(self.plan)
+ except Exception:
+ frappe.log_error("Server Subscription Creation Error")
+
+ add_on_storage_subscription = self.add_on_storage_subscription
+ if add_on_storage_subscription:
+ if existing_subscription := frappe.db.get_value(
+ "Subscription",
+ filters={
+ "document_type": self.doctype,
+ "document_name": self.name,
+ "team": self.team,
+ "plan_type": "Server Storage Plan",
+ },
+ ):
+ frappe.db.set_value(
+ "Subscription",
+ existing_subscription,
+ {
+ "enabled": 1,
+ "additional_storage": add_on_storage_subscription.additional_storage,
+ },
+ )
+ add_on_storage_subscription.disable()
+ else:
+ frappe.db.set_value(
+ "Subscription", add_on_storage_subscription.name, {"team": self.team, "enabled": 1}
+ )
+
+ def create_secondary_server(self, plan_name: str) -> None:
+ """Create a secondary server for this server"""
+ plan: ServerPlan = frappe.get_cached_doc("Server Plan", plan_name)
+ team_name = frappe.db.get_value("Server", self.name, "team", "name")
+ cluster: "Cluster" = frappe.get_cached_doc("Cluster", self.cluster)
+ server_title = f"Secondary - {self.title or self.name}"
+
+ # This is horrible code, however it seems to be the standard
+ # https://github.com/frappe/press/blob/28c9ba67b15b5d8ba64e302d084d3289ea744c39/press/api/server.py/#L228-L229
+ cluster.database_server = self.database_server
+ cluster.proxy_server = self.proxy_server
+
+ secondary_server, _ = cluster.create_server(
+ "Server",
+ server_title,
+ plan,
+ team=team_name,
+ auto_increase_storage=self.auto_increase_storage,
+ is_secondary=True,
+ primary=self.name,
+ )
- def get_log_server(self):
- log_server = frappe.db.get_single_value("Press Settings", "log_server")
- if log_server:
- kibana_password = frappe.get_doc("Log Server", log_server).get_password(
- "kibana_password"
+ self.secondary_server = secondary_server.name
+ self.save()
+
+ def drop_secondary_server(self) -> None:
+ """Drop secondary server"""
+ server: "Server" = frappe.get_doc("Server", self.secondary_server)
+ server.archive()
+
+ @dashboard_whitelist()
+ @frappe.whitelist()
+ def setup_secondary_server(self, server_plan: str):
+ """Setup secondary server"""
+ if self.doctype == "Database Server" or self.is_secondary:
+ return
+
+ self.setup_nfs() # Setup nfs when creating a secondary server
+ self.status = "Installing"
+ self.save()
+
+ self.create_secondary_server(server_plan)
+
+ @dashboard_whitelist()
+ @frappe.whitelist()
+ def teardown_secondary_server(self):
+ if self.secondary_server:
+ nfs_volume_detachment: "NFSVolumeDetachment" = frappe.get_doc(
+ {"doctype": "NFS Volume Detachment", "primary_server": self.name}
)
- else:
- kibana_password = None
- return log_server, kibana_password
+ nfs_volume_detachment.insert(ignore_permissions=True)
- def get_monitoring_password(self):
- return frappe.get_doc("Cluster", self.cluster).get_password("monitoring_password")
+ @frappe.whitelist()
+ def setup_ncdu(self):
+ frappe.enqueue_doc(self.doctype, self.name, "_setup_ncdu")
@frappe.whitelist()
- def increase_swap(self):
- """Increase swap by size defined in playbook"""
- from press.api.server import calculate_swap
+ def install_nfs_common(self):
+ """Install nfs common on this server"""
+ frappe.enqueue_doc(self.doctype, self.name, "_install_nfs_common")
- swap_size = calculate_swap(self.name).get("swap", 0)
- # We used to do 4 GB minimum swap files, to avoid conflict, name files accordingly
- swap_file_name = "swap" + str(int((swap_size // 4) + 1))
+ def _install_nfs_common(self):
try:
ansible = Ansible(
- playbook="increase_swap.yml",
+ playbook="install_nfs_common.yml", server=self, user=self._ssh_user(), port=self._ssh_port()
+ )
+ ansible.run()
+ except Exception:
+ log_error("Unable to install nfs common", server=self.as_dict())
+
+ def _setup_ncdu(self):
+ try:
+ ansible = Ansible(
+ playbook="install_and_setup_ncdu.yml",
server=self,
- variables={
- "swap_file": swap_file_name,
- },
+ user=self._ssh_user(),
+ port=self._ssh_port(),
)
ansible.run()
except Exception:
- log_error("Increase swap exception", server=self.as_dict())
+ log_error("Install and ncdu Setup Exception", server=self.as_dict())
@frappe.whitelist()
- def update_tls_certificate(self):
- from press.press.doctype.tls_certificate.tls_certificate import (
- update_server_tls_certifcate,
- )
+ def add_upstream_to_proxy(self):
+ agent = Agent(self.proxy_server, server_type="Proxy Server")
+ agent.new_server(self.name)
- certificate = frappe.get_last_doc(
- "TLS Certificate",
- {"wildcard": True, "domain": self.domain, "status": "Active"},
- )
- update_server_tls_certifcate(self, certificate)
+ def ansible_run(self, command: str) -> dict[str, str]:
+ inventory = f"{self.ip},"
+ return AnsibleAdHoc(sources=inventory).run(command, self.name)[0]
- @frappe.whitelist()
- def show_agent_password(self):
- return self.get_password("agent_password")
+ def setup_docker(self, now: bool | None = None):
+ frappe.enqueue_doc(self.doctype, self.name, "_setup_docker", timeout=1200, now=now or False)
- @property
- def agent(self):
- return Agent(self.name, server_type=self.doctype)
+ def _setup_docker(self):
+ try:
+ ansible = Ansible(
+ playbook="docker.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ )
+ ansible.run()
+ except Exception:
+ log_error("Docker Setup Exception", server=self.as_dict())
+ def setup_archived_folder(self):
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "_setup_archived_folder",
+ queue="short",
+ timeout=1200,
+ )
-class Server(BaseServer):
- def on_update(self):
- # If Database Server is changed for the server then change it for all the benches
- if not self.is_new() and self.has_value_changed("database_server"):
- benches = frappe.get_all(
- "Bench", {"server": self.name, "status": ("!=", "Archived")}
+ def _setup_archived_folder(self):
+ try:
+ ansible = Ansible(
+ playbook="setup_archived_folder.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
)
- for bench in benches:
- bench = frappe.get_doc("Bench", bench)
- bench.database_server = self.database_server
- bench.save()
-
- @frappe.whitelist()
- def add_upstream_to_proxy(self):
- agent = Agent(self.proxy_server, server_type="Proxy Server")
- agent.new_server(self.name)
+ ansible.run()
+ except Exception:
+ log_error("Archived folder setup error", server=self.as_dict())
def _setup_server(self):
agent_password = self.get_password("agent_password")
agent_repository_url = self.get_agent_repository_url()
+ agent_branch = self.get_agent_repository_branch()
certificate = self.get_certificate()
log_server, kibana_password = self.get_log_server()
- proxy_ip = frappe.db.get_value("Proxy Server", self.proxy_server, "private_ip")
+ agent_sentry_dsn = frappe.db.get_single_value("Press Settings", "agent_sentry_dsn")
+
+ # If database server is set, then define db port under configuration
+ db_port = (
+ frappe.db.get_value("Database Server", self.database_server, "db_port")
+ if self.database_server
+ else None
+ )
try:
ansible = Ansible(
- playbook="self_hosted.yml"
- if getattr(self, "is_self_hosted", False)
- else "server.yml",
+ playbook="self_hosted.yml" if getattr(self, "is_self_hosted", False) else "server.yml",
server=self,
- user=self.ssh_user or "root",
- port=self.ssh_port or 22,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
variables={
"server": self.name,
"private_ip": self.private_ip,
- "proxy_ip": proxy_ip,
+ "proxy_ip": self.get_proxy_ip(),
"workers": "2",
"agent_password": agent_password,
"agent_repository_url": agent_repository_url,
+ "agent_branch": agent_branch,
+ "agent_sentry_dsn": agent_sentry_dsn,
"monitoring_password": self.get_monitoring_password(),
"log_server": log_server,
"kibana_password": kibana_password,
"certificate_private_key": certificate.private_key,
"certificate_full_chain": certificate.full_chain,
"certificate_intermediate_chain": certificate.intermediate_chain,
+ "docker_depends_on_mounts": self.docker_depends_on_mounts,
+ "db_port": db_port,
+ "agent_repository_branch_or_commit_ref": self.get_agent_repository_branch(),
+ "agent_update_args": " --skip-repo-setup=true",
+ **self.get_mount_variables(),
},
)
play = ansible.run()
self.reload()
+ self._set_mount_status(play)
if play.status == "Success":
self.status = "Active"
self.is_server_setup = True
+ if self.provider == "DigitalOcean":
+ # To adjust docker permissions
+ self.reboot()
else:
self.status = "Broken"
except Exception:
@@ -532,19 +2775,25 @@ def _setup_server(self):
log_error("Server Setup Exception", server=self.as_dict())
self.save()
+ def get_proxy_ip(self):
+ # In case of standalone setup, proxy is not required.
+ if self.is_standalone:
+ return self.ip
+ private_ip = frappe.db.get_value("Proxy Server", self.proxy_server, "private_ip")
+ with_mask = private_ip + "/24"
+ return str(ipaddress.ip_network(with_mask, strict=False))
+
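# Editor's note: an illustrative run of the masking in get_proxy_ip above
# (the address is hypothetical); strict=False lets ipaddress zero out the
# host bits instead of raising:

import ipaddress

assert str(ipaddress.ip_network("10.1.2.37/24", strict=False)) == "10.1.2.0/24"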
@frappe.whitelist()
def setup_standalone(self):
- frappe.enqueue_doc(
- self.doctype, self.name, "_setup_standalone", queue="short", timeout=1200
- )
+ frappe.enqueue_doc(self.doctype, self.name, "_setup_standalone", queue="short", timeout=1200)
def _setup_standalone(self):
try:
ansible = Ansible(
playbook="standalone.yml",
server=self,
- user=self.ssh_user or "root",
- port=self.ssh_port or 22,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
variables={
"server": self.name,
"domain": self.domain,
@@ -554,15 +2803,41 @@ def _setup_standalone(self):
self.reload()
if play.status == "Success":
self.is_standalone_setup = True
+ self.setup_wildcard_hosts()
+ self.update_benches_nginx()
except Exception:
log_error("Standalone Server Setup Exception", server=self.as_dict())
self.save()
+ @frappe.whitelist()
+ def update_benches_nginx(self):
+ """Update benches config for all benches in the server"""
+ benches = frappe.get_all("Bench", "name", {"server": self.name, "status": "Active"}, pluck="name")
+ for bench_name in benches:
+ bench: Bench = frappe.get_doc("Bench", bench_name)
+ bench.generate_nginx_config()
+
+ @frappe.whitelist()
+ def setup_agent_sentry(self):
+ frappe.enqueue_doc(self.doctype, self.name, "_setup_agent_sentry")
+
+ def _setup_agent_sentry(self):
+ agent_sentry_dsn = frappe.db.get_single_value("Press Settings", "agent_sentry_dsn")
+ try:
+ ansible = Ansible(
+ playbook="agent_sentry.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ variables={"agent_sentry_dsn": agent_sentry_dsn},
+ )
+ ansible.run()
+ except Exception:
+ log_error("Agent Sentry Setup Exception", server=self.as_dict())
+
@frappe.whitelist()
def whitelist_ipaddress(self):
- frappe.enqueue_doc(
- self.doctype, self.name, "_whitelist_ip", queue="short", timeout=1200
- )
+ frappe.enqueue_doc(self.doctype, self.name, "_whitelist_ip", queue="short", timeout=1200)
def _whitelist_ip(self):
proxy_server = frappe.get_value("Server", self.name, "proxy_server")
@@ -572,6 +2847,8 @@ def _whitelist_ip(self):
ansible = Ansible(
playbook="whitelist_ipaddress.yml",
server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
variables={"ip_address": proxy_server_ip},
)
play = ansible.run()
@@ -588,23 +2865,20 @@ def _whitelist_ip(self):
@frappe.whitelist()
def agent_set_proxy_ip(self):
- frappe.enqueue_doc(
- self.doctype, self.name, "_agent_set_proxy_ip", queue="short", timeout=1200
- )
+ frappe.enqueue_doc(self.doctype, self.name, "_agent_set_proxy_ip", queue="short", timeout=1200)
def _agent_set_proxy_ip(self):
- proxy_ip = frappe.db.get_value("Proxy Server", self.proxy_server, "private_ip")
agent_password = self.get_password("agent_password")
try:
ansible = Ansible(
playbook="agent_set_proxy_ip.yml",
server=self,
- user=self.ssh_user or "root",
- port=self.ssh_port or 22,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
variables={
"server": self.name,
- "proxy_ip": proxy_ip,
+ "proxy_ip": self.get_proxy_ip(),
"workers": "2",
"agent_password": agent_password,
},
@@ -615,37 +2889,25 @@ def _agent_set_proxy_ip(self):
self.save()
@frappe.whitelist()
- def setup_fail2ban(self):
- self.status = "Installing"
- self.save()
- frappe.enqueue_doc(
- self.doctype, self.name, "_setup_fail2ban", queue="long", timeout=1200
- )
+ def setup_pyspy(self):
+ frappe.enqueue_doc(self.doctype, self.name, "_setup_pyspy", queue="long")
- def _setup_fail2ban(self):
+ def _setup_pyspy(self):
try:
ansible = Ansible(
- playbook="fail2ban.yml",
- server=self,
+ playbook="setup_pyspy.yml", server=self, user=self._ssh_user(), port=self._ssh_port()
)
- play = ansible.run()
- self.reload()
- if play.status == "Success":
- self.status = "Active"
- else:
- self.status = "Broken"
+ play: AnsiblePlay = ansible.run()
+ self.is_pyspy_setup = play.status == "Success"
+ self.save()
except Exception:
- self.status = "Broken"
- log_error("Fail2ban Setup Exception", server=self.as_dict())
- self.save()
+ log_error("Setup PySpy Exception", server=self.as_dict())
@frappe.whitelist()
def setup_replication(self):
self.status = "Installing"
self.save()
- frappe.enqueue_doc(
- self.doctype, self.name, "_setup_replication", queue="long", timeout=1200
- )
+ frappe.enqueue_doc(self.doctype, self.name, "_setup_replication", queue="long", timeout=1200)
def _setup_replication(self):
self._setup_secondary()
@@ -657,12 +2919,19 @@ def _setup_replication(self):
self.save()
def _setup_primary(self, secondary):
- secondary_private_ip = frappe.db.get_value("Server", secondary, "private_ip")
+ secondary_private_ip, secondary_ssh_port = frappe.db.get_value(
+ "Server", secondary, ("private_ip", "ssh_port")
+ )
try:
ansible = Ansible(
playbook="primary_app.yml",
server=self,
- variables={"secondary_private_ip": secondary_private_ip},
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ variables={
+ "secondary_private_ip": secondary_private_ip,
+ "secondary_ssh_port": secondary_ssh_port,
+ },
)
play = ansible.run()
self.reload()
@@ -676,12 +2945,13 @@ def _setup_primary(self, secondary):
self.save()
def _setup_secondary(self):
- primary_public_key = frappe.db.get_value("Server", self.primary, "frappe_public_key")
try:
ansible = Ansible(
playbook="secondary_app.yml",
server=self,
- variables={"primary_public_key": primary_public_key},
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ variables={"primary_public_key": self.get_primary_frappe_public_key()},
)
play = ansible.run()
self.reload()
@@ -696,13 +2966,13 @@ def _setup_secondary(self):
self.save()
def _install_exporters(self):
- monitoring_password = frappe.get_doc("Cluster", self.cluster).get_password(
- "monitoring_password"
- )
+ monitoring_password = frappe.get_doc("Cluster", self.cluster).get_password("monitoring_password")
try:
ansible = Ansible(
playbook="server_exporters.yml",
server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
variables={
"private_ip": self.private_ip,
"monitoring_password": monitoring_password,
@@ -713,42 +2983,28 @@ def _install_exporters(self):
log_error("Exporters Install Exception", server=self.as_dict())
@classmethod
- def get_all_prod(cls, **kwargs) -> List[str]:
- """Active prod servers."""
- return frappe.get_all("Server", {"status": "Active"}, pluck="name", **kwargs)
-
- @classmethod
- def get_all_primary_prod(cls) -> List[str]:
+ def get_all_primary_prod(cls) -> list[str]:
"""Active primary prod servers."""
- return frappe.get_all(
- "Server", {"status": "Active", "is_primary": True}, pluck="name"
- )
+ return frappe.get_all("Server", {"status": "Active", "is_primary": True}, pluck="name")
@classmethod
- def get_all_staging(cls, **kwargs) -> List[str]:
+ def get_all_staging(cls, **kwargs) -> list[str]:
"""Active staging servers."""
- return frappe.get_all(
- "Server", {"status": "Active", "staging": True}, pluck="name", **kwargs
- )
+ return frappe.get_all("Server", {"status": "Active", "staging": True}, pluck="name", **kwargs)
@classmethod
def get_one_staging(cls) -> str:
return cls.get_all_staging(limit=1)[0]
@classmethod
- def get_prod_for_new_bench(cls, extra_filters={}) -> Union[str, None]:
+ def get_prod_for_new_bench(cls, extra_filters=None) -> str | None:
filters = {"status": "Active", "use_for_new_benches": True}
- servers = frappe.get_all(
- "Server", {**filters, **extra_filters}, pluck="name", limit=1
- )
+ if extra_filters:
+ filters.update(extra_filters)
+ servers = frappe.get_all("Server", {**filters}, pluck="name", limit=1)
if servers:
return servers[0]
-
- @frappe.whitelist()
- def reboot(self):
- if self.provider == "AWS EC2":
- virtual_machine = frappe.get_doc("Virtual Machine", self.virtual_machine)
- virtual_machine.reboot()
+ return None
def _rename_server(self):
agent_password = self.get_password("agent_password")
@@ -757,29 +3013,23 @@ def _rename_server(self):
"TLS Certificate", {"wildcard": True, "domain": self.domain}, "name"
)
certificate = frappe.get_doc("TLS Certificate", certificate_name)
- monitoring_password = frappe.get_doc("Cluster", self.cluster).get_password(
- "monitoring_password"
- )
+ monitoring_password = frappe.get_doc("Cluster", self.cluster).get_password("monitoring_password")
log_server = frappe.db.get_single_value("Press Settings", "log_server")
if log_server:
- kibana_password = frappe.get_doc("Log Server", log_server).get_password(
- "kibana_password"
- )
+ kibana_password = frappe.get_doc("Log Server", log_server).get_password("kibana_password")
else:
kibana_password = None
- proxy_ip = frappe.db.get_value("Proxy Server", self.proxy_server, "private_ip")
-
try:
ansible = Ansible(
playbook="rename.yml",
server=self,
- user=self.ssh_user or "root",
- port=self.ssh_port or 22,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
variables={
"server": self.name,
"private_ip": self.private_ip,
- "proxy_ip": proxy_ip,
+ "proxy_ip": self.get_proxy_ip(),
"workers": "2",
"agent_password": agent_password,
"agent_repository_url": agent_repository_url,
@@ -804,23 +3054,14 @@ def _rename_server(self):
self.save()
@frappe.whitelist()
- def auto_scale_workers(self):
+ def auto_scale_workers(self, commit=True):
if self.new_worker_allocation:
- self._auto_scale_workers_new()
+ self._auto_scale_workers_new(commit)
else:
self._auto_scale_workers_old()
- def _auto_scale_workers_new(self):
- usable_ram = max(
- self.ram - 3000, self.ram * 0.75
- ) # in MB (leaving some for disk cache + others)
- usable_ram_for_gunicorn = 0.6 * usable_ram # 60% of usable ram
- usable_ram_for_bg = 0.4 * usable_ram # 40% of usable ram
- max_gunicorn_workers = (
- usable_ram_for_gunicorn / 150
- ) # avg ram usage of 1 gunicorn worker
- max_bg_workers = usable_ram_for_bg / (3 * 80) # avg ram usage of 3 sets of bg workers
-
+ @cached_property
+ def bench_workloads(self) -> dict["Bench", int]:
bench_workloads = {}
benches = frappe.get_all(
"Bench",
@@ -829,33 +3070,51 @@ def _auto_scale_workers_new(self):
)
for bench_name in benches:
bench = frappe.get_doc("Bench", bench_name)
- bench_workloads[bench_name] = bench.work_load
+ bench_workloads[bench] = bench.workload
+ return bench_workloads
- total_workload = sum(bench_workloads.values())
+ @cached_property
+ def workload(self) -> int:
+ return sum(self.bench_workloads.values())
- for bench_name, workload in bench_workloads.items():
+ @cached_property
+ def usable_ram(self) -> float:
+ return max(self.ram - 3000, self.ram * 0.75) # in MB (leaving some for disk cache + others)
+
+ @cached_property
+ def max_gunicorn_workers(self) -> float:
+ usable_ram_for_gunicorn = 0.6 * self.usable_ram # 60% of usable ram
+ return usable_ram_for_gunicorn / self.GUNICORN_MEMORY
+
+ @cached_property
+ def max_bg_workers(self) -> float:
+ usable_ram_for_bg = 0.4 * self.usable_ram # 40% of usable ram
+ return usable_ram_for_bg / self.BACKGROUND_JOB_MEMORY
+
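
To make the memory budget concrete, a worked sketch for a hypothetical 16 GB server, assuming GUNICORN_MEMORY = 150 and BACKGROUND_JOB_MEMORY = 240 (3 x 80), matching the per-worker averages in the code this replaces:

    ram = 16000  # MB, hypothetical
    usable_ram = max(ram - 3000, ram * 0.75)       # 13000 MB
    max_gunicorn_workers = 0.6 * usable_ram / 150  # 7800 / 150 = 52.0
    max_bg_workers = 0.4 * usable_ram / 240        # 5200 / 240 ~= 21.7
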
+ def _auto_scale_workers_new(self, commit):
+ for bench in self.bench_workloads:
try:
- bench = frappe.get_doc("Bench", bench_name, for_update=True)
- try:
- gunicorn_workers = min(
- 24,
- max(2, round(workload / total_workload * max_gunicorn_workers)), # min 2 max 24
- )
- background_workers = min(
- 8, max(1, round(workload / total_workload * max_bg_workers)) # min 1 max 8
- )
- except ZeroDivisionError: # when total_workload is 0
- gunicorn_workers = 2
- background_workers = 1
- bench.gunicorn_workers = gunicorn_workers
- bench.background_workers = background_workers
- bench.save()
- frappe.db.commit()
+ bench.reload()
+ bench.allocate_workers(
+ self.workload,
+ self.max_gunicorn_workers,
+ self.max_bg_workers,
+ self.set_bench_memory_limits,
+ self.GUNICORN_MEMORY,
+ self.BACKGROUND_JOB_MEMORY,
+ )
+ if commit:
+ frappe.db.commit()
+ except frappe.TimestampMismatchError:
+ if commit:
+ frappe.db.rollback()
+ continue
except Exception:
- log_error("Bench Auto Scale Worker Error", bench=bench, workload=workload)
- frappe.db.rollback()
+ log_error("Bench Auto Scale Worker Error", bench=bench, workload=self.bench_workloads[bench])
+ if commit:
+ frappe.db.rollback()
- def _auto_scale_workers_old(self):
+ def _auto_scale_workers_old(self): # noqa: C901
benches = frappe.get_all(
"Bench",
filters={"server": self.name, "status": "Active", "auto_scale_workers": True},
@@ -863,21 +3122,21 @@ def _auto_scale_workers_old(self):
)
for bench_name in benches:
bench = frappe.get_doc("Bench", bench_name)
- work_load = bench.work_load
+ workload = bench.workload
- if work_load <= 10:
+ if workload <= 10:
background_workers, gunicorn_workers = 1, 2
- elif work_load <= 20:
+ elif workload <= 20:
background_workers, gunicorn_workers = 2, 4
- elif work_load <= 30:
+ elif workload <= 30:
background_workers, gunicorn_workers = 3, 6
- elif work_load <= 50:
+ elif workload <= 50:
background_workers, gunicorn_workers = 4, 8
- elif work_load <= 100:
+ elif workload <= 100:
background_workers, gunicorn_workers = 6, 12
- elif work_load <= 250:
+ elif workload <= 250:
background_workers, gunicorn_workers = 8, 16
- elif work_load <= 500:
+ elif workload <= 500:
background_workers, gunicorn_workers = 16, 32
else:
background_workers, gunicorn_workers = 24, 48
@@ -893,12 +3152,327 @@ def _auto_scale_workers_old(self):
)
bench.save()
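
The ladder above is a plain step function. An equivalent table-driven sketch (not the committed implementation) makes the thresholds easier to scan:

    import bisect

    THRESHOLDS = [10, 20, 30, 50, 100, 250, 500]
    WORKERS = [(1, 2), (2, 4), (3, 6), (4, 8), (6, 12), (8, 16), (16, 32), (24, 48)]

    def workers_for(workload: int) -> tuple[int, int]:
        # (background_workers, gunicorn_workers) for the first threshold >= workload
        return WORKERS[bisect.bisect_left(THRESHOLDS, workload)]

    assert workers_for(10) == (1, 2)     # workload <= 10
    assert workers_for(120) == (8, 16)   # 100 < workload <= 250
    assert workers_for(900) == (24, 48)  # beyond the last threshold
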
+ @frappe.whitelist()
+ def reset_sites_usage(self):
+ sites = frappe.get_all(
+ "Site",
+ filters={"server": self.name, "status": "Active"},
+ pluck="name",
+ )
+ for site_name in sites:
+ site = frappe.get_doc("Site", site_name)
+ site.reset_site_usage()
+
+ def install_earlyoom(self):
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "_install_earlyoom",
+ )
+
+ def _install_earlyoom(self):
+ try:
+ ansible = Ansible(
+ playbook="server_memory_limits.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ )
+ ansible.run()
+ except Exception:
+ log_error("Earlyoom Install Exception", server=self.as_dict())
+
+ @frappe.whitelist()
+ def install_wazuh_agent(self):
+ wazuh_server = frappe.get_value("Press Settings", "Press Settings", "wazuh_server")
+ if not wazuh_server:
+ frappe.throw("Please configure Wazuh Server in Press Settings")
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "_install_wazuh_agent",
+ wazuh_server=wazuh_server,
+ )
+
+ def _install_wazuh_agent(self, wazuh_server: str):
+ try:
+ ansible = Ansible(
+ playbook="wazuh_agent_install.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ variables={
+ "wazuh_manager": wazuh_server,
+ "wazuh_agent_name": self.name,
+ },
+ )
+ ansible.run()
+ except Exception:
+ log_error("Wazuh Agent Install Exception", server=self.as_dict())
+
+ @frappe.whitelist()
+ def uninstall_wazuh_agent(self):
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "_uninstall_wazuh_agent",
+ )
+
+ def _uninstall_wazuh_agent(self):
+ try:
+ ansible = Ansible(
+ playbook="wazuh_agent_uninstall.yml",
+ server=self,
+ user=self._ssh_user(),
+ port=self._ssh_port(),
+ )
+ ansible.run()
+ except Exception:
+ log_error("Wazuh Agent Uninstall Exception", server=self.as_dict())
+
+ @property
+ def docker_depends_on_mounts(self):
+ mount_points = {mount.mount_point for mount in self.mounts}
+ bench_mount_points = {"/home/frappe/benches"}
+ return bench_mount_points.issubset(mount_points)
+
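
A minimal sketch of the subset check above, with hypothetical mount points: docker is made to depend on mounts only when /home/frappe/benches itself is one of them.

    mount_points = {"/home/frappe/benches", "/opt/volumes/data"}  # hypothetical
    print({"/home/frappe/benches"}.issubset(mount_points))           # True
    print({"/home/frappe/benches"}.issubset({"/opt/volumes/data"}))  # False
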
+ @dashboard_whitelist()
+ def create_snapshot(self, consistent: bool = False) -> str:
+ return self._create_snapshot(consistent)
+
+ def _create_snapshot(
+ self, consistent: bool = False, expire_at: datetime.datetime | None = None, free: bool = False
+ ) -> str:
+ doc = frappe.get_doc(
+ {
+ "doctype": "Server Snapshot",
+ "app_server": self.name,
+ "consistent": consistent,
+ "expire_at": expire_at,
+ "free": free,
+ }
+ ).insert(ignore_permissions=True)
+ frappe.msgprint(
+ f"Snapshot created successfully. Check Here "
+ )
+ return doc.name
+
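
A usage sketch for the internal variant (hypothetical server name): expire_at and free are only reachable through _create_snapshot, while the dashboard method exposes just consistent.

    import datetime
    import frappe

    server = frappe.get_doc("Server", "f1-mumbai.frappe.cloud")  # hypothetical
    snapshot_name = server._create_snapshot(
        consistent=True,
        expire_at=frappe.utils.now_datetime() + datetime.timedelta(days=7),
        free=True,
    )
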
+ @dashboard_whitelist()
+ def delete_snapshot(self, snapshot_name: str) -> None:
+ doc = frappe.get_doc("Server Snapshot", snapshot_name)
+ if doc.app_server != self.name:
+ frappe.throw("Snapshot does not belong to this server")
+ doc.delete_snapshots()
+
+ @dashboard_whitelist()
+ def lock_snapshot(self, snapshot_name: str) -> None:
+ doc = frappe.get_doc("Server Snapshot", snapshot_name)
+ if doc.app_server != self.name:
+ frappe.throw("Snapshot does not belong to this server")
+ doc.lock()
+
+ @dashboard_whitelist()
+ def unlock_snapshot(self, snapshot_name: str) -> None:
+ doc = frappe.get_doc("Server Snapshot", snapshot_name)
+ if doc.app_server != self.name:
+ frappe.throw("Snapshot does not belong to this server")
+ doc.unlock()
+
+ def validate_bench_status_before_scaling(self) -> bool:
+ "Return True if any bench on this server has a Pending/Installing/Updating job, i.e. scaling must wait"
+ return bool(
+ frappe.db.get_value(
+ "Bench", {"server": self.name, "status": ("IN", ["Pending", "Installing", "Updating"])}
+ )
+ )
+
+ def validate_scale(self):
+ """
+ Validate that the server can auto scale before creating a scale record:
+ - No bench on the server is being modified
+ - The server is configured for auto scaling
+ - The cool-off period since the last successful auto scale has elapsed
+ - No auto scale operation is currently running on the server
+ - There is at least one active site on the server or its secondary
+ - There are no active deployments on the primary server
+ """
+ if not self.can_scale:
+ frappe.throw("Server is not configured for auto scaling", frappe.ValidationError)
+
+ if self.validate_bench_status_before_scaling():
+ frappe.throw(
+ "Please wait for all bench related jobs to complete before scaling the server.",
+ )
+
+ last_auto_scale_at = frappe.db.get_value(
+ "Auto Scale Record", {"primary_server": self.name, "status": "Success"}, "modified"
+ )
+ cool_off_period = frappe.db.get_single_value("Press Settings", "cool_off_period")
+ time_diff = (
+ (frappe.utils.now_datetime() - last_auto_scale_at)
+ if last_auto_scale_at
+ else timedelta(seconds=cool_off_period + 1)
+ )
+
+ running_auto_scale = frappe.db.get_value(
+ "Auto Scale Record", {"primary_server": self.name, "status": "Running"}
+ )
+
+ if running_auto_scale:
+ frappe.throw("Auto scale is already running", frappe.ValidationError)
+
+ if time_diff < timedelta(seconds=cool_off_period or 300):
+ frappe.throw(
+ f"Please wait for {fmt_timedelta(timedelta(seconds=cool_off_period or 300) - time_diff)} before scaling again",
+ frappe.ValidationError,
+ )
+
+ active_sites_on_primary = frappe.db.get_value(
+ "Site", {"server": self.name, "status": "Active"}, pluck="name"
+ )
+ active_sites_on_secondary = frappe.db.get_value(
+ "Site", {"server": self.secondary_server, "status": "Active"}, pluck="name"
+ )
+
+ if not active_sites_on_primary and not active_sites_on_secondary:
+ frappe.throw("There are no active sites on this server!", frappe.ValidationError)
+
+ active_deployments = frappe.db.get_value(
+ "Bench", {"server": self.name, "status": ("in", ["Installing", "Pending"])}
+ )
+
+ if active_deployments:
+ frappe.throw(
+ "Please wait for all active deployments to complete before scaling the server.",
+ )
+
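
The cool-off arithmetic in plain numbers, a sketch assuming the 300-second fallback used when cool_off_period is unset:

    from datetime import timedelta

    cool_off = timedelta(seconds=300)
    elapsed = timedelta(seconds=120)  # since the last successful auto scale
    if elapsed < cool_off:
        remaining = cool_off - elapsed
        print(f"wait {remaining.total_seconds():.0f}s before scaling again")  # wait 180s
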
+ @dashboard_whitelist()
+ @frappe.whitelist()
+ def remove_automated_scaling_triggers(self, triggers: list[str]):
+ """Remove the given triggers. Both directions are removed together, since a scale-up trigger can't be supported without a matching scale-down trigger"""
+ trigger_filters = {"parent": self.name, "name": ("in", triggers)}
+ matching_triggers: list[AutoScaleTriggerRow] = frappe.db.get_values(
+ "Auto Scale Trigger", trigger_filters, ["metric", "action"], as_dict=True
+ )
+ frappe.db.delete("Auto Scale Trigger", trigger_filters)
+
+ for trigger in matching_triggers:
+ update_or_delete_prometheus_rule_for_scaling(
+ self.name, metric=trigger["metric"], action=trigger["action"]
+ )
+
+ @dashboard_whitelist()
+ @frappe.whitelist()
+ def add_automated_scaling_triggers(
+ self, metric: Literal["CPU", "Memory"], action: Literal["Scale Up", "Scale Down"], threshold: float
+ ):
+ """Configure an automated scaling trigger based on CPU or memory load"""
+
+ if not self.secondary_server:
+ frappe.throw("Please setup a secondary server to enable auto scaling", frappe.ValidationError)
+
+ threshold = round(threshold, 2)
+ existing_trigger = frappe.db.get_value(
+ "Auto Scale Trigger", {"action": action, "parent": self.name, "metric": metric}
+ )
+
+ if existing_trigger:
+ frappe.db.set_value(
+ "Auto Scale Trigger",
+ existing_trigger,
+ {"action": action, "threshold": threshold, "metric": metric},
+ )
+ else:
+ self.append(
+ "auto_scale_trigger",
+ {"metric": metric, "threshold": threshold, "action": action},
+ )
+ self.save()
+
+ create_prometheus_rule_for_scaling(
+ self.name,
+ metric=metric,
+ threshold=threshold,
+ action=action,
+ )
+
+ @dashboard_whitelist()
+ @frappe.whitelist()
+ def scale_up(self, is_automatically_triggered: bool = False):
+ if self.scaled_up:
+ frappe.throw("Server is already scaled up", frappe.ValidationError)
+
+ self.validate_scale()
+
+ auto_scale_record = self._create_auto_scale_record(action="Scale Up")
+ auto_scale_record.is_automatically_triggered = is_automatically_triggered
+ auto_scale_record.insert()
+
+ @dashboard_whitelist()
+ @frappe.whitelist()
+ def scale_down(self, is_automatically_triggered: bool = False):
+ if not self.scaled_up:
+ frappe.throw("Server is already scaled down", frappe.ValidationError)
+
+ self.validate_scale()
+
+ if is_automatically_triggered and not is_secondary_ready_for_scale_down(self):
+ return
+
+ auto_scale_record = self._create_auto_scale_record(action="Scale Down")
+ auto_scale_record.is_automatically_triggered = is_automatically_triggered
+ auto_scale_record.insert()
+
+ @property
+ def can_scale(self) -> bool:
+ """
+ Check if the server is configured for auto scaling
+ and every enabled release group on it has a Redis password set
+ """
+ has_release_groups_without_redis_password = bool(
+ frappe.db.get_all(
+ "Release Group", {"server": self.name, "enabled": 1, "redis_password": ("LIKE", "")}
+ )
+ )
+ return self.benches_on_shared_volume and not has_release_groups_without_redis_password
+
+ def _create_auto_scale_record(self, action: Literal["Scale Up", "Scale Down"]) -> AutoScaleRecord:
+ """Create up/down scale record"""
+ return frappe.get_doc({"doctype": "Auto Scale Record", "primary_server": self.name, "action": action})
+
+ @property
+ def domains(self):
+ filters = {}
+ # Avoid pushing certificates to self-hosted servers on setup_wildcard_hosts
+ if self.is_self_hosted:
+ filters = {"name": self.name}
+ return [
+ frappe._dict({"domain": domain.name, "code_server": False})
+ for domain in frappe.get_all(
+ "Root Domain",
+ filters={"enabled": 1} | filters,
+ fields=["name"],
+ )
+ ] # To avoid adding child table in server doc
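
The {"enabled": 1} | filters expression uses PEP 584 dict union (Python 3.9+), where right-hand keys win on conflict. A tiny sketch with a hypothetical domain:

    filters = {"enabled": 1} | {"name": "fc.dev"}
    print(filters)  # {'enabled': 1, 'name': 'fc.dev'}
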
-def scale_workers():
+
+def scale_workers(now=False):
servers = frappe.get_all("Server", {"status": "Active", "is_primary": True})
for server in servers:
try:
- frappe.get_doc("Server", server.name).auto_scale_workers()
+ if now:
+ frappe.get_doc("Server", server.name).auto_scale_workers()
+ else:
+ frappe.enqueue_doc(
+ "Server",
+ server.name,
+ method="auto_scale_workers",
+ job_id=f"auto_scale_workers:{server.name}",
+ deduplicate=True,
+ queue="long",
+ enqueue_after_commit=True,
+ )
frappe.db.commit()
except Exception:
log_error("Auto Scale Worker Error", server=server)
@@ -920,3 +3494,22 @@ def cleanup_unused_files():
get_permission_query_conditions = get_permission_query_conditions_for_doctype("Server")
+
+
+def get_hostname_abbreviation(hostname):
+ hostname_parts = hostname.split("-")
+
+ abbr = hostname_parts[0]
+
+ for part in hostname_parts[1:]:
+ if part:
+ abbr += part[0]
+
+ return abbr
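
Worked examples with hypothetical hostnames: the first dash-separated part is kept whole and each later part contributes its first character.

    assert get_hostname_abbreviation("f12-mumbai-cluster") == "f12mc"
    assert get_hostname_abbreviation("m5-east") == "m5e"
    assert get_hostname_abbreviation("standalone") == "standalone"  # no dashes
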
+
+
+def is_dedicated_server(server_name):
+ if not isinstance(server_name, str):
+ frappe.throw("Invalid argument")
+ is_public = frappe.db.get_value("Server", server_name, "public")
+ return not is_public
diff --git a/press/press/doctype/server/test_server.py b/press/press/doctype/server/test_server.py
index 35f76e14021..ec5d0cbbfac 100644
--- a/press/press/doctype/server/test_server.py
+++ b/press/press/doctype/server/test_server.py
@@ -1,14 +1,17 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe and Contributors
# See license.txt
+from __future__ import annotations
-import unittest
+import typing
from unittest.mock import Mock, patch
import frappe
from frappe.model.naming import make_autoname
+from frappe.tests.utils import FrappeTestCase
+from moto import mock_aws
+from press.press.doctype.app.test_app import create_test_app
from press.press.doctype.database_server.test_database_server import (
create_test_database_server,
)
@@ -16,20 +19,44 @@
create_test_press_settings,
)
from press.press.doctype.proxy_server.test_proxy_server import create_test_proxy_server
+from press.press.doctype.release_group.test_release_group import create_test_release_group
from press.press.doctype.server.server import BaseServer
+from press.press.doctype.server_plan.test_server_plan import create_test_server_plan
+from press.press.doctype.site.test_site import create_test_bench
+from press.press.doctype.team.test_team import create_test_team
+from press.press.doctype.virtual_machine.test_virtual_machine import create_test_virtual_machine
+
+if typing.TYPE_CHECKING:
+ from press.press.doctype.server.server import Server
+ from press.press.doctype.server_plan.server_plan import ServerPlan
+ from press.press.doctype.virtual_machine.virtual_machine import VirtualMachine
@patch.object(BaseServer, "after_insert", new=Mock())
def create_test_server(
- proxy_server=None,
- database_server=None,
+ proxy_server: str | None = None,
+ database_server: str | None = None,
cluster: str = "Default",
-):
+ plan: str | None = None,
+ team: str | None = None,
+ public: bool = False,
+ platform: str = "x86_64",
+ use_for_build: bool = False,
+ is_self_hosted: bool = False,
+ auto_increase_storage: bool = False,
+ provider: str | None = None,
+ has_data_volume: bool = False,
+) -> "Server":
"""Create test Server doc."""
if not proxy_server:
proxy_server = create_test_proxy_server().name
if not database_server:
database_server = create_test_database_server().name
+ if not team:
+ team = create_test_team().name
+
+ plan_doc: "ServerPlan" | None = frappe.get_doc("Server Plan", plan) if plan else None
+
server = frappe.get_doc(
{
"doctype": "Server",
@@ -41,6 +68,23 @@ def create_test_server(
"domain": "fc.dev",
"hostname": make_autoname("f-.####"),
"cluster": cluster,
+ "new_worker_allocation": True,
+ "ram": 16000,
+ "team": team,
+ "plan": plan,
+ "public": public,
+ "virtual_machine": create_test_virtual_machine(
+ platform=plan_doc.platform if plan_doc else "x86_64",
+ disk_size=plan_doc.disk if plan_doc else 25,
+ has_data_volume=has_data_volume,
+ series="f",
+ ).name,
+ "platform": platform,
+ "use_for_build": use_for_build,
+ "is_self_hosted": is_self_hosted,
+ "auto_increase_storage": auto_increase_storage,
+ "provider": provider,
+ "has_data_volume": has_data_volume,
}
).insert()
server.reload()
@@ -48,7 +92,7 @@ def create_test_server(
@patch.object(BaseServer, "after_insert", new=Mock())
-class TestServer(unittest.TestCase):
+class TestServer(FrappeTestCase):
def test_create_generic_server(self):
create_test_press_settings()
proxy_server = create_test_proxy_server()
@@ -89,5 +133,135 @@ def test_set_agent_password(self):
server.insert()
self.assertEqual(len(server.get_password("agent_password")), 32)
- def tearDown(self):
- frappe.db.rollback()
+ def test_subscription_creation_on_server_creation(self):
+ create_test_press_settings()
+ server_plan = create_test_server_plan()
+ server = create_test_server(plan=server_plan.name)
+ self.assertEqual(server.team, server.subscription.team)
+ self.assertEqual(server.plan, server.subscription.plan)
+
+ @mock_aws
+ @patch.object(BaseServer, "enqueue_extend_ec2_volume", new=Mock())
+ @patch("boto3.client")
+ def test_subscription_creation_on_addon_storage(self, _):
+ """Test subscription creation with a fixed increment"""
+ increment = 10
+ create_test_press_settings()
+ server_plan = create_test_server_plan()
+ server: "Server" = create_test_server(plan=server_plan.name, provider="AWS EC2")
+ plan_disk_size = server_plan.disk
+ actual_disk_size = frappe.db.get_value("Virtual Machine", server.virtual_machine, "disk_size")
+ self.assertEqual(plan_disk_size, actual_disk_size)
+
+ vm: "VirtualMachine" = frappe.get_doc("Virtual Machine", server.virtual_machine)
+ root_volume = vm.volumes[0]
+ self.assertEqual(plan_disk_size, root_volume.size)
+
+ server.increase_disk_size_for_server(server.name, increment=increment)
+ new_actual_disk_size = frappe.db.get_value("Virtual Machine", server.virtual_machine, "disk_size")
+ self.assertEqual(plan_disk_size + increment, new_actual_disk_size)
+
+ subscription_doc = frappe.get_doc(
+ "Subscription",
+ {
+ "team": server.team,
+ "plan_type": "Server Storage Plan",
+ "plan": "Add-on Storage Plan",
+ "document_type": server.doctype,
+ "document_name": server.name,
+ },
+ )
+
+ self.assertEqual(subscription_doc.enabled, 1)
+
+ self.assertEqual(int(subscription_doc.additional_storage), increment)
+
+ # Increase by another 10
+ server.increase_disk_size_for_server(server.name, increment=increment)
+ new_actual_disk_size = frappe.db.get_value("Virtual Machine", server.virtual_machine, "disk_size")
+
+ self.assertEqual(plan_disk_size + increment + increment, new_actual_disk_size)
+
+ subscription_doc = frappe.get_doc(
+ "Subscription",
+ {
+ "team": server.team,
+ "plan_type": "Server Storage Plan",
+ "plan": "Add-on Storage Plan",
+ "document_type": server.doctype,
+ "document_name": server.name,
+ },
+ )
+
+ self.assertEqual(subscription_doc.enabled, 1)
+
+ self.assertEqual(int(subscription_doc.additional_storage), increment + increment)
+
+ def test_subscription_team_update_on_server_team_update(self):
+ create_test_press_settings()
+ server_plan = create_test_server_plan()
+ server = create_test_server(plan=server_plan.name)
+
+ self.assertEqual(server.team, server.subscription.team)
+ self.assertEqual(server.plan, server.subscription.plan)
+
+ # update server team
+ team2 = create_test_team()
+ server.team = team2.name
+ server.save()
+ self.assertEqual(server.team, server.subscription.team)
+
+ def test_db_server_team_update_on_server_team_update(self):
+ create_test_press_settings()
+ server_plan = create_test_server_plan()
+ db_server_plan = create_test_server_plan("Database Server")
+ server = create_test_server(plan=server_plan.name)
+ db_server = frappe.get_doc("Database Server", server.database_server)
+ db_server.plan = db_server_plan.name
+ db_server.save()
+
+ self.assertEqual(server.team, db_server.team)
+
+ # update server team
+ team2 = create_test_team()
+ server.team = team2.name
+ server.save()
+ server.reload()
+ db_server.reload()
+ self.assertEqual(server.team, db_server.team)
+ self.assertEqual(server.subscription.team, server.team)
+ self.assertEqual(server.subscription.team, db_server.subscription.team)
+
+ def test_remove_from_public_groups_removes_server_from_release_groups_child_table(self):
+ # Create three public release groups, add server to all
+ server = create_test_server(public=True)
+ apps = [create_test_app()]
+ group1 = create_test_release_group(apps, public=True, servers=[server.name])
+ group2 = create_test_release_group(apps, public=True, servers=[server.name])
+ group3 = create_test_release_group(apps, public=True, servers=[server.name])
+
+ # Add an active bench to group2 on the server
+ bench = create_test_bench(group=group2, server=server.name)
+ frappe.db.set_value("Bench", bench.name, "status", "Active")
+
+ self.assertTrue(any(s.server == server.name for s in group2.servers))
+ self.assertTrue(any(s.server == server.name for s in group3.servers))
+ self.assertTrue(any(s.server == server.name for s in group1.servers))
+
+ server.remove_from_public_groups()
+
+ # Reload groups
+ group1.reload()
+ group2.reload()
+ group3.reload()
+
+ # Assert server removed from groups without active benches
+ self.assertFalse(any(s.server == server.name for s in group1.servers))
+ self.assertFalse(any(s.server == server.name for s in group3.servers))
+ # Assert server still present in group2 (has active bench)
+ self.assertTrue(any(s.server == server.name for s in group2.servers))
+
+ server.remove_from_public_groups(force=True)
+ group2.reload()
+ # Assert server removed from group2
+ self.assertFalse(any(s.server == server.name for s in group2.servers))
diff --git a/press/press/doctype/server_activity/__init__.py b/press/press/doctype/server_activity/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/server_activity/server_activity.js b/press/press/doctype/server_activity/server_activity.js
new file mode 100644
index 00000000000..4ab33a63cc2
--- /dev/null
+++ b/press/press/doctype/server_activity/server_activity.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Server Activity", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/server_activity/server_activity.json b/press/press/doctype/server_activity/server_activity.json
new file mode 100644
index 00000000000..f15b0a09b4f
--- /dev/null
+++ b/press/press/doctype/server_activity/server_activity.json
@@ -0,0 +1,98 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-08-06 13:10:00.206404",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "column_break_ceix",
+ "document_type",
+ "document_name",
+ "team",
+ "action",
+ "reason"
+ ],
+ "fields": [
+ {
+ "fieldname": "column_break_ceix",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "label": "Team",
+ "options": "Team"
+ },
+ {
+ "fieldname": "action",
+ "fieldtype": "Select",
+ "label": "Action",
+ "options": "Created\nReboot\nVolume\nDisk Size Change\nTerminated\nIncident",
+ "reqd": 1
+ },
+ {
+ "fieldname": "reason",
+ "fieldtype": "Small Text",
+ "label": "Reason"
+ },
+ {
+ "fieldname": "document_type",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Document Type",
+ "options": "DocType",
+ "reqd": 1
+ },
+ {
+ "fieldname": "document_name",
+ "fieldtype": "Dynamic Link",
+ "label": "Document Name",
+ "options": "document_type",
+ "reqd": 1
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2025-08-07 12:09:37.291972",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Server Activity",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1
+ },
+ {
+ "create": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/press/doctype/server_activity/server_activity.py b/press/press/doctype/server_activity/server_activity.py
new file mode 100644
index 00000000000..56d0f7d0118
--- /dev/null
+++ b/press/press/doctype/server_activity/server_activity.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+from typing import Literal
+
+import frappe
+from frappe.model.document import Document
+
+
+class ServerActivity(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ action: DF.Literal[
+ "Created",
+ "Reboot",
+ "Volume",
+ "Disk Size Change",
+ "Terminated",
+ "Incident",
+ ]
+ document_name: DF.DynamicLink
+ document_type: DF.Link
+ reason: DF.SmallText | None
+ team: DF.Link | None
+ # end: auto-generated types
+
+ dashboard_fields = ("action", "reason", "document_name")
+
+
+def log_server_activity(
+ series: Literal["f", "m"],
+ server: str,
+ action: Literal["Created", "Reboot", "Volume", "Terminated", "Disk Size Change"],
+ reason: str | None = None,
+) -> None:
+ """Create a log of server activity"""
+ if series not in ["f", "m"]:
+ return
+
+ document_type = "Server" if series == "f" else "Database Server"
+ team = frappe.db.get_value(document_type, server, "team")
+
+ frappe.get_doc(
+ {
+ "doctype": "Server Activity",
+ "document_type": document_type,
+ "document_name": server,
+ "action": action,
+ "reason": reason,
+ "team": team,
+ }
+ ).insert()
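
A call sketch with a hypothetical server name: series "f" resolves to the Server doctype, "m" to Database Server, and any other series is silently ignored.

    log_server_activity(
        series="f",
        server="f1-mumbai.frappe.cloud",  # hypothetical
        action="Reboot",
        reason="Applying docker permission changes",
    )
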
diff --git a/press/press/doctype/server_activity/test_server_activity.py b/press/press/doctype/server_activity/test_server_activity.py
new file mode 100644
index 00000000000..da6037240bd
--- /dev/null
+++ b/press/press/doctype/server_activity/test_server_activity.py
@@ -0,0 +1,14 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class IntegrationTestServerActivity(FrappeTestCase):
+ """
+ Integration tests for ServerActivity.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/server_firewall/__init__.py b/press/press/doctype/server_firewall/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/server_firewall/server_firewall.js b/press/press/doctype/server_firewall/server_firewall.js
new file mode 100644
index 00000000000..74433048e08
--- /dev/null
+++ b/press/press/doctype/server_firewall/server_firewall.js
@@ -0,0 +1,18 @@
+// Copyright (c) 2026, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('Server Firewall', {
+ refresh(frm) {
+ [
+ ['↑', 'Setup', 'setup'],
+ ['↓', 'Teardown', 'teardown'],
+ ['🗘', 'Sync', 'sync'],
+ ].forEach(([icon, label, action]) => {
+ frm.add_custom_button(icon + ' ' + __(label), () => {
+ frm.call(action).then(() => {
+ frm.refresh();
+ });
+ });
+ });
+ },
+});
diff --git a/press/press/doctype/server_firewall/server_firewall.json b/press/press/doctype/server_firewall/server_firewall.json
new file mode 100644
index 00000000000..e81c5ef5f94
--- /dev/null
+++ b/press/press/doctype/server_firewall/server_firewall.json
@@ -0,0 +1,95 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "autoname": "field:server_id",
+ "creation": "2026-01-22 13:48:10.312595",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "server_id",
+ "rules_section",
+ "enabled",
+ "rules"
+ ],
+ "fields": [
+ {
+ "fieldname": "rules_section",
+ "fieldtype": "Section Break",
+ "label": "Rules"
+ },
+ {
+ "default": "0",
+ "fieldname": "enabled",
+ "fieldtype": "Check",
+ "label": "Enabled"
+ },
+ {
+ "fieldname": "rules",
+ "fieldtype": "Table",
+ "label": "Rules",
+ "options": "Server Firewall Rule"
+ },
+ {
+ "fieldname": "server_id",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Server",
+ "options": "Server",
+ "reqd": 1,
+ "unique": 1
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2026-01-22 13:53:22.726072",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Server Firewall",
+ "naming_rule": "By fieldname",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "rows_threshold_for_grid_search": 20,
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": [],
+ "track_changes": 1
+}
diff --git a/press/press/doctype/server_firewall/server_firewall.py b/press/press/doctype/server_firewall/server_firewall.py
new file mode 100644
index 00000000000..5fbc5daf3ef
--- /dev/null
+++ b/press/press/doctype/server_firewall/server_firewall.py
@@ -0,0 +1,199 @@
+# Copyright (c) 2026, Frappe and contributors
+# For license information, please see license.txt
+
+import ipaddress
+
+import frappe
+from frappe import _
+from frappe.model.document import Document
+
+from press.press.doctype.server.server import Server
+from press.runner import Ansible
+from press.utils import log_error
+
+
+class ServerFirewall(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.server_firewall_rule.server_firewall_rule import ServerFirewallRule
+
+ enabled: DF.Check
+ rules: DF.Table[ServerFirewallRule]
+ server_id: DF.Link
+ # end: auto-generated types
+
+ dashboard_fields = (
+ "enabled",
+ "rules",
+ )
+
+ def has_permission(self, permtype="read", *, debug=False, user=None) -> bool:
+ return self.server.has_permission(permtype, debug=debug, user=user)
+
+ def after_insert(self):
+ self.setup()
+
+ @frappe.whitelist()
+ def setup(self):
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "_setup",
+ )
+
+ def _setup(self):
+ try:
+ Ansible(
+ playbook="firewall_setup.yml",
+ server=self.server,
+ user=self.server._ssh_user(),
+ port=self.server._ssh_port(),
+ ).run()
+ except Exception:
+ log_error("Failed to setup firewall", doc=self)
+
+ def on_trash(self):
+ self.teardown()
+
+ @frappe.whitelist()
+ def teardown(self):
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "_teardown",
+ )
+
+ def _teardown(self):
+ try:
+ Ansible(
+ playbook="firewall_teardown.yml",
+ server=self.server,
+ user=self.server._ssh_user(),
+ port=self.server._ssh_port(),
+ ).run()
+ except Exception:
+ log_error("Failed to teardown firewall", doc=self)
+
+ def before_validate(self):
+ self.deduplicate_rules()
+
+ def deduplicate_rules(self):
+ """
+ Remove duplicate entries from rules. This will not save the doc.
+ """
+ rules_seen = set()
+ unique_rules = []
+ for rule in self.rules:
+ rule_tuple = (rule.source, rule.destination, rule.protocol, rule.action)
+ if rule_tuple not in rules_seen:
+ rules_seen.add(rule_tuple)
+ unique_rules.append(rule)
+ self.rules = unique_rules
+
+ def validate(self):
+ self.prevent_selfhosted()
+ self.validate_rules()
+
+ def prevent_selfhosted(self):
+ if self.server.is_self_hosted:
+ message = _("Firewall cannot be enabled for self-hosted servers.")
+ frappe.throw(message, frappe.ValidationError)
+
+ def validate_rules(self):
+ for rule in self.rules:
+ self.validate_ip(rule.source)
+ self.validate_ip(rule.destination)
+
+ def on_update(self):
+ self.sync()
+
+ @frappe.whitelist()
+ def sync(self):
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "_sync",
+ queue="default",
+ enqueue_after_commit=True,
+ deduplicate=True,
+ job_id=f"sync_firewall_{self.name}",
+ )
+
+ def _sync(self):
+ try:
+ Ansible(
+ playbook="firewall_sync.yml",
+ server=self.server,
+ user=self.server._ssh_user(),
+ port=self.server._ssh_port(),
+ variables={
+ "enabled": bool(self.enabled),
+ "rules": list(self.get_rules()),
+ "rules_bypass": self.get_bypass_rules(),
+ },
+ ).run()
+ except Exception:
+ log_error("Failed to sync firewall rules", doc=self)
+
+ def validate_ip(self, ip: str):
+ """Check that the provided string is a valid IPv4/IPv6 address or CIDR block."""
+ if not ip:
+ return
+ try:
+ ipaddress.ip_network(ip, strict=False)
+ except ValueError:
+ message = _("{0} is not a valid IP address or CIDR.").format(ip)
+ frappe.throw(message, frappe.ValidationError)
+
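
ip_network with strict=False accepts both a bare address and a CIDR block, which is exactly what the rule fields may hold; anything else raises ValueError. A sketch:

    import ipaddress

    ipaddress.ip_network("203.0.113.7", strict=False)     # ok, treated as a /32
    ipaddress.ip_network("203.0.113.0/24", strict=False)  # ok, CIDR block
    ipaddress.ip_network("not-an-ip", strict=False)       # raises ValueError
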
+ def get_rules(self):
+ for rule in self.rules:
+ payload = {
+ "source": rule.source,
+ "destination": rule.destination,
+ "protocol": rule.protocol,
+ "action": self.transform_action(rule.action),
+ }
+ if not payload["source"]:
+ payload.pop("source")
+ if not payload["destination"]:
+ payload.pop("destination")
+ yield payload
+
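
The generator yields plain dicts, dropping empty source/destination keys and translating the action to an iptables-style target. For a single Block rule it would produce something like this (hypothetical doc and values):

    rules = list(firewall.get_rules())  # firewall: a ServerFirewall doc
    print(rules)  # [{'destination': '203.0.113.7', 'protocol': 'TCP', 'action': 'DROP'}]
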
+ def get_bypass_rules(self):
+ monitors = frappe.get_all("Monitor Server", pluck="ip")
+ rules = []
+ for monitor in monitors:
+ rules.append(
+ {
+ "source": monitor,
+ "protocol": "TCP",
+ "action": "ACCEPT",
+ }
+ )
+ rules.append(
+ {
+ "destination": monitor,
+ "protocol": "TCP",
+ "action": "ACCEPT",
+ }
+ )
+ return rules
+
+ def transform_action(self, action: str):
+ match action:
+ case "Allow":
+ return "ACCEPT"
+ case "Block":
+ return "DROP"
+ case _:
+ return "REJECT"
+
+ @property
+ def server(self) -> Server:
+ return frappe.get_doc("Server", self.server_id)
diff --git a/press/press/doctype/server_firewall/test_server_firewall.py b/press/press/doctype/server_firewall/test_server_firewall.py
new file mode 100644
index 00000000000..b6fce95af48
--- /dev/null
+++ b/press/press/doctype/server_firewall/test_server_firewall.py
@@ -0,0 +1,20 @@
+# Copyright (c) 2026, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests import IntegrationTestCase
+
+# On IntegrationTestCase, the doctype test records and all
+# link-field test record dependencies are recursively loaded
+# Use these module variables to add/remove to/from that list
+EXTRA_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
+IGNORE_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
+
+
+class IntegrationTestServerFirewall(IntegrationTestCase):
+ """
+ Integration tests for ServerFirewall.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/server_firewall_rule/__init__.py b/press/press/doctype/server_firewall_rule/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/server_firewall_rule/server_firewall_rule.json b/press/press/doctype/server_firewall_rule/server_firewall_rule.json
new file mode 100644
index 00000000000..155df650eb6
--- /dev/null
+++ b/press/press/doctype/server_firewall_rule/server_firewall_rule.json
@@ -0,0 +1,61 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2026-01-20 16:03:37.600480",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "source",
+ "destination",
+ "protocol",
+ "action"
+ ],
+ "fields": [
+ {
+ "fieldname": "source",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Source"
+ },
+ {
+ "fieldname": "destination",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Destination"
+ },
+ {
+ "default": "Block",
+ "fieldname": "action",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Action",
+ "options": "Allow\nBlock",
+ "reqd": 1
+ },
+ {
+ "default": "TCP",
+ "fieldname": "protocol",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Protocol",
+ "options": "TCP\nUDP",
+ "reqd": 1
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2026-01-21 20:18:03.652434",
+ "modified_by": "hello@ssiyad.com",
+ "module": "Press",
+ "name": "Server Firewall Rule",
+ "owner": "Administrator",
+ "permissions": [],
+ "row_format": "Dynamic",
+ "rows_threshold_for_grid_search": 20,
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/press/doctype/server_firewall_rule/server_firewall_rule.py b/press/press/doctype/server_firewall_rule/server_firewall_rule.py
new file mode 100644
index 00000000000..122cb9449e2
--- /dev/null
+++ b/press/press/doctype/server_firewall_rule/server_firewall_rule.py
@@ -0,0 +1,26 @@
+# Copyright (c) 2026, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class ServerFirewallRule(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ action: DF.Literal["Allow", "Block"]
+ destination: DF.Data | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ protocol: DF.Literal["TCP", "UDP"]
+ source: DF.Data | None
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/server_mount/__init__.py b/press/press/doctype/server_mount/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/server_mount/server_mount.json b/press/press/doctype/server_mount/server_mount.json
new file mode 100644
index 00000000000..9a9bd2ce1e2
--- /dev/null
+++ b/press/press/doctype/server_mount/server_mount.json
@@ -0,0 +1,145 @@
+{
+ "actions": [],
+ "autoname": "autoincrement",
+ "creation": "2024-10-28 17:06:07.172615",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "mount_type",
+ "volume_id",
+ "filesystem",
+ "column_break_ygbk",
+ "status",
+ "source",
+ "column_break_uvrc",
+ "uuid",
+ "mount_point",
+ "mount_options",
+ "permissions_section",
+ "mount_point_owner",
+ "mount_point_group",
+ "column_break_kwsz",
+ "mount_point_mode"
+ ],
+ "fields": [
+ {
+ "columns": 2,
+ "fieldname": "volume_id",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Volume ID",
+ "mandatory_depends_on": "eval: doc.mount_type === \"Volume\"",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_ygbk",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "ext4",
+ "fieldname": "filesystem",
+ "fieldtype": "Select",
+ "label": "Filesystem",
+ "options": "ext4\nnone",
+ "reqd": 1
+ },
+ {
+ "fieldname": "mount_options",
+ "fieldtype": "Data",
+ "label": "Mount Options"
+ },
+ {
+ "columns": 1,
+ "default": "Volume",
+ "fieldname": "mount_type",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Mount Type",
+ "options": "Volume\nBind",
+ "reqd": 1
+ },
+ {
+ "columns": 3,
+ "fieldname": "mount_point",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Mount Point",
+ "reqd": 1
+ },
+ {
+ "columns": 3,
+ "fieldname": "source",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Source",
+ "mandatory_depends_on": "eval: doc.mount_type === \"Bind\"",
+ "read_only_depends_on": "eval: doc.mount_type === \"Volume\"",
+ "reqd": 1
+ },
+ {
+ "columns": 1,
+ "default": "Pending",
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Status",
+ "options": "Pending\nSuccess\nFailure",
+ "read_only": 1,
+ "reqd": 1
+ },
+ {
+ "fieldname": "uuid",
+ "fieldtype": "Data",
+ "label": "UUID",
+ "read_only": 1
+ },
+ {
+ "fieldname": "permissions_section",
+ "fieldtype": "Section Break",
+ "label": "Permissions"
+ },
+ {
+ "default": "root",
+ "fieldname": "mount_point_owner",
+ "fieldtype": "Data",
+ "label": "Mount Point Owner",
+ "reqd": 1
+ },
+ {
+ "fieldname": "column_break_kwsz",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "column_break_uvrc",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0755",
+ "fieldname": "mount_point_mode",
+ "fieldtype": "Data",
+ "label": "Mount Point Mode",
+ "reqd": 1
+ },
+ {
+ "default": "root",
+ "fieldname": "mount_point_group",
+ "fieldtype": "Data",
+ "label": "Mount Point Group",
+ "reqd": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2024-11-15 17:24:16.761964",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Server Mount",
+ "naming_rule": "Autoincrement",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/server_mount/server_mount.py b/press/press/doctype/server_mount/server_mount.py
new file mode 100644
index 00000000000..aa6c0f3434c
--- /dev/null
+++ b/press/press/doctype/server_mount/server_mount.py
@@ -0,0 +1,36 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from __future__ import annotations
+
+from frappe.model.document import Document
+
+
+class ServerMount(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ filesystem: DF.Literal["ext4", "none"]
+ mount_options: DF.Data | None
+ mount_point: DF.Data
+ mount_point_group: DF.Data
+ mount_point_mode: DF.Data
+ mount_point_owner: DF.Data
+ mount_type: DF.Literal["Volume", "Bind"]
+ name: DF.Int | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ source: DF.Data
+ status: DF.Literal["Pending", "Success", "Failure"]
+ uuid: DF.Data | None
+ volume_id: DF.Data | None
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/server_plan/__init__.py b/press/press/doctype/server_plan/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/server_plan/server_plan.js b/press/press/doctype/server_plan/server_plan.js
new file mode 100644
index 00000000000..faa92cb9fd0
--- /dev/null
+++ b/press/press/doctype/server_plan/server_plan.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2024, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Server Plan", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/server_plan/server_plan.json b/press/press/doctype/server_plan/server_plan.json
new file mode 100644
index 00000000000..f570c55f122
--- /dev/null
+++ b/press/press/doctype/server_plan/server_plan.json
@@ -0,0 +1,200 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "autoname": "prompt",
+ "creation": "2024-02-05 22:21:47.560972",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "title",
+ "plan_type",
+ "column_break_ppap",
+ "enabled",
+ "legacy_plan",
+ "allow_unified_server",
+ "machine_unavailable",
+ "ignore_machine_availability_sync",
+ "pricing_section",
+ "price_inr",
+ "column_break_sjmg",
+ "price_usd",
+ "section_break_nifk",
+ "premium",
+ "server_type",
+ "cluster",
+ "instance_type",
+ "platform",
+ "column_break_ypkt",
+ "vcpu",
+ "memory",
+ "disk",
+ "allowed_roles_section",
+ "roles"
+ ],
+ "fields": [
+ {
+ "default": "0",
+ "fieldname": "enabled",
+ "fieldtype": "Check",
+ "label": "Enabled"
+ },
+ {
+ "fieldname": "server_type",
+ "fieldtype": "Select",
+ "label": "Server Type",
+ "options": "Server\nDatabase Server\nProxy Server\nSelf Hosted Server"
+ },
+ {
+ "fieldname": "section_break_nifk",
+ "fieldtype": "Section Break",
+ "label": "Details"
+ },
+ {
+ "fieldname": "cluster",
+ "fieldtype": "Link",
+ "label": "Cluster",
+ "options": "Cluster"
+ },
+ {
+ "fieldname": "instance_type",
+ "fieldtype": "Data",
+ "label": "Instance Type"
+ },
+ {
+ "fieldname": "column_break_ypkt",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "vcpu",
+ "fieldtype": "Int",
+ "label": "vCPU"
+ },
+ {
+ "fieldname": "memory",
+ "fieldtype": "Int",
+ "label": "Memory"
+ },
+ {
+ "fieldname": "disk",
+ "fieldtype": "Int",
+ "label": "Disk"
+ },
+ {
+ "fieldname": "pricing_section",
+ "fieldtype": "Section Break",
+ "label": "Pricing"
+ },
+ {
+ "fieldname": "price_inr",
+ "fieldtype": "Currency",
+ "in_list_view": 1,
+ "label": "Price (INR)",
+ "options": "INR",
+ "reqd": 1
+ },
+ {
+ "fieldname": "column_break_sjmg",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "price_usd",
+ "fieldtype": "Currency",
+ "in_list_view": 1,
+ "label": "Price (USD)",
+ "options": "USD",
+ "reqd": 1
+ },
+ {
+ "fieldname": "title",
+ "fieldtype": "Data",
+ "label": "Title"
+ },
+ {
+ "fieldname": "allowed_roles_section",
+ "fieldtype": "Section Break",
+ "label": "Allowed Roles"
+ },
+ {
+ "fieldname": "roles",
+ "fieldtype": "Table",
+ "label": "Roles",
+ "options": "Has Role"
+ },
+ {
+ "default": "0",
+ "fieldname": "premium",
+ "fieldtype": "Check",
+ "label": "Premium"
+ },
+ {
+ "default": "x86_64",
+ "fieldname": "platform",
+ "fieldtype": "Select",
+ "label": "Platform",
+ "options": "x86_64\narm64\namd64",
+ "reqd": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "legacy_plan",
+ "fieldtype": "Check",
+ "label": "Legacy Plan"
+ },
+ {
+ "fieldname": "plan_type",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Plan Type",
+ "options": "Server Plan Type"
+ },
+ {
+ "fieldname": "column_break_ppap",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "allow_unified_server",
+ "fieldtype": "Check",
+ "label": "Allow Unified Server"
+ },
+ {
+ "default": "0",
+ "fieldname": "machine_unavailable",
+ "fieldtype": "Check",
+ "label": "Machine Unavailable"
+ },
+ {
+ "default": "0",
+ "fieldname": "ignore_machine_availability_sync",
+ "fieldtype": "Check",
+ "label": "Ignore Machine Availability Sync"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2026-01-12 13:37:13.763512",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Server Plan",
+ "naming_rule": "Set by user",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/press/doctype/server_plan/server_plan.py b/press/press/doctype/server_plan/server_plan.py
new file mode 100644
index 00000000000..8bada4c0b8f
--- /dev/null
+++ b/press/press/doctype/server_plan/server_plan.py
@@ -0,0 +1,105 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import frappe
+
+from press.press.doctype.site_plan.plan import Plan
+from press.utils.jobs import has_job_timeout_exceeded
+
+if TYPE_CHECKING:
+ from press.press.doctype.cluster.cluster import Cluster
+
+
+class ServerPlan(Plan):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.core.doctype.has_role.has_role import HasRole
+ from frappe.types import DF
+
+ allow_unified_server: DF.Check
+ cluster: DF.Link | None
+ disk: DF.Int
+ enabled: DF.Check
+ ignore_machine_availability_sync: DF.Check
+ instance_type: DF.Data | None
+ legacy_plan: DF.Check
+ machine_unavailable: DF.Check
+ memory: DF.Int
+ plan_type: DF.Link | None
+ platform: DF.Literal["x86_64", "arm64", "amd64"]
+ premium: DF.Check
+ price_inr: DF.Currency
+ price_usd: DF.Currency
+ roles: DF.Table[HasRole]
+ server_type: DF.Literal["Server", "Database Server", "Proxy Server", "Self Hosted Server"]
+ title: DF.Data | None
+ vcpu: DF.Int
+ # end: auto-generated types
+
+ dashboard_fields = (
+ "title",
+ "price_inr",
+ "price_usd",
+ "vcpu",
+ "memory",
+ "disk",
+ "platform",
+ "premium",
+ "plan_type",
+ "allow_unified_server",
+ "machine_unavailable",
+ )
+
+ def get_doc(self, doc):
+ doc["price_per_day_inr"] = self.get_price_per_day("INR")
+ doc["price_per_day_usd"] = self.get_price_per_day("USD")
+ return doc
+
+ def validate(self):
+ self.validate_active_subscriptions()
+
+ def validate_active_subscriptions(self):
+ old_doc = self.get_doc_before_save()
+ if old_doc and old_doc.enabled and not self.enabled and not self.legacy_plan:
+ active_sub_count = frappe.db.count("Subscription", {"enabled": 1, "plan": self.name})
+ if active_sub_count > 0:
+ frappe.throw(
+ f"Cannot disable this plan. This plan is used in {active_sub_count} active subscription(s)."
+ )
+
+
+def sync_machine_availability_status_of_plans():
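+	# Reconcile each plan's machine_unavailable flag with live instance
+	# availability in its cluster; Cluster docs are memoized in
+	# cluster_doc_map to avoid refetching the same cluster repeatedly.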
+ plans = frappe.get_all(
+ "Server Plan",
+ filters={"ignore_machine_availability_sync": 0},
+ fields=["name", "cluster", "machine_unavailable", "instance_type"],
+ )
+ cluster_doc_map: dict[str, Cluster] = {}
+
+ for plan in plans:
+ if has_job_timeout_exceeded():
+ return
+
+ if not plan.instance_type:
+ continue
+
+ if plan.cluster not in cluster_doc_map:
+ cluster_doc_map[plan.cluster] = frappe.get_doc("Cluster", plan.cluster)
+
+ try:
+ is_unavailable = not cluster_doc_map[plan.cluster].check_machine_availability(plan.instance_type)
+ if is_unavailable != plan.machine_unavailable:
+ frappe.db.set_value("Server Plan", plan.name, "machine_unavailable", is_unavailable)
+ frappe.db.commit()
+ except Exception:
+ frappe.log_error(
+ f"Failed to sync machine availability status for Server Plan {plan.name}",
+ )
+ frappe.db.rollback()
diff --git a/press/press/doctype/server_plan/test_server_plan.py b/press/press/doctype/server_plan/test_server_plan.py
new file mode 100644
index 00000000000..53be7913940
--- /dev/null
+++ b/press/press/doctype/server_plan/test_server_plan.py
@@ -0,0 +1,33 @@
+# Copyright (c) 2024, Frappe and Contributors
+# See license.txt
+
+import typing
+
+import frappe
+from frappe.model.naming import make_autoname
+from frappe.tests.utils import FrappeTestCase
+
+if typing.TYPE_CHECKING:
+ from press.press.doctype.server_plan.server_plan import ServerPlan
+
+
+def create_test_server_plan(server_type: str = "Server") -> "ServerPlan":
+ """Create test Server Plan doc."""
+ server_plan = frappe.get_doc(
+ {
+ "doctype": "Server Plan",
+ "name": make_autoname("SP-.####"),
+ "server_type": server_type,
+ "title": frappe.mock("name"),
+ "price_inr": 1000,
+ "price_usd": 200,
+ "enabled": 1,
+ "disk": 25,
+ }
+ ).insert()
+ server_plan.reload()
+ return server_plan
+
+
+class TestServerPlan(FrappeTestCase):
+ pass
diff --git a/press/press/doctype/server_plan_type/__init__.py b/press/press/doctype/server_plan_type/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/server_plan_type/server_plan_type.js b/press/press/doctype/server_plan_type/server_plan_type.js
new file mode 100644
index 00000000000..187fb018acd
--- /dev/null
+++ b/press/press/doctype/server_plan_type/server_plan_type.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2026, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Server Plan Type", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/server_plan_type/server_plan_type.json b/press/press/doctype/server_plan_type/server_plan_type.json
new file mode 100644
index 00000000000..ea92d8c40de
--- /dev/null
+++ b/press/press/doctype/server_plan_type/server_plan_type.json
@@ -0,0 +1,84 @@
+{
+ "actions": [],
+ "autoname": "hash",
+ "creation": "2026-01-07 17:56:38.950000",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "title",
+ "description",
+ "order_in_list"
+ ],
+ "fields": [
+ {
+ "fieldname": "title",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Title",
+ "reqd": 1
+ },
+ {
+ "fieldname": "description",
+ "fieldtype": "Small Text",
+ "label": "Description",
+ "reqd": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "order_in_list",
+ "fieldtype": "Int",
+ "label": "Order In List",
+ "non_negative": 1,
+ "reqd": 1
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2026-01-07 19:36:43.478248",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Server Plan Type",
+ "naming_rule": "Random",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Member",
+ "select": 1,
+ "share": 1
+ },
+ {
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "select": 1,
+ "share": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "show_title_field_in_link": 1,
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": [],
+ "title_field": "title"
+}
diff --git a/press/press/doctype/server_plan_type/server_plan_type.py b/press/press/doctype/server_plan_type/server_plan_type.py
new file mode 100644
index 00000000000..93410418d6b
--- /dev/null
+++ b/press/press/doctype/server_plan_type/server_plan_type.py
@@ -0,0 +1,33 @@
+# Copyright (c) 2026, Frappe and contributors
+# For license information, please see license.txt
+
+import frappe
+from frappe.model.document import Document
+from frappe.utils.caching import redis_cache
+
+
+class ServerPlanType(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ description: DF.SmallText
+ order_in_list: DF.Int
+ title: DF.Data
+ # end: auto-generated types
+
+ pass
+
+
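+# Cached in Redis for 60 seconds; returns a name -> details map,
+# ordered by order_in_list (highest first).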
+@redis_cache(ttl=60)
+def get_server_plan_types() -> dict[str, dict]:
+ data = frappe.get_all(
+ "Server Plan Type",
+ fields=["name", "title", "description", "order_in_list"],
+ order_by="order_in_list desc",
+ )
+ return {d.name: d for d in data}
diff --git a/press/press/doctype/server_plan_type/test_server_plan_type.py b/press/press/doctype/server_plan_type/test_server_plan_type.py
new file mode 100644
index 00000000000..9fa6fc73633
--- /dev/null
+++ b/press/press/doctype/server_plan_type/test_server_plan_type.py
@@ -0,0 +1,21 @@
+from __future__ import annotations
+
+# Copyright (c) 2026, Frappe and Contributors
+# See license.txt
+# import frappe
+from frappe.tests import IntegrationTestCase
+
+# On IntegrationTestCase, the doctype test records and all
+# link-field test record dependencies are recursively loaded
+# Use these module variables to add/remove to/from that list
+EXTRA_TEST_RECORD_DEPENDENCIES: list[str] = [] # eg. ["User"]
+IGNORE_TEST_RECORD_DEPENDENCIES: list[str] = [] # eg. ["User"]
+
+
+class IntegrationTestServerPlanType(IntegrationTestCase):
+ """
+ Integration tests for ServerPlanType.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/server_snapshot/__init__.py b/press/press/doctype/server_snapshot/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/server_snapshot/server_snapshot.js b/press/press/doctype/server_snapshot/server_snapshot.js
new file mode 100644
index 00000000000..f1bafcd4407
--- /dev/null
+++ b/press/press/doctype/server_snapshot/server_snapshot.js
@@ -0,0 +1,136 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('Server Snapshot', {
+ refresh(frm) {
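+		// Each entry is [label, method, confirm, condition]: the button is
+		// rendered only when `condition` is truthy (or undefined), and
+		// `confirm` wraps the call in a confirmation dialog first.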
+ [
+ [__('Sync'), 'sync', false, true],
+ [
+ __('Delete'),
+ 'delete_snapshots',
+ true,
+ ['Pending', 'Completed'].includes(frm.doc.status),
+ ],
+ [__('Unlock'), 'unlock', true, frm.doc.locked],
+ [__('Lock'), 'lock', true, !frm.doc.locked],
+ [
+ __('Create Replica DB'),
+ 'create_replica_db_server',
+ true,
+ frm.doc.status === 'Completed',
+ ],
+ [
+ __('Recover All Sites'),
+ 'recover_sites',
+ true,
+ frm.doc.status === 'Completed',
+ ],
+ ].forEach(([label, method, confirm, condition]) => {
+ if (typeof condition === 'undefined' || condition) {
+ frm.add_custom_button(
+ label,
+ () => {
+ if (confirm) {
+ frappe.confirm(
+ `Are you sure you want to ${label.toLowerCase()}?`,
+ () =>
+ frm.call(method).then((r) => {
+ if (r.message) {
+ frappe.msgprint(r.message);
+ } else {
+ frm.refresh();
+ }
+ }),
+ );
+ } else {
+ frm.call(method).then((r) => {
+ if (r.message) {
+ frappe.msgprint(r.message);
+ } else {
+ frm.refresh();
+ }
+ });
+ }
+ },
+ __('Actions'),
+ );
+ }
+ });
+
+ if (frm.doc.status === 'Completed') {
+ frm.add_custom_button(
+ 'Create Server',
+ () => {
+ frappe.prompt(
+ [
+ {
+ fieldtype: 'Link',
+ label: 'Team',
+ fieldname: 'team',
+ options: 'Team',
+ reqd: 1,
+ default: frm.doc.team,
+ },
+ {
+ fieldtype: 'Select',
+ label: 'Server Type',
+ fieldname: 'server_type',
+ options: ['Server', 'Database Server'],
+ reqd: 1,
+ default: 'Server',
+ },
+ { fieldtype: 'Data', label: 'Server Name', fieldname: 'title' },
+ {
+ fieldtype: 'Link',
+ label: 'Server Plan',
+ fieldname: 'plan',
+ options: 'Server Plan',
+ reqd: 0,
+ get_query: function () {
+ return {
+ filters: {
+ cluster: frm.doc.cluster,
+ enabled: 1,
+ premium: 0,
+ },
+ };
+ },
+ },
+ {
+ fieldtype: 'Check',
+ label: 'Create Subscription',
+ fieldname: 'create_subscription',
+ },
+ {
+ fieldtype: 'Check',
+						label: 'Is Temporary Server?',
+ fieldname: 'temporary_server',
+ default: 1,
+ },
+ ],
+ ({
+ team,
+ server_type,
+ title,
+ plan,
+ create_subscription,
+ temporary_server,
+ }) => {
+ frm
+ .call('create_server', {
+ team,
+ server_type,
+ title,
+ plan,
+ create_subscription,
+ temporary_server,
+ })
+ .then((r) => frm.refresh());
+ },
+ );
+ },
+ __('Actions'),
+ );
+ }
+ },
+});
diff --git a/press/press/doctype/server_snapshot/server_snapshot.json b/press/press/doctype/server_snapshot/server_snapshot.json
new file mode 100644
index 00000000000..8bf92e43e23
--- /dev/null
+++ b/press/press/doctype/server_snapshot/server_snapshot.json
@@ -0,0 +1,367 @@
+{
+ "actions": [],
+ "autoname": "format:snap-{#######}",
+ "creation": "2025-07-30 16:05:51.738947",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "status",
+ "column_break_gcvn",
+ "team",
+ "column_break_unjj",
+ "provider",
+ "column_break_hgps",
+ "cluster",
+ "column_break_mwve",
+ "total_size_gb",
+ "column_break_jszb",
+ "expire_at",
+ "section_break_yuac",
+ "consistent",
+ "column_break_gviv",
+ "locked",
+ "column_break_pswb",
+ "free",
+ "section_break_zgjz",
+ "app_server",
+ "app_server_snapshot",
+ "app_server_snapshot_press_job",
+ "app_server_resume_service_press_job",
+ "app_server_services_started",
+ "column_break_dcyz",
+ "database_server",
+ "database_server_snapshot",
+ "database_server_snapshot_press_job",
+ "database_server_resume_service_press_job",
+ "database_server_services_started",
+ "section_break_ibcr",
+ "app_server_vcpu",
+ "column_break_hvxs",
+ "app_server_ram",
+ "column_break_xeyh",
+ "database_server_vcpu",
+ "column_break_nyhu",
+ "database_server_ram",
+ "section_break_ihux",
+ "site_list",
+ "traceback"
+ ],
+ "fields": [
+ {
+ "fieldname": "app_server",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "App Server",
+ "options": "Server",
+ "reqd": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "app_server_snapshot",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "App Server Snapshot",
+ "options": "Virtual Disk Snapshot",
+ "read_only": 1,
+ "search_index": 1,
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "column_break_dcyz",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fetch_from": "app_server.database_server",
+ "fetch_if_empty": 1,
+ "fieldname": "database_server",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "DB Server",
+ "options": "Database Server",
+ "reqd": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "database_server_snapshot",
+ "fieldtype": "Link",
+ "label": "DB Server Snapshot",
+ "options": "Virtual Disk Snapshot",
+ "read_only": 1,
+ "search_index": 1,
+ "set_only_once": 1
+ },
+ {
+ "default": "Pending",
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Status",
+ "options": "Pending\nProcessing\nFailure\nCompleted\nUnavailable",
+ "read_only": 1,
+ "reqd": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "column_break_gcvn",
+ "fieldtype": "Column Break",
+ "search_index": 1
+ },
+ {
+ "fetch_from": "app_server.team",
+ "fetch_if_empty": 1,
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "in_standard_filter": 1,
+ "label": "Team",
+ "options": "Team",
+ "read_only": 1,
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "section_break_zgjz",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "column_break_unjj",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "AWS EC2",
+ "fetch_from": "app_server.provider",
+ "fetch_if_empty": 1,
+ "fieldname": "provider",
+ "fieldtype": "Select",
+ "label": "Provider",
+ "options": "AWS EC2\nOCI",
+ "read_only": 1,
+ "reqd": 1,
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "section_break_yuac",
+ "fieldtype": "Section Break"
+ },
+ {
+ "default": "0",
+ "description": "The services on the server will be stopped during snapshotting",
+ "fieldname": "consistent",
+ "fieldtype": "Check",
+ "in_standard_filter": 1,
+ "label": "Consistent",
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "column_break_gviv",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "description": "Locked snapshots can't be deleted by any user",
+ "fieldname": "locked",
+ "fieldtype": "Check",
+ "label": "Locked",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_hgps",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fetch_from": "app_server.cluster",
+ "fetch_if_empty": 1,
+ "fieldname": "cluster",
+ "fieldtype": "Link",
+ "label": "Cluster",
+ "options": "Cluster",
+ "read_only": 1,
+ "reqd": 1,
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "app_server_resume_service_press_job",
+ "fieldtype": "Link",
+ "label": "App Server Resume Service Press Job",
+ "options": "Press Job",
+ "read_only": 1
+ },
+ {
+ "fieldname": "app_server_snapshot_press_job",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "App Server Snapshot Press Job",
+ "options": "Press Job",
+ "read_only": 1
+ },
+ {
+ "fieldname": "database_server_snapshot_press_job",
+ "fieldtype": "Link",
+ "label": "DB Server Snapshot Press Job",
+ "options": "Press Job",
+ "read_only": 1
+ },
+ {
+ "fieldname": "database_server_resume_service_press_job",
+ "fieldtype": "Link",
+ "label": "DB Server Resume Service Press Job",
+ "options": "Press Job",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "app_server_services_started",
+ "fieldtype": "Check",
+ "label": "App Server Services Started",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "database_server_services_started",
+ "fieldtype": "Check",
+ "label": "DB Server Services Started",
+ "read_only": 1
+ },
+ {
+ "fieldname": "section_break_ihux",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "site_list",
+ "fieldtype": "JSON",
+ "label": "Site List",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_mwve",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "total_size_gb",
+ "fieldtype": "Int",
+ "label": "Total Size (GB)",
+ "non_negative": 1,
+ "read_only": 1
+ },
+ {
+ "depends_on": "eval: doc.status == \"Failure\"",
+ "fieldname": "traceback",
+ "fieldtype": "Text",
+ "label": "Traceback"
+ },
+ {
+ "fieldname": "section_break_ibcr",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "app_server_vcpu",
+ "fieldtype": "Int",
+ "label": "App Server vCPU",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_hvxs",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "app_server_ram",
+ "fieldtype": "Int",
+ "label": "App Server RAM",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_xeyh",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "database_server_vcpu",
+ "fieldtype": "Int",
+ "label": "DB Server vCPU",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_nyhu",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "database_server_ram",
+ "fieldtype": "Int",
+ "label": "DB Server RAM",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_pswb",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "description": "If free, user will not be charged for the snapshot",
+ "fieldname": "free",
+ "fieldtype": "Check",
+ "label": "Free"
+ },
+ {
+ "fieldname": "column_break_jszb",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "expire_at",
+ "fieldtype": "Datetime",
+ "label": "Expire At"
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "links": [
+ {
+ "link_doctype": "Server Snapshot Recovery",
+ "link_fieldname": "snapshot"
+ }
+ ],
+ "modified": "2025-09-17 10:45:57.691000",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Server Snapshot",
+ "naming_rule": "Expression",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/press/doctype/server_snapshot/server_snapshot.py b/press/press/doctype/server_snapshot/server_snapshot.py
new file mode 100644
index 00000000000..8cd91f4c44f
--- /dev/null
+++ b/press/press/doctype/server_snapshot/server_snapshot.py
@@ -0,0 +1,719 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+import contextlib
+import json
+import typing
+from typing import TYPE_CHECKING, Literal
+
+import frappe
+from frappe.model.document import Document
+
+from press.api.client import dashboard_whitelist
+from press.overrides import get_permission_query_conditions_for_doctype
+from press.utils.jobs import has_job_timeout_exceeded
+
+if TYPE_CHECKING:
+ from press.press.doctype.cluster.cluster import Cluster
+	from press.press.doctype.virtual_machine.virtual_machine import VirtualMachine
+
+
+class ServerSnapshot(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ app_server: DF.Link
+ app_server_ram: DF.Int
+ app_server_resume_service_press_job: DF.Link | None
+ app_server_services_started: DF.Check
+ app_server_snapshot: DF.Link | None
+ app_server_snapshot_press_job: DF.Link | None
+ app_server_vcpu: DF.Int
+ cluster: DF.Link
+ consistent: DF.Check
+ database_server: DF.Link
+ database_server_ram: DF.Int
+ database_server_resume_service_press_job: DF.Link | None
+ database_server_services_started: DF.Check
+ database_server_snapshot: DF.Link | None
+ database_server_snapshot_press_job: DF.Link | None
+ database_server_vcpu: DF.Int
+ expire_at: DF.Datetime | None
+ free: DF.Check
+ locked: DF.Check
+ provider: DF.Literal["AWS EC2", "OCI"]
+ site_list: DF.JSON | None
+ status: DF.Literal["Pending", "Processing", "Failure", "Completed", "Unavailable"]
+ team: DF.Link | None
+ total_size_gb: DF.Int
+ traceback: DF.Text | None
+ # end: auto-generated types
+
+ dashboard_fields = (
+ "status",
+ "app_server",
+ "database_server",
+ "cluster",
+ "consistent",
+ "locked",
+ "free",
+ "total_size_gb",
+ "app_server_snapshot_press_job",
+ "database_server_snapshot_press_job",
+ "app_server_resume_service_press_job",
+ "database_server_resume_service_press_job",
+ "creation",
+ "expire_at",
+ )
+
+ @staticmethod
+ def get_list_query(query, filters=None, **list_args):
+ Snapshot = frappe.qb.DocType("Server Snapshot")
+ status = filters.get("status")
+ if status:
+ query = query.where(Snapshot.status == status)
+ else:
+ query = query.where(Snapshot.status != "Unavailable")
+
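+		# backup_date narrows results to snapshots created on that calendar day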
+ if filters.get("backup_date"):
+ with contextlib.suppress(Exception):
+ date = frappe.utils.getdate(filters["backup_date"])
+ query = query.where(
+ Snapshot.creation.between(
+ frappe.utils.add_to_date(date, hours=0, minutes=0, seconds=0),
+ frappe.utils.add_to_date(date, hours=23, minutes=59, seconds=59),
+ )
+ )
+
+ return query.run(as_dict=1)
+
+ def get_doc(self, doc: "ServerSnapshot"):
+ app_server_snapshot = {}
+ database_server_snapshot = {}
+
+ if self.status in ["Processing", "Completed"]:
+ if self.app_server_snapshot:
+ app_server_snapshot = frappe.get_value(
+ "Virtual Disk Snapshot",
+ self.app_server_snapshot,
+ ["size", "progress", "status", "start_time"],
+ as_dict=True,
+ )
+ if self.database_server_snapshot:
+ database_server_snapshot = frappe.get_value(
+ "Virtual Disk Snapshot",
+ self.database_server_snapshot,
+ ["size", "progress", "status", "start_time"],
+ as_dict=True,
+ )
+
+ doc.app_server_hostname = (
+ frappe.get_value("Server", self.app_server, "hostname") if self.app_server else ""
+ )
+ doc.app_server_title = frappe.get_value("Server", self.app_server, "title") if self.app_server else ""
+ doc.app_server_snapshot_size = app_server_snapshot.get("size", 0)
+		doc.app_server_snapshot_progress = int((app_server_snapshot.get("progress") or "0%").strip("%"))
+ doc.app_server_snapshot_status = app_server_snapshot.get("status", "")
+ doc.app_server_snapshot_start_time = app_server_snapshot.get("start_time", None)
+
+ doc.database_server_title = (
+ frappe.get_value("Database Server", self.database_server, "title") if self.database_server else ""
+ )
+ doc.database_server_hostname = (
+ frappe.get_value("Database Server", self.database_server, "hostname")
+ if self.database_server
+ else ""
+ )
+ doc.database_server_snapshot_size = database_server_snapshot.get("size", 0)
+		doc.database_server_snapshot_progress = int((database_server_snapshot.get("progress") or "0%").strip("%"))
+ doc.database_server_snapshot_status = database_server_snapshot.get("status", "")
+ doc.database_server_snapshot_start_time = database_server_snapshot.get("start_time", None)
+
+ doc.progress = int(((doc.app_server_snapshot_progress) + (doc.database_server_snapshot_progress)) / 2)
+
+ doc.site_list_json = self.sites
+
+ return doc
+
+ @property
+ def snapshots(self):
+ snapshots = []
+ if self.app_server_snapshot:
+ snapshots.append(self.app_server_snapshot)
+ if self.database_server_snapshot:
+ snapshots.append(self.database_server_snapshot)
+ return snapshots
+
+ @property
+ def sites(self) -> list[str]:
+ if not self.site_list:
+ return []
+ try:
+ return json.loads(self.site_list)
+ except: # noqa: E722
+ return []
+
+ @property
+ def arguments_for_press_job(self):
+ return {
+ "server_snapshot": self.name,
+ "is_consistent_snapshot": self.consistent,
+ }
+
+ @property
+ def subscription(self) -> str | None:
+ return frappe.db.get_value(
+ "Subscription", {"document_type": "Server Snapshot", "document_name": self.name}
+ )
+
+ def validate(self):
+ if self.provider != "AWS EC2":
+ frappe.throw("Only AWS Provider is supported for now")
+
+ def before_insert(self):
+		# Ensure neither the app server nor the database server is archived
+ allowed_statuses = ["Pending", "Running", "Stopped"]
+ app_server_vm = frappe.db.get_value("Server", self.app_server, "virtual_machine")
+ db_server_vm = frappe.db.get_value("Database Server", self.database_server, "virtual_machine")
+ if frappe.db.get_value("Virtual Machine", app_server_vm, "status") not in allowed_statuses:
+ frappe.throw(
+ "App Server should be in a valid state [Pending, Running, Stopped] to create a snapshot"
+ )
+ if frappe.db.get_value("Virtual Machine", db_server_vm, "status") not in allowed_statuses:
+ frappe.throw(
+ "Database Server should be in a valid state [Pending, Running, Stopped] to create a snapshot"
+ )
+
+ sites = (
+ frappe.get_all(
+ "Site",
+ filters={"server": self.app_server, "status": ("!=", "Archived")},
+ pluck="name",
+ )
+ or []
+ )
+ self.site_list = json.dumps(sites, indent=2, sort_keys=True)
+
+ # Ensure no other snapshot is in Pending state
+ if frappe.db.exists(
+ "Server Snapshot",
+ {
+ "status": "Pending",
+ "app_server": self.app_server,
+ "database_server": self.database_server,
+ },
+ ):
+ frappe.throw(
+ f"A snapshot for App Server {self.app_server} and Database Server {self.database_server} is already in Pending state."
+ )
+
+ # Set vCPU and RAM configuration
+ vm: VirtualMachine = frappe.get_doc("Virtual Machine", app_server_vm)
+ self.app_server_vcpu = vm.vcpu
+ self.app_server_ram = vm.ram
+
+ vm: VirtualMachine = frappe.get_doc("Virtual Machine", db_server_vm)
+ self.database_server_vcpu = vm.vcpu
+ self.database_server_ram = vm.ram
+
+ def after_insert(self):
+ try:
+ self.create_press_jobs()
+ except Exception:
+ import traceback
+
+ self.traceback = traceback.format_exc()
+ self.status = "Failure"
+ self.save(ignore_version=True)
+
+ # Clear created press jobs (if any)
+ if (
+ hasattr(self, "flags")
+ and hasattr(self.flags, "created_press_jobs")
+ and isinstance(self.flags.created_press_jobs, list)
+ ):
+ for job in self.flags.created_press_jobs:
+ with contextlib.suppress(Exception):
+ frappe.get_doc("Press Job", job).delete(ignore_permissions=True)
+
+ def on_update(self):
+ if self.has_value_changed("status"):
+ if self.status == "Completed":
+ self._create_subscription()
+ # Continue execution of Logical Replication Backup if it exists
+ if frappe.db.exists("Logical Replication Backup", {"server_snapshot": self.name}):
+ frappe.get_doc("Logical Replication Backup", {"server_snapshot": self.name}).next()
+ elif self.status in ["Failure", "Unavailable"]:
+ self._disable_subscription()
+
+ def create_press_jobs(self):
+ self.flags.created_press_jobs = []
+ self.app_server_snapshot_press_job = (
+ frappe.get_doc(
+ {
+ "doctype": "Press Job",
+ "job_type": "Snapshot Disk",
+ "server_type": "Server",
+ "server": self.app_server,
+ "virtual_machine": frappe.db.get_value("Server", self.app_server, "virtual_machine"),
+ "arguments": json.dumps(self.arguments_for_press_job, indent=2, sort_keys=True),
+ }
+ )
+ .insert(ignore_permissions=True)
+ .name
+ )
+ self.flags.created_press_jobs.append(self.app_server_snapshot_press_job)
+ self.database_server_snapshot_press_job = (
+ frappe.get_doc(
+ {
+ "doctype": "Press Job",
+ "job_type": "Snapshot Disk",
+ "server_type": "Database Server",
+ "server": self.database_server,
+ "virtual_machine": frappe.db.get_value(
+ "Database Server", self.database_server, "virtual_machine"
+ ),
+ "arguments": json.dumps(self.arguments_for_press_job, indent=2, sort_keys=True),
+ }
+ )
+ .insert(ignore_permissions=True)
+ .name
+ )
+ self.flags.created_press_jobs.append(self.database_server_snapshot_press_job)
+ self.save(ignore_version=True)
+
+ def resume_app_server_services(self):
+ if self.app_server_services_started:
+ return
+ self.resume_services("Server")
+
+ def resume_database_server_services(self):
+ if self.database_server_services_started:
+ return
+ self.resume_services("Database Server")
+
+ def resume_services(self, server_type: Literal["Server", "Database Server"]):
+ if (server_type == "Server" and self.app_server_services_started) or (
+ server_type == "Database Server" and self.database_server_services_started
+ ):
+ return
+
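+		# Inconsistent snapshots never stop services, so just flag them as started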
+ if not self.consistent:
+ frappe.db.set_value(
+ self.doctype,
+ self.name,
+ "app_server_services_started"
+ if server_type == "Server"
+ else "database_server_services_started",
+ True,
+ update_modified=True,
+ )
+ return
+
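+		# Lock the snapshot row (SELECT ... FOR UPDATE), presumably to
+		# serialize concurrent resume attempts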
+ frappe.db.get_value(self.doctype, self.name, "status", for_update=True)
+
+ press_job = frappe.get_doc(
+ {
+ "doctype": "Press Job",
+ "job_type": "Resume Services After Snapshot",
+ "server_type": server_type,
+ "server": self.app_server if server_type == "Server" else self.database_server,
+ "virtual_machine": frappe.db.get_value(
+ "Server" if server_type == "Server" else "Database Server",
+ self.app_server if server_type == "Server" else self.database_server,
+ "virtual_machine",
+ ),
+ "arguments": json.dumps(self.arguments_for_press_job, indent=2, sort_keys=True),
+ }
+ ).insert(ignore_permissions=True)
+
+ frappe.db.set_value(
+ "Server Snapshot",
+ self.name,
+ "app_server_resume_service_press_job"
+ if server_type == "Server"
+ else "database_server_resume_service_press_job",
+ press_job.name,
+ update_modified=False,
+ )
+
+ @frappe.whitelist()
+ def sync(self, now: bool | None = None, trigger_snapshot_sync: bool | None = None):
+ if now is None:
+ now = False
+
+ if trigger_snapshot_sync is None:
+ trigger_snapshot_sync = True
+
+ frappe.enqueue_doc(
+ "Server Snapshot",
+ self.name,
+ "_sync",
+ enqueue_after_commit=True,
+ now=now,
+ trigger_snapshot_sync=trigger_snapshot_sync,
+ )
+
+ def _sync(self, trigger_snapshot_sync): # noqa: C901
+ if self.status not in ["Processing", "Completed"]:
+ # If snapshot is already marked as failure or unavailable, no need to sync
+ return
+
+ updated_status = self.status
+ total_size = 0
+ if len(self.snapshots) == 2:
+ completed = True
+ for s in self.snapshots:
+ snapshot_info = frappe.get_value("Virtual Disk Snapshot", s, ["status", "size"], as_dict=True)
+ if snapshot_info["status"] == "Unavailable":
+ updated_status = "Unavailable"
+ break
+ if snapshot_info["status"] != "Completed":
+ if trigger_snapshot_sync:
+ # If snapshot is not completed, enqueue the sync
+ frappe.enqueue_doc(
+ "Virtual Disk Snapshot",
+ s,
+ "sync",
+ enqueue_after_commit=True,
+ )
+ completed = False
+ break
+
+ total_size += snapshot_info["size"]
+
+ if completed:
+ updated_status = "Completed"
+
+ if self.status != updated_status or self.total_size_gb != total_size:
+ self.status = updated_status
+ self.total_size_gb = total_size
+ self.save(ignore_version=True)
+
+ @dashboard_whitelist()
+ def delete_snapshots(self):
+ if self.status in ["Unavailable", "Failure"]:
+ # If snapshot is already marked as failure or unavailable, no need to delete
+ return
+
+ if self.locked:
+ frappe.throw("Snapshot is locked. Unlock the snapshot before deleting.")
+
+ for s in self.snapshots:
+ frappe.enqueue_doc(
+ "Virtual Disk Snapshot",
+ s,
+ "delete_snapshot",
+ enqueue_after_commit=True,
+ ignore_validation=True,
+ )
+
+ self.status = "Unavailable"
+ self.save()
+
+ @dashboard_whitelist()
+ def lock(self, now: bool | None = False):
+ if self.locked:
+ return
+
+ if self.free:
+ frappe.throw("Non-chargeable snapshots cannot be locked")
+
+ if now is None:
+ now = False
+
+ frappe.enqueue_doc("Server Snapshot", self.name, "_lock", enqueue_after_commit=True, now=now)
+
+ @dashboard_whitelist()
+ def unlock(self, now: bool | None = False):
+ if not self.locked:
+ return
+
+ if now is None:
+ now = False
+
+ frappe.enqueue_doc(
+ "Server Snapshot",
+ self.name,
+ "_unlock",
+ enqueue_after_commit=True,
+ now=now,
+ )
+
+ def _lock(self):
+ for s in self.snapshots:
+ frappe.get_doc("Virtual Disk Snapshot", s).lock()
+ self.locked = True
+ self.save(ignore_version=True)
+
+ def _unlock(self):
+ for s in self.snapshots:
+ frappe.get_doc("Virtual Disk Snapshot", s).unlock()
+ self.locked = False
+ self.save(ignore_version=True)
+
+ @dashboard_whitelist()
+ def recover_sites(self, sites: list[str] | None = None):
+ if not sites:
+ sites = []
+
+ if not frappe.db.get_single_value("Press Settings", "enable_server_snapshot_recovery"):
+ frappe.throw("Server Snapshot Recovery is currently disabled. Please try again later.")
+
+ recover_record = frappe.get_doc(
+ {
+ "doctype": "Server Snapshot Recovery",
+ "snapshot": self.name,
+ }
+ )
+ for s in sites:
+ recover_record.append("sites", {"site": s})
+ recover_record.insert(ignore_permissions=True)
+ frappe.msgprint(
+ "Snapshot Recovery started successfully\n"
+ f"View Recovery Record ."
+ )
+ return recover_record.name
+
+ @frappe.whitelist()
+ def create_server( # noqa: C901
+ self,
+ server_type: Literal["Server", "Database Server"],
+ title: str | None = None,
+ plan: str | None = None,
+ team: str | None = None,
+ create_subscription: bool | None = False,
+ database_server: str | None = None,
+ temporary_server: bool | None = False,
+ is_for_recovery: bool = False,
+ provision_db_replica: bool = False,
+ master_db_server: str | None = None,
+ press_job_arguments: dict[str, typing.Any] | None = None,
+ ) -> str:
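+		"""Provision a new server in this snapshot's cluster, seeded from the
+		matching disk snapshot. Returns the new server's name ("" if the
+		cluster did not return a server doc)."""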
+ if server_type not in ["Server", "Database Server"]:
+ frappe.throw("Invalid server type. Must be 'Server' or 'Database Server'.")
+
+ if create_subscription is None:
+ create_subscription = False
+
+ if temporary_server is None:
+ temporary_server = False
+
+ if provision_db_replica is None:
+ provision_db_replica = False
+
+ if press_job_arguments is None:
+ press_job_arguments = {}
+
+ if server_type != "Database Server" and provision_db_replica:
+ frappe.throw("Provisioning a database replica is only applicable for Database Servers.")
+
+ if provision_db_replica and not master_db_server:
+ frappe.throw("Master Database Server is required for provisioning a database replica.")
+
+ if temporary_server and provision_db_replica:
+ frappe.throw("Temporary server cannot be used for provisioning a database replica.")
+
+ cluster: Cluster = frappe.get_doc("Cluster", self.cluster)
+ if not plan:
+ plan = cluster.find_server_plan_with_compute_config(
+ server_type=server_type,
+ vcpu=self.app_server_vcpu if server_type == "Server" else self.database_server_vcpu,
+ memory=self.app_server_ram if server_type == "Server" else self.database_server_ram,
+ )
+
+ cluster.proxy_server = frappe.get_all(
+ "Proxy Server",
+ {"status": "Active", "cluster": cluster.name, "is_primary": True},
+ pluck="name",
+ limit=1,
+ )[0]
+
+ if database_server:
+ cluster.database_server = database_server
+
+ server, _ = cluster.create_server(
+ doctype=server_type,
+ title=title or self.name,
+ team=team,
+ data_disk_snapshot=self.app_server_snapshot
+ if server_type == "Server"
+ else self.database_server_snapshot,
+ plan=frappe.get_doc("Server Plan", plan) if isinstance(plan, str) else plan,
+ create_subscription=create_subscription,
+ temporary_server=temporary_server,
+ is_for_recovery=is_for_recovery,
+ setup_db_replication=provision_db_replica,
+ master_db_server=master_db_server if provision_db_replica else None,
+ press_job_arguments=press_job_arguments,
+ )
+ server_name = ""
+ if server:
+ server_name = server.name
+
+ frappe.msgprint(
+ f"Server {server_name} created successfully from snapshot\n.{server_name} ."
+ f" Please check the server for further actions."
+ )
+
+ return server_name
+
+ @frappe.whitelist()
+ def create_replica_db_server(self) -> str:
+ """
+ !!! CAUTION !!!
+		This function is meant to be called from the desk,
+		and you should know what you are doing.
+
+ For any action for creating app/db server from snapshot,
+ use `create_server` method instead.
+ """
+ if not self.database_server:
+ frappe.throw("Snapshot does not have a database server.")
+
+ if self.status != "Completed":
+ frappe.throw("Please wait for the snapshot to be completed.")
+
+ database_server = frappe.get_doc("Database Server", self.database_server)
+ if database_server.status != "Active":
+ frappe.throw("Master Database Server must be active to create a replica.")
+
+ return self.create_server(
+ server_type="Database Server",
+ title=f"{database_server.title} - Replica",
+ plan=database_server.plan,
+ team=database_server.team,
+ create_subscription=True,
+ provision_db_replica=True,
+ master_db_server=self.database_server,
+ )
+
+ def _create_subscription(self):
+ """
+ Create a subscription for the server snapshot.
+ This method can be called after the server snapshot is completed.
+ """
+ if self.free:
+ return
+
+ plan = frappe.get_value("Server Snapshot Plan", {"provider": self.provider, "enabled": 1}, "name")
+ if not plan:
+ frappe.throw(f"No active Server Snapshot Plan found for provider {self.provider}.")
+
+ if frappe.db.exists("Subscription", {"document_type": "Server Snapshot", "document_name": self.name}):
+ return
+
+ frappe.get_doc(
+ {
+ "doctype": "Subscription",
+ "enabled": 1,
+ "team": self.team,
+ "document_type": "Server Snapshot",
+ "document_name": self.name,
+ "plan_type": "Server Snapshot Plan",
+ "plan": plan,
+ "interval": "Daily",
+ }
+ ).insert(ignore_permissions=True)
+
+ def _disable_subscription(self):
+ """
+		Disable the subscription for the server snapshot.
+ This method can be called when the server snapshot is archived or deleted.
+ """
+		if not self.subscription:
+			return
+
+		frappe.db.set_value(
+ "Subscription",
+ self.subscription,
+ "enabled",
+ 0,
+ update_modified=True,
+ )
+
+
+get_permission_query_conditions = get_permission_query_conditions_for_doctype("Server Snapshot")
+
+
+def move_pending_snapshots_to_processing():
+ """
+	Move pending snapshots to the Processing state once both disk
+	snapshots are set and services on both servers have been resumed.
+ """
+ pending_snapshots = frappe.get_all(
+ "Server Snapshot",
+ filters={
+ "status": "Pending",
+ "app_server_snapshot": ("is", "set"),
+ "database_server_snapshot": ("is", "set"),
+ "app_server_services_started": 1,
+ "database_server_services_started": 1,
+ },
+ pluck="name",
+ )
+
+ for snapshot in pending_snapshots:
+ with contextlib.suppress(Exception):
+ current_status = frappe.db.get_value("Server Snapshot", snapshot, "status")
+ if current_status == "Pending":
+ # If the snapshot is still pending, update its status to Processing
+ frappe.db.set_value("Server Snapshot", snapshot, "status", "Processing", update_modified=True)
+ frappe.db.commit()
+
+
+def expire_snapshots():
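+	# Expire at most 50 snapshots per run; only Completed, unlocked,
+	# free snapshots whose expire_at has passed are deleted.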
+ Snapshot = frappe.qb.DocType("Server Snapshot")
+ records = (
+ frappe.qb.from_(Snapshot)
+ .select(Snapshot.name)
+ .where(
+ (Snapshot.status == "Completed")
+ & (Snapshot.expire_at.isnotnull())
+ & (Snapshot.expire_at <= frappe.utils.now_datetime())
+ & (Snapshot.locked == 0)
+ & (Snapshot.free == 1)
+ )
+ .limit(50)
+ .run(as_dict=True)
+ )
+
+ for record in records:
+ try:
+ snapshot = frappe.get_doc("Server Snapshot", record.get("name"))
+ if not snapshot.expire_at:
+ continue
+ snapshot.delete_snapshots()
+ frappe.db.commit()
+ except Exception:
+ frappe.log_error("Server Snapshot Expire Error")
+
+
+def sync_ongoing_server_snapshots():
+	# Usually, when a Virtual Disk Snapshot's status is updated,
+	# it should trigger the Server Snapshot sync as well.
+	# But in case that is missed for any reason, this function ensures
+	# that the Server Snapshot's status stays in sync with its Virtual Disk Snapshots.
+
+ Snapshot = frappe.qb.DocType("Server Snapshot")
+ records = (
+ frappe.qb.from_(Snapshot)
+ .select(Snapshot.name)
+ .where(
+ (Snapshot.status == "Processing")
+ & (Snapshot.modified < frappe.utils.add_to_date(frappe.utils.now_datetime(), minutes=-10))
+ )
+ .limit(50)
+ .run(as_dict=True)
+ )
+
+ for record in records:
+ if has_job_timeout_exceeded():
+ break
+ with contextlib.suppress(Exception):
+ snapshot: ServerSnapshot = frappe.get_doc("Server Snapshot", record.get("name"), for_update=True)
+ snapshot.sync(now=True, trigger_snapshot_sync=False)
+ frappe.db.commit()
diff --git a/press/press/doctype/server_snapshot/test_server_snapshot.py b/press/press/doctype/server_snapshot/test_server_snapshot.py
new file mode 100644
index 00000000000..ee04aa4cceb
--- /dev/null
+++ b/press/press/doctype/server_snapshot/test_server_snapshot.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests import IntegrationTestCase, UnitTestCase
+
+# On IntegrationTestCase, the doctype test records and all
+# link-field test record dependencies are recursively loaded
+# Use these module variables to add/remove to/from that list
+EXTRA_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
+IGNORE_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
+
+
+class UnitTestServerSnapshot(UnitTestCase):
+ """
+ Unit tests for ServerSnapshot.
+ Use this class for testing individual functions and methods.
+ """
+
+ pass
+
+
+class IntegrationTestServerSnapshot(IntegrationTestCase):
+ """
+ Integration tests for ServerSnapshot.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/server_snapshot_plan/__init__.py b/press/press/doctype/server_snapshot_plan/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/server_snapshot_plan/server_snapshot_plan.js b/press/press/doctype/server_snapshot_plan/server_snapshot_plan.js
new file mode 100644
index 00000000000..fea7ea20e6b
--- /dev/null
+++ b/press/press/doctype/server_snapshot_plan/server_snapshot_plan.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Server Snapshot Plan", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/server_snapshot_plan/server_snapshot_plan.json b/press/press/doctype/server_snapshot_plan/server_snapshot_plan.json
new file mode 100644
index 00000000000..686c2a1d5eb
--- /dev/null
+++ b/press/press/doctype/server_snapshot_plan/server_snapshot_plan.json
@@ -0,0 +1,91 @@
+{
+ "actions": [],
+ "autoname": "prompt",
+ "creation": "2025-08-02 03:08:42.573683",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "enabled",
+ "title",
+ "column_break_nyns",
+ "provider",
+ "pricing_section",
+ "price_inr",
+ "column_break_vild",
+ "price_usd"
+ ],
+ "fields": [
+ {
+ "default": "0",
+ "fieldname": "enabled",
+ "fieldtype": "Check",
+ "label": "Enabled"
+ },
+ {
+ "fieldname": "title",
+ "fieldtype": "Data",
+ "label": "Title"
+ },
+ {
+ "fieldname": "pricing_section",
+ "fieldtype": "Section Break",
+ "label": "Pricing"
+ },
+ {
+ "fieldname": "price_inr",
+ "fieldtype": "Currency",
+ "in_list_view": 1,
+ "label": "Price per GB (INR)",
+ "reqd": 1
+ },
+ {
+ "fieldname": "column_break_vild",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "price_usd",
+ "fieldtype": "Currency",
+ "in_list_view": 1,
+ "label": "Price per GB (USD)"
+ },
+ {
+ "fieldname": "column_break_nyns",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "provider",
+ "fieldtype": "Select",
+ "label": "Provider",
+ "options": "AWS EC2",
+ "reqd": 1
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2025-08-02 03:22:33.282950",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Server Snapshot Plan",
+ "naming_rule": "Set by user",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": [],
+ "title_field": "title"
+}
diff --git a/press/press/doctype/server_snapshot_plan/server_snapshot_plan.py b/press/press/doctype/server_snapshot_plan/server_snapshot_plan.py
new file mode 100644
index 00000000000..56340942c65
--- /dev/null
+++ b/press/press/doctype/server_snapshot_plan/server_snapshot_plan.py
@@ -0,0 +1,23 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+from press.press.doctype.site_plan.plan import Plan
+
+
+class ServerSnapshotPlan(Plan):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ enabled: DF.Check
+ price_inr: DF.Currency
+ price_usd: DF.Currency
+ provider: DF.Literal["AWS EC2"]
+ title: DF.Data | None
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/server_snapshot_plan/test_server_snapshot_plan.py b/press/press/doctype/server_snapshot_plan/test_server_snapshot_plan.py
new file mode 100644
index 00000000000..bb6bc7f35fc
--- /dev/null
+++ b/press/press/doctype/server_snapshot_plan/test_server_snapshot_plan.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests import IntegrationTestCase, UnitTestCase
+
+# On IntegrationTestCase, the doctype test records and all
+# link-field test record dependencies are recursively loaded
+# Use these module variables to add/remove to/from that list
+EXTRA_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
+IGNORE_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
+
+
+class UnitTestServerSnapshotPlan(UnitTestCase):
+ """
+ Unit tests for ServerSnapshotPlan.
+ Use this class for testing individual functions and methods.
+ """
+
+ pass
+
+
+class IntegrationTestServerSnapshotPlan(IntegrationTestCase):
+ """
+ Integration tests for ServerSnapshotPlan.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/server_snapshot_recovery/__init__.py b/press/press/doctype/server_snapshot_recovery/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/server_snapshot_recovery/server_snapshot_recovery.js b/press/press/doctype/server_snapshot_recovery/server_snapshot_recovery.js
new file mode 100644
index 00000000000..15a946c8e1a
--- /dev/null
+++ b/press/press/doctype/server_snapshot_recovery/server_snapshot_recovery.js
@@ -0,0 +1,53 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('Server Snapshot Recovery', {
+ refresh(frm) {
+ [
+ [
+ 'Provision Servers',
+ 'provision_servers',
+ true,
+ frm.doc.status === 'Draft',
+ ],
+ [
+ 'Archive Servers',
+ 'archive_servers',
+ true,
+ (frm.doc.app_server || frm.doc.database_server) &&
+ (!frm.doc.app_server_archived || !frm.doc.database_server_archived),
+ ],
+ ['Expire Backups', 'expire_backups', true, frm.doc.status === 'Restored'],
+ ].forEach(([label, method, confirm, condition]) => {
+ if (typeof condition === 'undefined' || condition) {
+ frm.add_custom_button(
+ label,
+ () => {
+ if (confirm) {
+ frappe.confirm(
+ `Are you sure you want to ${label.toLowerCase()}?`,
+ () =>
+ frm.call(method).then((r) => {
+ if (r.message) {
+ frappe.msgprint(r.message);
+ } else {
+ frm.refresh();
+ }
+ }),
+ );
+ } else {
+ frm.call(method).then((r) => {
+ if (r.message) {
+ frappe.msgprint(r.message);
+ } else {
+ frm.refresh();
+ }
+ });
+ }
+ },
+ __('Actions'),
+ );
+ }
+ });
+ },
+});
diff --git a/press/press/doctype/server_snapshot_recovery/server_snapshot_recovery.json b/press/press/doctype/server_snapshot_recovery/server_snapshot_recovery.json
new file mode 100644
index 00000000000..5b18755636c
--- /dev/null
+++ b/press/press/doctype/server_snapshot_recovery/server_snapshot_recovery.json
@@ -0,0 +1,219 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-08-04 10:34:26.272832",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "status",
+ "column_break_msoo",
+ "snapshot",
+ "column_break_cvnn",
+ "team",
+ "column_break_mqrc",
+ "cluster",
+ "section_break_nhfa",
+ "warm_up_end_time",
+ "column_break_eawo",
+ "restored_on",
+ "column_break_rdjy",
+ "expire_backup_on",
+ "section_break_ipfw",
+ "app_server",
+ "is_app_server_ready",
+ "app_server_archived",
+ "column_break_qsfq",
+ "database_server",
+ "is_database_server_ready",
+ "database_server_archived",
+ "section_break_ilwd",
+ "sites"
+ ],
+ "fields": [
+ {
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Status",
+ "options": "Draft\nCreating Servers\nGathering Site Data\nWarming Up\nRestoring\nRestored\nFailure\nUnavailable"
+ },
+ {
+ "fieldname": "column_break_msoo",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "snapshot",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Snapshot",
+ "options": "Server Snapshot",
+ "reqd": 1,
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "section_break_ilwd",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "sites",
+ "fieldtype": "Table",
+ "label": "Sites",
+ "options": "Server Snapshot Site Recovery"
+ },
+ {
+ "fieldname": "section_break_ipfw",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "app_server",
+ "fieldtype": "Link",
+ "label": "App Server",
+ "options": "Server",
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "column_break_qsfq",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "database_server",
+ "fieldtype": "Link",
+ "label": "Database Server",
+ "options": "Database Server",
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "is_app_server_ready",
+ "fieldtype": "Check",
+ "label": "App Server Ready",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "is_database_server_ready",
+ "fieldtype": "Check",
+ "label": "Database Server Ready",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "app_server_archived",
+ "fieldtype": "Check",
+ "label": "App Server Archived",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "database_server_archived",
+ "fieldtype": "Check",
+ "label": "Database Server Archived",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_mqrc",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fetch_from": "snapshot.cluster",
+ "fieldname": "cluster",
+ "fieldtype": "Link",
+ "label": "Cluster",
+ "options": "Cluster",
+ "reqd": 1,
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "warm_up_end_time",
+ "fieldtype": "Datetime",
+ "label": "Warm Up End Time",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_cvnn",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fetch_from": "snapshot.team",
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "label": "Team",
+ "options": "Team",
+ "reqd": 1
+ },
+ {
+ "fieldname": "section_break_nhfa",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "column_break_eawo",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "restored_on",
+ "fieldtype": "Datetime",
+ "label": "Restored On",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_rdjy",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "expire_backup_on",
+ "fieldtype": "Datetime",
+ "label": "Expire Backup On",
+ "read_only": 1
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2025-08-07 10:50:25.752688",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Server Snapshot Recovery",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/press/doctype/server_snapshot_recovery/server_snapshot_recovery.py b/press/press/doctype/server_snapshot_recovery/server_snapshot_recovery.py
new file mode 100644
index 00000000000..e10873ed20f
--- /dev/null
+++ b/press/press/doctype/server_snapshot_recovery/server_snapshot_recovery.py
@@ -0,0 +1,531 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+import json
+from typing import TYPE_CHECKING
+
+import frappe
+from frappe.model.document import Document
+from frappe.utils import add_to_date
+
+from press.agent import Agent
+from press.api.client import dashboard_whitelist
+from press.overrides import get_permission_query_conditions_for_doctype
+from press.press.doctype.agent_job.agent_job import AgentJob
+from press.press.doctype.communication_info.communication_info import get_communication_info
+from press.press.doctype.remote_file.remote_file import delete_remote_backup_objects
+from press.press.doctype.site_backup.site_backup import get_backup_bucket
+
+if TYPE_CHECKING:
+ from press.press.doctype.server_snapshot.server_snapshot import ServerSnapshot
+ from press.press.doctype.server_snapshot_site_recovery.server_snapshot_site_recovery import (
+ ServerSnapshotSiteRecovery,
+ )
+
+
+class ServerSnapshotRecovery(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.server_snapshot_site_recovery.server_snapshot_site_recovery import (
+ ServerSnapshotSiteRecovery,
+ )
+
+ app_server: DF.Link | None
+ app_server_archived: DF.Check
+ cluster: DF.Link
+ database_server: DF.Link | None
+ database_server_archived: DF.Check
+ expire_backup_on: DF.Datetime | None
+ is_app_server_ready: DF.Check
+ is_database_server_ready: DF.Check
+ restored_on: DF.Datetime | None
+ sites: DF.Table[ServerSnapshotSiteRecovery]
+ snapshot: DF.Link
+ status: DF.Literal[
+ "Draft",
+ "Creating Servers",
+ "Gathering Site Data",
+ "Warming Up",
+ "Restoring",
+ "Restored",
+ "Failure",
+ "Unavailable",
+ ]
+ team: DF.Link
+ warm_up_end_time: DF.Datetime | None
+ # end: auto-generated types
+
+ dashboard_fields = (
+ "status",
+ "snapshot",
+ )
+
+ @property
+ def server_agent(self) -> Agent:
+ return frappe.get_doc("Server", self.app_server).agent
+
+ def get_doc(self, doc):
+ sites_data = []
+ for s in self.sites:
+ sites_data.append(
+ {
+ "site": s.site,
+ "status": s.status,
+ "database_backup_available": self._is_remote_file_available(s.database_remote_file),
+ "public_files_backup_available": self._is_remote_file_available(s.public_remote_file),
+ "private_files_backup_available": self._is_remote_file_available(s.private_remote_file),
+ "encryption_key_available": bool(s.encryption_key),
+ }
+ )
+ doc.sites_data = sites_data
+ return doc
+
+ def _is_remote_file_available(self, remote_file: str | None) -> bool:
+ if not remote_file:
+ return False
+ return frappe.get_value("Remote File", remote_file, "status") == "Available"
+
+ def before_insert(self):
+ self.validate_snapshot_status()
+ self.fill_site_list()
+
+ def after_insert(self):
+ self.provision_servers()
+
+ def validate_snapshot_status(self):
+ snapshot: ServerSnapshot = frappe.get_doc(
+ "Server Snapshot",
+ self.snapshot,
+ )
+ if snapshot.status != "Completed":
+ frappe.throw(f"Cannot recover from snapshot {snapshot.name} with status {snapshot.status}")
+
+ def fill_site_list(self):
+ sites_json = json.loads(
+ frappe.get_value(
+ "Server Snapshot",
+ self.snapshot,
+ "site_list",
+ )
+ )
+ if not self.sites:
+ self.sites = []
+
+ for site in sites_json:
+ self.append(
+ "sites",
+ {
+ "site": site,
+ "status": "Draft",
+ },
+ )
+ else:
+ for site in self.sites:
+ if site.site not in sites_json:
+ frappe.throw(f"Site {site.site} not available in snapshot {self.snapshot}")
+
+ if len(self.sites) == 0:
+ frappe.throw("Please choose at least one site to recover.")
+
+ def on_update(self):
+ if (
+ self.status == "Creating Servers"
+ and (
+ self.has_value_changed("is_app_server_ready")
+ or self.has_value_changed("is_database_server_ready")
+ )
+ and self.is_app_server_ready
+ and self.is_database_server_ready
+ ):
+			app_server_snapshot = frappe.get_value("Server Snapshot", self.snapshot, "app_server_snapshot")
+			database_server_snapshot = frappe.get_value(
+				"Server Snapshot", self.snapshot, "database_server_snapshot"
+			)
+			snapshot_warmup_minutes = int(
+				(
+					max(
+						frappe.get_value("Virtual Disk Snapshot", app_server_snapshot, "size"),
+						frappe.get_value("Virtual Disk Snapshot", database_server_snapshot, "size"),
+ )
+ * 1024
+ )
+ / 300
+ / 60
+ ) # Assuming 300 MB/s warmup speed
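+			# e.g. a 150 GB snapshot: 150 * 1024 MB / 300 MB/s = 512 s, about 8 minutes
+			# (a rough estimate; actual disk initialization throughput varies)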
+ self.warm_up_end_time = add_to_date(minutes=snapshot_warmup_minutes)
+ self.status = "Warming Up"
+ self.save()
+
+ if self.has_value_changed("status") and self.status == "Restored":
+ self.send_restoration_completion_email()
+ self.archive_servers()
+ self.restored_on = frappe.utils.now_datetime()
+ self.expire_backup_on = add_to_date(None, days=2)
+ self.save()
+
+ if (
+ (
+ self.has_value_changed("app_server_archived")
+ or self.has_value_changed("database_server_archived")
+ )
+ and self.app_server_archived
+ and self.database_server_archived
+ and self.status != "Restored"
+ ):
+ self.status = "Failure"
+ self.save()
+
+ @frappe.whitelist()
+ def provision_servers(self):
+ self.validate_snapshot_status()
+ self.status = "Creating Servers"
+ snapshot: ServerSnapshot = frappe.get_doc(
+ "Server Snapshot",
+ self.snapshot,
+ )
+
+ self.database_server = snapshot.create_server(
+ server_type="Database Server", temporary_server=True, is_for_recovery=True
+ )
+ self.app_server = snapshot.create_server(
+ server_type="Server",
+ temporary_server=True,
+ database_server=self.database_server,
+ is_for_recovery=True,
+ )
+ self.save()
+
+ @frappe.whitelist()
+ def archive_servers(self):
+ if not self.app_server or not self.database_server:
+ frappe.throw("Servers are not provisioned yet.")
+
+ app_server_doc = frappe.get_doc("Server", self.app_server)
+ if app_server_doc.status != "Archived":
+ app_server_doc.archive()
+
+ database_server_doc = frappe.get_doc("Database Server", self.database_server)
+ if database_server_doc.status != "Archived":
+ database_server_doc.archive()
+
+ def send_restoration_completion_email(self):
+ frappe.sendmail(
+ subject="Snapshot Recovery Completed",
+ recipients=get_communication_info("Email", "Server Activity", "Server", self.app_server),
+ template="snapshot_recovery_completion",
+ args={"snapshot": self.snapshot},
+ )
+
+ def mark_server_provisioning_as_failed(self):
+ self.status = "Failure"
+ self.save()
+
+ def mark_process_as_failed(self):
+ self.status = "Failure"
+ for site in self.sites:
+ site.status = "Failure"
+ self.save()
+
+ def fetch_sites_data(self):
+ self.status = "Gathering Site Data"
+ self.save()
+ sites = [i.site for i in self.sites]
+ self.server_agent.search_sites_in_snapshot(
+ sites, reference_doctype=self.doctype, reference_name=self.name
+ )
+
+ def backup_sites(self):
+ self.status = "Restoring"
+ for site in self.sites:
+ if site.status == "Pending":
+ site.status = "Running"
+ site.file_backup_job = self.server_agent.backup_site_files_from_snapshot(
+ self.cluster,
+ site.site,
+ site.bench,
+ reference_doctype=self.doctype,
+ reference_name=self.name,
+ ).name
+ site.database_backup_job = self.server_agent.backup_site_database_from_snapshot(
+ self.cluster,
+ site.site,
+ site.database_name,
+ self.database_server,
+ reference_doctype=self.doctype,
+ reference_name=self.name,
+ ).name
+
+ self.save()
+
+ def _check_site_recovery_status(self, save=False):
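+		# A site is "Success" once all three remote files (database, public, private)
+		# exist; the whole recovery is "Restored" when no site is still in progress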
+ pending_restoration = False
+ for site in self.sites:
+ if (
+ site.status != "Failure"
+ and site.public_remote_file
+ and site.private_remote_file
+ and site.database_remote_file
+ ):
+ site.status = "Success"
+ if site.status in ["Draft", "Pending", "Running"]:
+ pending_restoration = True
+
+ if not pending_restoration:
+ self.status = "Restored"
+
+ if save:
+ self.save()
+
+ def _process_backup_files_from_snapshot_job_callback(self, job: AgentJob): # noqa: C901
+ if job.status not in ["Success", "Failure"]:
+ return
+ site = json.loads(job.request_data or "{}").get("site")
+ if not site:
+ return
+
+ site_record = None
+ for s in self.sites:
+ if s.site == site:
+ site_record = s
+ break
+
+ if not site_record:
+ frappe.throw(f"Site {site} not found in recovery sites.")
+
+ if job.status == "Failure":
+ site_record.status = "Failure"
+ else:
+ data = json.loads(job.data or "{}")
+			if not data:
+				site_record.status = "Failure"
+				self.save()
+				return
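+			# "backup_files" maps each archive name to its metadata (file name, size),
+			# while "offsite_files" maps the same name to its uploaded object path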
+ for file, file_data in data.get("backup_files", {}).items():
+ remote_file = self._create_remote_file(
+ file_name=file_data.get("file"),
+					file_path=data.get("offsite_files", {}).get(file),
+ file_size=file_data.get("size"),
+ )
+ if file.endswith("private_files.tar.gz"):
+ site_record.private_remote_file = remote_file.name
+ if file.endswith("public_files.tar.gz"):
+ site_record.public_remote_file = remote_file.name
+
+ self._check_site_recovery_status(save=True)
+
+ def _process_backup_database_from_snapshot_job_callback(self, job: AgentJob):
+ if job.status not in ["Success", "Failure"]:
+ return
+ site = json.loads(job.request_data or "{}").get("site")
+ if not site:
+ return
+
+ site_record = None
+ for s in self.sites:
+ if s.site == site:
+ site_record = s
+ break
+ if not site_record:
+ frappe.throw(f"Site {site} not found in recovery sites.")
+
+ if job.status == "Failure":
+ site_record.status = "Failure"
+ else:
+ data = json.loads(job.data or "{}")
+			if not data:
+				site_record.status = "Failure"
+				self.save()
+				return
+ remote_file = self._create_remote_file(
+ file_name=data.get("backup_file"),
+				file_path=data.get("offsite_files", {}).get(data.get("backup_file")),
+ file_size=data.get("backup_file_size"),
+ )
+ site_record.database_remote_file = remote_file.name
+
+ self._check_site_recovery_status(save=True)
+
+ def _create_remote_file(self, file_name: str, file_path: str, file_size: int):
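+		# Register the uploaded backup object as a Remote File in the cluster's backup bucket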
+ bucket = get_backup_bucket(self.cluster)
+ remote_file = frappe.get_doc(
+ {
+ "doctype": "Remote File",
+ "file_name": file_name,
+ "file_path": file_path,
+ "file_size": file_size,
+ "file_type": "application/x-gzip" if file_name.endswith(".gz") else "application/x-tar",
+ "bucket": bucket,
+ }
+ )
+ remote_file.save()
+ return remote_file
+
+ @dashboard_whitelist()
+ def download_backup(self, site: str, file_type: str): # noqa: C901
+ """
+ Download the backup file for the given site and file type.
+ """
+ if file_type not in ["public", "private", "database", "encryption_key"]:
+ frappe.throw(
+ f"Invalid file type: {file_type}. Must be one of 'public', 'private', 'database', or 'encryption_key'."
+ )
+
+		site_record: ServerSnapshotSiteRecovery | None = None
+ for record in self.sites:
+ if record.site == site:
+ site_record = record
+ break
+
+ if not site_record:
+ frappe.throw(f"Site {site} not found in recovery sites.")
+
+ if (
+ (file_type == "public" and not site_record.public_remote_file)
+ or (file_type == "private" and not site_record.private_remote_file)
+ or (file_type == "database" and not site_record.database_remote_file)
+ or (file_type == "encryption_key" and not site_record.encryption_key)
+ ):
+ frappe.throw(f"{file_type.capitalize()} backup not available for site {site}.")
+
+ try:
+ remote_file_name = ""
+ if file_type == "public":
+ remote_file_name = site_record.public_remote_file
+ elif file_type == "private":
+ remote_file_name = site_record.private_remote_file
+ elif file_type == "database":
+ remote_file_name = site_record.database_remote_file
+ elif file_type == "encryption_key":
+ return site_record.get_password("encryption_key")
+
+ return frappe.get_doc("Remote File", remote_file_name).download_link
+ except Exception:
+ frappe.throw(f"Error downloading {file_type} backup for site {site}. Please try again later.")
+
+ @frappe.whitelist()
+ def expire_backups(self):
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "_expire_backups",
+ enqueue_after_commit=True,
+ )
+
+ def _expire_backups(self):
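+		# Runs via expire_backups() once expire_backup_on (restore time + 2 days) has
+		# passed: the offsite objects are deleted and the recovery marked "Unavailable"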
+ if self.status != "Restored":
+ return
+
+ self.status = "Unavailable"
+ self.save()
+
+ remote_files = []
+ for site in self.sites:
+ if site.public_remote_file:
+ remote_files.append(site.public_remote_file)
+ if site.private_remote_file:
+ remote_files.append(site.private_remote_file)
+ if site.database_remote_file:
+ remote_files.append(site.database_remote_file)
+
+ delete_remote_backup_objects(remote_files)
+
+
+get_permission_query_conditions = get_permission_query_conditions_for_doctype("Server Snapshot Recovery")
+
+
+def resume_warmed_up_restorations():
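+	# Presumably invoked from the scheduler: resumes recoveries whose warm-up window
+	# has elapsed by kicking off site discovery on the restored servers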
+ records = frappe.get_all(
+ "Server Snapshot Recovery",
+ filters={
+ "status": "Warming Up",
+ "warm_up_end_time": ("<=", frappe.utils.now_datetime()),
+ },
+ fields=["name"],
+ )
+
+ for record in records:
+ try:
+ snapshot_recovery = frappe.get_doc("Server Snapshot Recovery", record.name)
+ snapshot_recovery.fetch_sites_data()
+ frappe.db.commit()
+ except Exception:
+ frappe.log_error("Server Snapshot Recovery Resume Error")
+
+
+def process_search_sites_in_snapshot_job_callback(job: AgentJob):
+ if job.status not in ["Success", "Failure"]:
+ return
+
+ if job.reference_doctype != "Server Snapshot Recovery" or not job.reference_name:
+ return
+
+ record: ServerSnapshotRecovery = frappe.get_doc("Server Snapshot Recovery", job.reference_name)
+
+ if job.status == "Failure":
+ record.mark_process_as_failed()
+ return
+
+ if job.status == "Success":
+ data = json.loads(job.data or "{}")
+
+ for site in record.sites:
+ if site.site not in data:
+ site.status = "Unavailable"
+ else:
+ site.status = "Pending"
+ site.bench = data[site.site].get("bench", "")
+ site.database_name = data[site.site].get("db_name", "")
+ site.encryption_key = data[site.site].get("encryption_key", "")
+
+ record.save()
+ record.backup_sites()
+
+
+def process_backup_files_from_snapshot_job_callback(job: AgentJob):
+ if job.status not in ["Success", "Failure"]:
+ return
+
+ if job.reference_doctype != "Server Snapshot Recovery" or not job.reference_name:
+ return
+
+ record: ServerSnapshotRecovery = frappe.get_doc(
+ "Server Snapshot Recovery", job.reference_name, for_update=True
+ )
+ record._process_backup_files_from_snapshot_job_callback(job)
+
+
+def process_backup_database_from_snapshot_job_callback(job: AgentJob):
+ if job.status not in ["Success", "Failure"]:
+ return
+
+ if job.reference_doctype != "Server Snapshot Recovery" or not job.reference_name:
+ return
+
+ record: ServerSnapshotRecovery = frappe.get_doc(
+ "Server Snapshot Recovery", job.reference_name, for_update=True
+ )
+ record._process_backup_database_from_snapshot_job_callback(job)
+
+
+def expire_backups():
+ records = frappe.get_all(
+ "Server Snapshot Recovery",
+ filters={
+ "status": "Restored",
+ "expire_backup_on": ("<=", frappe.utils.now_datetime()),
+ },
+ fields=["name"],
+ limit_page_length=50,
+ )
+
+ for record in records:
+ try:
+ snapshot_recovery = frappe.get_doc("Server Snapshot Recovery", record.name)
+ snapshot_recovery.expire_backups()
+ frappe.db.commit()
+ except Exception:
+ frappe.log_error("Server Snapshot Recovery Expire Error")
diff --git a/press/press/doctype/server_snapshot_recovery/test_server_snapshot_recovery.py b/press/press/doctype/server_snapshot_recovery/test_server_snapshot_recovery.py
new file mode 100644
index 00000000000..28b7bc5906f
--- /dev/null
+++ b/press/press/doctype/server_snapshot_recovery/test_server_snapshot_recovery.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests import IntegrationTestCase, UnitTestCase
+
+# On IntegrationTestCase, the doctype test records and all
+# link-field test record dependencies are recursively loaded
+# Use these module variables to add/remove to/from that list
+EXTRA_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
+IGNORE_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
+
+
+class UnitTestServerSnapshotRecovery(UnitTestCase):
+ """
+ Unit tests for ServerSnapshotRecovery.
+ Use this class for testing individual functions and methods.
+ """
+
+ pass
+
+
+class IntegrationTestServerSnapshotRecovery(IntegrationTestCase):
+ """
+ Integration tests for ServerSnapshotRecovery.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/server_snapshot_site_recovery/__init__.py b/press/press/doctype/server_snapshot_site_recovery/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/server_snapshot_site_recovery/server_snapshot_site_recovery.json b/press/press/doctype/server_snapshot_site_recovery/server_snapshot_site_recovery.json
new file mode 100644
index 00000000000..b5428c7c8c2
--- /dev/null
+++ b/press/press/doctype/server_snapshot_site_recovery/server_snapshot_site_recovery.json
@@ -0,0 +1,139 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-08-04 10:43:26.520507",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "site",
+ "database_name",
+ "bench",
+ "column_break_zcab",
+ "status",
+ "encryption_key",
+ "section_break_hjbs",
+ "public_remote_file",
+ "column_break_molt",
+ "private_remote_file",
+ "column_break_whas",
+ "database_remote_file",
+ "section_break_earz",
+ "file_backup_job",
+ "column_break_ccnx",
+ "database_backup_job"
+ ],
+ "fields": [
+ {
+ "fieldname": "site",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Site",
+ "reqd": 1,
+ "set_only_once": 1
+ },
+ {
+ "default": "Draft",
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Status",
+ "options": "Draft\nPending\nRunning\nSuccess\nFailure\nUnavailable",
+ "read_only": 1
+ },
+ {
+ "fieldname": "database_name",
+ "fieldtype": "Data",
+ "label": "Database Name",
+ "read_only": 1
+ },
+ {
+ "fieldname": "file_backup_job",
+ "fieldtype": "Link",
+ "label": "File Backup Job",
+ "options": "Agent Job",
+ "read_only": 1
+ },
+ {
+ "fieldname": "database_backup_job",
+ "fieldtype": "Link",
+ "label": "Database Backup Job",
+ "options": "Agent Job",
+ "read_only": 1
+ },
+ {
+ "fieldname": "public_remote_file",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Public Remote File",
+ "options": "Remote File",
+ "read_only": 1
+ },
+ {
+ "fieldname": "private_remote_file",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Private Remote File",
+ "options": "Remote File",
+ "read_only": 1
+ },
+ {
+ "fieldname": "database_remote_file",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Database Remote File",
+ "options": "Remote File",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_zcab",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "section_break_hjbs",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "column_break_molt",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "column_break_whas",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "section_break_earz",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "column_break_ccnx",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "bench",
+ "fieldtype": "Data",
+ "label": "Bench",
+ "read_only": 1
+ },
+ {
+ "fieldname": "encryption_key",
+ "fieldtype": "Password",
+ "label": "Encryption Key",
+ "read_only": 1
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2025-08-07 00:01:51.962501",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Server Snapshot Site Recovery",
+ "owner": "Administrator",
+ "permissions": [],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/press/doctype/server_snapshot_site_recovery/server_snapshot_site_recovery.py b/press/press/doctype/server_snapshot_site_recovery/server_snapshot_site_recovery.py
new file mode 100644
index 00000000000..17a4f289f4d
--- /dev/null
+++ b/press/press/doctype/server_snapshot_site_recovery/server_snapshot_site_recovery.py
@@ -0,0 +1,30 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class ServerSnapshotSiteRecovery(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ bench: DF.Data | None
+ database_backup_job: DF.Link | None
+ database_name: DF.Data | None
+ database_remote_file: DF.Link | None
+ encryption_key: DF.Password | None
+ file_backup_job: DF.Link | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ private_remote_file: DF.Link | None
+ public_remote_file: DF.Link | None
+ site: DF.Data
+ status: DF.Literal["Draft", "Pending", "Running", "Success", "Failure", "Unavailable"]
+ # end: auto-generated types
diff --git a/press/press/doctype/server_storage_plan/__init__.py b/press/press/doctype/server_storage_plan/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/server_storage_plan/patches/add_subscription_for_servers_with_additional_disk.py b/press/press/doctype/server_storage_plan/patches/add_subscription_for_servers_with_additional_disk.py
new file mode 100644
index 00000000000..6bcedaa71d4
--- /dev/null
+++ b/press/press/doctype/server_storage_plan/patches/add_subscription_for_servers_with_additional_disk.py
@@ -0,0 +1,47 @@
+# Copyright (c) 2024, Frappe Technologies Pvt. Ltd. and Contributors
+# For license information, please see license.txt
+
+
+import frappe
+from tqdm import tqdm
+
+
+def execute():
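+	# Find private servers whose VM disk exceeds their plan's disk (i.e. additional
+	# storage is attached) and create storage subscriptions for them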
+ Server = frappe.qb.DocType("Server")
+ DatabaseServer = frappe.qb.DocType("Database Server")
+ VirtualMachine = frappe.qb.DocType("Virtual Machine")
+ ServerPlan = frappe.qb.DocType("Server Plan")
+
+ servers = (
+ frappe.qb.from_(Server)
+ .select(Server.name, Server.team, ServerPlan.disk, VirtualMachine.disk_size)
+ .join(VirtualMachine)
+ .on(Server.virtual_machine == VirtualMachine.name)
+ .join(ServerPlan)
+ .on(Server.plan == ServerPlan.name)
+ .where(ServerPlan.disk < VirtualMachine.disk_size)
+ .where(Server.public == 0)
+ .run(as_dict=True)
+ )
+
+ database_servers = (
+ frappe.qb.from_(DatabaseServer)
+ .select(
+ DatabaseServer.name, DatabaseServer.team, ServerPlan.disk, VirtualMachine.disk_size
+ )
+ .join(VirtualMachine)
+ .on(DatabaseServer.virtual_machine == VirtualMachine.name)
+ .join(ServerPlan)
+ .on(DatabaseServer.plan == ServerPlan.name)
+ .where(ServerPlan.disk < VirtualMachine.disk_size)
+ .where(DatabaseServer.public == 0)
+ .run(as_dict=True)
+ )
+
+ for server in tqdm(servers):
+ frappe.get_doc("Server", server.name).create_subscription_for_storage()
+
+ for database_server in tqdm(database_servers):
+ frappe.get_doc(
+ "Database Server", database_server.name
+ ).create_subscription_for_storage()
diff --git a/press/press/doctype/server_storage_plan/server_storage_plan.js b/press/press/doctype/server_storage_plan/server_storage_plan.js
new file mode 100644
index 00000000000..cbd60ee9a3d
--- /dev/null
+++ b/press/press/doctype/server_storage_plan/server_storage_plan.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2024, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Server Storage Plan", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/server_storage_plan/server_storage_plan.json b/press/press/doctype/server_storage_plan/server_storage_plan.json
new file mode 100644
index 00000000000..31439818d00
--- /dev/null
+++ b/press/press/doctype/server_storage_plan/server_storage_plan.json
@@ -0,0 +1,75 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "autoname": "prompt",
+ "creation": "2024-06-26 14:03:45.071081",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "enabled",
+ "title",
+ "pricing_section",
+ "price_inr",
+ "column_break_xauk",
+ "price_usd"
+ ],
+ "fields": [
+ {
+ "default": "0",
+ "fieldname": "enabled",
+ "fieldtype": "Check",
+ "label": "Enabled"
+ },
+ {
+ "fieldname": "title",
+ "fieldtype": "Data",
+ "label": "Title"
+ },
+ {
+ "fieldname": "pricing_section",
+ "fieldtype": "Section Break",
+ "label": "Pricing"
+ },
+ {
+ "fieldname": "price_inr",
+ "fieldtype": "Currency",
+ "label": "Price per GB (INR)"
+ },
+ {
+ "fieldname": "column_break_xauk",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "price_usd",
+ "fieldtype": "Currency",
+   "label": "Price per GB (USD)"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2024-06-26 17:50:43.521110",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Server Storage Plan",
+ "naming_rule": "Set by user",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "show_title_field_in_link": 1,
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": [],
+ "title_field": "title"
+}
\ No newline at end of file
diff --git a/press/press/doctype/server_storage_plan/server_storage_plan.py b/press/press/doctype/server_storage_plan/server_storage_plan.py
new file mode 100644
index 00000000000..5d30100f122
--- /dev/null
+++ b/press/press/doctype/server_storage_plan/server_storage_plan.py
@@ -0,0 +1,28 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+import frappe
+
+from press.press.doctype.site_plan.plan import Plan
+
+
+class ServerStoragePlan(Plan):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ enabled: DF.Check
+ price_inr: DF.Currency
+ price_usd: DF.Currency
+ title: DF.Data | None
+ # end: auto-generated types
+
+ def validate(self):
+ if self.enabled and frappe.db.exists(
+ "Server Storage Plan", {"enabled": 1, "name": ("!=", self.name)}
+ ):
+ frappe.throw("Only one storage add-on plan can be enabled at a time")
diff --git a/press/press/doctype/server_storage_plan/test_server_storage_plan.py b/press/press/doctype/server_storage_plan/test_server_storage_plan.py
new file mode 100644
index 00000000000..65d78285833
--- /dev/null
+++ b/press/press/doctype/server_storage_plan/test_server_storage_plan.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2024, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class TestServerStoragePlan(FrappeTestCase):
+ pass
diff --git a/press/press/doctype/silenced_alert/__init__.py b/press/press/doctype/silenced_alert/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/silenced_alert/silenced_alert.js b/press/press/doctype/silenced_alert/silenced_alert.js
new file mode 100644
index 00000000000..240e7aa0d5a
--- /dev/null
+++ b/press/press/doctype/silenced_alert/silenced_alert.js
@@ -0,0 +1,29 @@
+// Copyright (c) 2023, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('Silenced Alert', {
+ refresh(frm) {
+ if (!frm.doc.__unsaved) {
+			frm.add_custom_button('Preview Alerts filtered by Instance', () => {
+ frm.call('preview_alerts').then((r) => {
+ if (r.message) {
+ frappe.msgprint(r.message);
+ } else {
+ frm.refresh();
+ }
+ });
+ });
+ }
+ if (!frm.doc.__unsaved && !frm.doc.silence_id) {
+ frm.add_custom_button('Create Silence', () => {
+ frm.call('create_new_silence').then((r) => {
+ if (r.message) {
+ frappe.msgprint(r.message);
+ } else {
+ frm.refresh();
+ }
+ });
+ });
+ }
+ },
+});
diff --git a/press/press/doctype/silenced_alert/silenced_alert.json b/press/press/doctype/silenced_alert/silenced_alert.json
new file mode 100644
index 00000000000..dfc5832f216
--- /dev/null
+++ b/press/press/doctype/silenced_alert/silenced_alert.json
@@ -0,0 +1,133 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2023-10-18 13:46:51.308480",
+ "default_view": "List",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "status",
+ "instance_type",
+ "instance",
+ "alert_comment",
+ "duration_column",
+ "from_time",
+ "to_time",
+ "duration",
+ "silence_id",
+ "section_break_hvst",
+ "alert_previews_column",
+ "total_alerts",
+ "alert_previews"
+ ],
+ "fields": [
+ {
+ "fieldname": "instance_type",
+ "fieldtype": "Link",
+ "label": "Instance Type",
+ "options": "DocType"
+ },
+ {
+ "fieldname": "instance",
+ "fieldtype": "Dynamic Link",
+ "in_list_view": 1,
+ "label": "Instance",
+ "options": "instance_type",
+ "reqd": 1
+ },
+ {
+ "fieldname": "duration_column",
+ "fieldtype": "Column Break",
+ "label": "Duration"
+ },
+ {
+ "default": "now",
+ "fieldname": "from_time",
+ "fieldtype": "Datetime",
+ "in_list_view": 1,
+ "label": "From Time",
+ "reqd": 1
+ },
+ {
+ "fieldname": "to_time",
+ "fieldtype": "Datetime",
+ "in_list_view": 1,
+ "label": "To Time",
+ "reqd": 1
+ },
+ {
+ "fieldname": "duration",
+ "fieldtype": "Data",
+ "label": "Duration",
+ "read_only": 1
+ },
+ {
+ "fieldname": "section_break_hvst",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "alert_comment",
+ "fieldtype": "Small Text",
+ "in_list_view": 1,
+ "label": "Alert Comment",
+ "reqd": 1
+ },
+ {
+ "default": "Preview",
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Status",
+ "options": "\nPreview\nActive\nExpired",
+ "read_only": 1
+ },
+ {
+ "fieldname": "alert_previews",
+ "fieldtype": "Code",
+ "label": "Alert Previews",
+ "options": "JSON"
+ },
+ {
+ "fieldname": "silence_id",
+ "fieldtype": "Data",
+ "label": "Silence ID",
+ "read_only": 1
+ },
+ {
+ "fieldname": "alert_previews_column",
+ "fieldtype": "Column Break",
+ "label": "Alert Previews"
+ },
+ {
+ "fieldname": "total_alerts",
+ "fieldtype": "Data",
+ "label": "Total Alerts",
+ "read_only": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2023-10-19 10:56:12.281776",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Silenced Alert",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/silenced_alert/silenced_alert.py b/press/press/doctype/silenced_alert/silenced_alert.py
new file mode 100644
index 00000000000..3a7559b137c
--- /dev/null
+++ b/press/press/doctype/silenced_alert/silenced_alert.py
@@ -0,0 +1,149 @@
+# Copyright (c) 2023, Frappe and contributors
+# For license information, please see license.txt
+
+import base64
+import json
+from datetime import timezone
+
+import frappe
+import requests
+from frappe.model.document import Document
+from frappe.utils.data import format_duration, get_datetime
+
+from press.utils import log_error
+
+
+class SilencedAlert(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ alert_comment: DF.SmallText
+ alert_previews: DF.Code | None
+ duration: DF.Data | None
+ from_time: DF.Datetime
+ instance: DF.DynamicLink
+ instance_type: DF.Link | None
+ silence_id: DF.Data | None
+ status: DF.Literal["", "Preview", "Active", "Expired"]
+ to_time: DF.Datetime
+ total_alerts: DF.Data | None
+ # end: auto-generated types
+
+ def validate(self):
+ self.get_duration()
+
+ def get_duration(self):
+ diff = get_datetime(self.to_time) - get_datetime(self.from_time)
+ self.duration = format_duration(diff.total_seconds())
+
+ def get_keyword_based_on_instance_type(self):
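+		# Map the Press doctype to the Alertmanager label used to match its alerts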
+ match self.instance_type:
+ case "Site":
+ return "instance"
+ case "Server":
+ return "instance"
+ case "Cluster":
+ return "cluster"
+ case "Release Group":
+ return "group"
+ case "Bench":
+ return "bench"
+ case "Prometheus Alert Rule":
+ return "alertname"
+ case _:
+ return ""
+
+ @frappe.whitelist()
+ def preview_alerts(self):
+		monitor_server = frappe.get_doc(
+			"Monitor Server", frappe.db.get_single_value("Press Settings", "monitor_server")
+		)
+ auth_token = base64.b64encode(
+ f"frappe:{monitor_server.get_password('grafana_password')}".encode("utf-8")
+ ).decode("utf-8")
+		keyword = f'{self.get_keyword_based_on_instance_type()}="{self.instance}"'
+ res = requests.get(
+ f"https://monitor.frappe.cloud/alertmanager/api/v2/alerts/groups?filter={keyword}&silenced=false&active=true",
+ headers={"Authorization": f"Basic {auth_token}"},
+ )
+ if res.status_code == 200:
+ alerts = res.json()
+ self.total_alerts = len(alerts)
+ self.alert_previews = json.dumps(alerts, indent=2)
+ self.save()
+ else:
+ frappe.throw("Unable to fetch alerts from Alertmanager")
+
+ @frappe.whitelist()
+ def create_new_silence(self):
+		monitor_server = frappe.get_doc(
+			"Monitor Server", frappe.db.get_single_value("Press Settings", "monitor_server")
+		)
+ auth_token = base64.b64encode(
+ f"frappe:{monitor_server.get_password('grafana_password')}".encode("utf-8")
+ ).decode("utf-8")
+ data = {
+ "matchers": [
+ {
+ "name": self.get_keyword_based_on_instance_type(),
+					"value": self.instance,
+ "isRegex": False,
+ "isEqual": True,
+ }
+ ],
+ "startsAt": get_datetime(self.from_time).astimezone(timezone.utc).isoformat(),
+ "endsAt": get_datetime(self.to_time).astimezone(timezone.utc).isoformat(),
+ "createdBy": self.owner,
+ "comment": self.alert_comment,
+ "id": None,
+ }
+ res = requests.post(
+ "https://monitor.frappe.cloud/alertmanager/api/v2/silences",
+ headers={"Authorization": f"Basic {auth_token}"},
+ json=data,
+ )
+		if res.status_code == 200:
+			silence = res.json()
+			self.status = "Active"
+			self.silence_id = silence["silenceID"]
+			self.save()
+		else:
+			frappe.throw("Unable to create silence in Alertmanager")
+
+
+def check_silenced_alerts():
+ """
+ Checks for silenced alerts in Alertmanager and updates the status of the silenced alert in Press
+ Runs every hour
+ """
+ silences = frappe.get_all(
+ "Silenced Alert", fields=["silence_id"], filters={"status": "Active"}
+ )
+	monitor_server = frappe.get_doc(
+		"Monitor Server", frappe.db.get_single_value("Press Settings", "monitor_server")
+	)
+ auth_token = base64.b64encode(
+ f"frappe:{monitor_server.get_password('grafana_password')}".encode("utf-8")
+ ).decode("utf-8")
+ req = requests.get(
+ "https://monitor.frappe.cloud/alertmanager/api/v2/silences?silenced=false&inhibited=false&active=true",
+ headers={"Authorization": f"Basic {auth_token}"},
+ )
+ if req.status_code == 200:
+ silences_from_alertmanager = req.json()
+ s_ids = [x["silence_id"] for x in silences]
+ for silence in silences_from_alertmanager:
+			if silence["status"]["state"] != "active" and silence["id"] in s_ids:
+ frappe.db.set_value(
+ "Silenced Alert", {"silence_id": silence["id"]}, "status", "Expired"
+ )
+ frappe.db.commit()
+ else:
+ log_error("Failed to fetch silences from Alertmanager")
diff --git a/press/press/doctype/silenced_alert/test_silenced_alert.py b/press/press/doctype/silenced_alert/test_silenced_alert.py
new file mode 100644
index 00000000000..a478bc416a0
--- /dev/null
+++ b/press/press/doctype/silenced_alert/test_silenced_alert.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2023, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class TestSilencedAlert(FrappeTestCase):
+ pass
diff --git a/press/press/doctype/site/archive.py b/press/press/doctype/site/archive.py
new file mode 100644
index 00000000000..c56a1cd8e36
--- /dev/null
+++ b/press/press/doctype/site/archive.py
@@ -0,0 +1,91 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+from typing import TYPE_CHECKING
+
+import frappe
+
+from press.utils import log_error
+
+if TYPE_CHECKING:
+ from press.press.doctype.site.site import Site
+
+
+def archive_suspended_trial_sites():
+ ARCHIVE_AFTER_DAYS = 21
+ ARCHIVE_AT_ONCE = 10
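+	# At most ARCHIVE_AT_ONCE sites are archived per invocation to keep each run short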
+
+ filters = [
+ ["status", "=", "Suspended"],
+ ["trial_end_date", "is", "set"],
+ [
+ "trial_end_date",
+ "<",
+ frappe.utils.add_to_date(None, days=-(ARCHIVE_AFTER_DAYS + 1)),
+ ], # Don't look at sites that are unlikely to be archived
+ ]
+
+ sites = frappe.get_all(
+ "Site",
+ filters=filters,
+ fields=["name", "team", "trial_end_date"],
+ order_by="creation asc",
+ )
+
+ archived_now = 0
+ for site in sites:
+		if archived_now >= ARCHIVE_AT_ONCE:
+ break
+ try:
+ suspension_date = frappe.get_all(
+ "Site Activity",
+ filters={"site": site.name, "action": "Suspend Site"},
+ pluck="creation",
+ order_by="creation desc",
+ limit=1,
+ )[0]
+ suspended_days = frappe.utils.date_diff(frappe.utils.today(), suspension_date)
+
+ if suspended_days > ARCHIVE_AFTER_DAYS:
+ site: Site = frappe.get_doc("Site", site.name, for_update=True)
+ site.archive(reason="Archive suspended trial site")
+ archived_now = archived_now + 1
+ frappe.db.commit()
+ except Exception:
+ log_error("Suspended Site Archive Error")
+			# Without the rollback, the transaction would be implicitly committed,
+			# so we selectively commit and roll back
+ frappe.db.rollback()
+
+
+def delete_offsite_backups_for_archived_sites():
+ archived_sites = frappe.db.sql(
+ """
+ SELECT
+ backup.site,
+ COUNT(*) as offsite_backups
+ FROM
+ `tabSite Backup` backup
+ LEFT JOIN
+ `tabSite` site
+ ON
+ backup.site = site.name
+ WHERE
+ site.status = "Archived" AND
+ backup.files_availability = "Available" AND
+ backup.offsite = True
+ GROUP BY
+ backup.site
+ HAVING
+ offsite_backups > 1
+ ORDER BY
+ offsite_backups DESC
+ """,
+ as_dict=True,
+ )
+ for site in archived_sites:
+ try:
+ frappe.get_doc("Site", site.site).delete_offsite_backups()
+ frappe.db.commit()
+ except Exception:
+ frappe.db.rollback()
diff --git a/press/press/doctype/site/backups.py b/press/press/doctype/site/backups.py
index 3df24d25487..86a68ba4297 100644
--- a/press/press/doctype/site/backups.py
+++ b/press/press/doctype/site/backups.py
@@ -1,27 +1,25 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
import functools
from collections import deque
from datetime import datetime, timedelta
+from functools import wraps
from itertools import groupby
-from typing import Dict, List
+from time import time
import frappe
import pytz
from press.press.doctype.press_settings.press_settings import PressSettings
from press.press.doctype.remote_file.remote_file import delete_remote_backup_objects
-from press.press.doctype.site.site import Site
+from press.press.doctype.site.site import Literal, Site
from press.press.doctype.site_backup.site_backup import SiteBackup
from press.press.doctype.subscription.subscription import Subscription
from press.utils import log_error
-from functools import wraps
-from time import time
-
def timing(f):
@wraps(f)
@@ -29,12 +27,15 @@ def wrap(*args, **kw):
ts = time()
result = f(*args, **kw)
te = time()
- print(f"Took {te-ts}s")
+ print(f"Took {te - ts}s")
return result
return wrap
+BACKUP_TYPES = Literal["Logical", "Physical"]
+
+
class BackupRotationScheme:
"""
Represents backup rotation scheme for maintaining offsite backups.
@@ -42,9 +43,7 @@ class BackupRotationScheme:
Rotation is maintained by controlled deletion of daily backups.
"""
- def _expire_and_get_remote_files(
- self, offsite_backups: List[Dict[str, str]]
- ) -> List[str]:
+ def _expire_and_get_remote_files(self, offsite_backups: list[str]) -> list[str]:
"""Mark backup as unavailable and return remote files to delete."""
remote_files_to_delete = []
for backup in offsite_backups:
@@ -78,11 +77,11 @@ def expire_local_backups(self):
sites.append(site_conf.name)
self._expire_backups_of_site_in_bench(sites, config)
- @functools.lru_cache(maxsize=128)
+ @functools.lru_cache(maxsize=128) # noqa: B019
def _get_expiry(self, config: str):
return frappe.parse_json(config or "{}").keep_backups_for_hours or 24
- def _expire_backups_of_site_in_bench(self, sites: List[str], expiry: int):
+ def _expire_backups_of_site_in_bench(self, sites: list[str], expiry: int):
if sites:
frappe.db.set_value(
"Site Backup",
@@ -90,6 +89,7 @@ def _expire_backups_of_site_in_bench(self, sites: List[str], expiry: int):
"site": ("in", sites),
"status": "Success",
"files_availability": "Available",
+ "physical": False,
"offsite": False,
"creation": ("<", frappe.utils.add_to_date(None, hours=-expiry)),
},
@@ -97,15 +97,48 @@ def _expire_backups_of_site_in_bench(self, sites: List[str], expiry: int):
"Unavailable",
)
- def expire_offsite_backups(self) -> List[str]:
- """Expire and return list of offsite backups to delete."""
+	def _mark_physical_backups_as_expired(self, backups: list[str]):
+		site_backups = frappe.get_all(
+			"Site Backup",
+			filters={
+				"name": ("in", backups),
+				"files_availability": "Available",
+				"physical": True,
+			},
+			fields=["name", "database_snapshot"],
+		)
+		for backup in site_backups:
+			# Mark the backup unavailable and expire its underlying disk snapshot
+			frappe.db.set_value(
+				"Site Backup",
+				backup.name,
+				"files_availability",
+				"Unavailable",
+			)
+			frappe.db.set_value(
+				"Virtual Disk Snapshot",
+				backup.database_snapshot,
+				"expired",
+				True,
+			)
+
+ def get_backups_due_for_expiry(self, backup_type: BACKUP_TYPES) -> list[str]:
raise NotImplementedError
+ def expire_offsite_backups(self) -> list[str]:
+ """Expire and return list of offsite backups to delete."""
+ return self._expire_and_get_remote_files(self.get_backups_due_for_expiry("Logical"))
+
def cleanup_offsite(self):
"""Expire backups according to the rotation scheme."""
expired_remote_files = self.expire_offsite_backups()
delete_remote_backup_objects(expired_remote_files)
+ def expire_physical_backups(self):
+ """Expire backups according to the rotation scheme"""
+ self._mark_physical_backups_as_expired(self.get_backups_due_for_expiry("Physical"))
+
class FIFO(BackupRotationScheme):
"""Represents First-in-First-out backup rotation scheme."""
@@ -115,7 +148,7 @@ def __init__(self):
frappe.db.get_single_value("Press Settings", "offsite_backups_count") or 30
)
- def expire_offsite_backups(self) -> List[str]:
+ def get_backups_due_for_expiry(self, backup_type: BACKUP_TYPES) -> list[str]:
offsite_expiry = self.offsite_backups_count
to_be_expired_backups = []
sites = frappe.get_all("Site", {"status": ("!=", "Archived")}, pluck="name")
@@ -126,11 +159,13 @@ def expire_offsite_backups(self) -> List[str]:
"site": site,
"status": "Success",
"files_availability": "Available",
- "offsite": True,
+ "offsite": backup_type == "Logical",
+ "physical": backup_type == "Physical",
},
order_by="creation desc",
+ pluck="name",
)[offsite_expiry:]
- return self._expire_and_get_remote_files(to_be_expired_backups)
+ return to_be_expired_backups
class GFS(BackupRotationScheme):
@@ -148,20 +183,21 @@ class GFS(BackupRotationScheme):
monthly_backup_day = 1 # days of the month (1-31)
yearly_backup_day = 1 # days of the year (1-366)
- def expire_offsite_backups(self) -> List[str]:
+ def get_backups_due_for_expiry(self, backup_type: BACKUP_TYPES) -> list[str]:
today = frappe.utils.getdate()
oldest_daily = today - timedelta(days=self.daily)
oldest_weekly = today - timedelta(weeks=4)
oldest_monthly = today - timedelta(days=366)
oldest_yearly = today - timedelta(days=3653)
- to_be_expired_backups = frappe.db.sql(
+ backups = frappe.db.sql(
f"""
SELECT name from `tabSite Backup`
WHERE
site in (select name from tabSite where status != "Archived") and
status="Success" and
files_availability="Available" and
- offsite=True and
+ offsite={backup_type == "Logical"} and
+ physical={backup_type == "Physical"} and
creation < "{oldest_daily}" and
(DAYOFWEEK(creation) != {self.weekly_backup_day} or creation < "{oldest_weekly}") and
(DAYOFMONTH(creation) != {self.monthly_backup_day} or creation < "{oldest_monthly}") and
@@ -169,11 +205,31 @@ def expire_offsite_backups(self) -> List[str]:
""",
as_dict=True,
)
+ return [backup["name"] for backup in backups]
# XXX: DAYOFWEEK in sql gives 1-7 for SUN-SAT in sql
# datetime.weekday() in python gives 0-6 for MON-SUN
# datetime.isoweekday() in python gives 1-7 for MON-SUN
- return self._expire_and_get_remote_files(to_be_expired_backups)
+
+class ModifiableCycle:
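+	"""Cycle over items indefinitely while allowing removal of the current or
+	previous item, e.g. dropping a server whose site iterator is exhausted."""
+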
+ def __init__(self, items=()):
+ self.deque = deque(items)
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ if not self.deque:
+ raise StopIteration
+ item = self.deque.popleft()
+ self.deque.append(item)
+ return item
+
+ def delete_next(self):
+ self.deque.popleft()
+
+ def delete_prev(self):
+ self.deque.pop()
class ScheduledBackupJob:
@@ -188,21 +244,25 @@ def is_backup_hour(self, hour: int) -> bool:
# return (hour + self.offset) % self.interval == 0
return True
- def __init__(self):
+ def __init__(self, backup_type: BACKUP_TYPES):
+ self.backup_type: BACKUP_TYPES = backup_type
self.interval: int = (
frappe.get_cached_value("Press Settings", "Press Settings", "backup_interval") or 6
)
- self.offset: int = (
- frappe.get_cached_value("Press Settings", "Press Settings", "backup_offset") or 0
- )
- self.limit = (
- frappe.get_cached_value("Press Settings", "Press Settings", "backup_limit") or 100
+ self.offset: int = frappe.get_cached_value("Press Settings", "Press Settings", "backup_offset") or 0
+ self.limit = frappe.get_cached_value("Press Settings", "Press Settings", "backup_limit") or 100
+ self.max_failed_backup_attempts_in_a_day = (
+ frappe.get_cached_value("Press Settings", "Press Settings", "max_failed_backup_attempts_in_a_day")
+ or 6
)
self.offsite_setup = PressSettings.is_offsite_setup()
self.server_time = datetime.now()
- self.sites = Site.get_sites_for_backup(self.interval)
- self.sites_without_offsite = Subscription.get_sites_without_offsite_backups()
+ self.sites = Site.get_sites_for_backup(self.interval, backup_type=self.backup_type)
+ if self.backup_type == "Logical":
+ self.sites_without_offsite = Subscription.get_sites_without_offsite_backups()
+ else:
+ self.sites_without_offsite = []
def take_offsite(self, site: frappe._dict, day: datetime.date) -> bool:
return (
@@ -211,43 +271,23 @@ def take_offsite(self, site: frappe._dict, day: datetime.date) -> bool:
and not SiteBackup.offsite_backup_exists(site.name, day)
)
- def get_site_time(self, site: Dict[str, str]) -> datetime:
+ def get_site_time(self, site: dict[str, str]) -> datetime:
timezone = site.timezone or "Asia/Kolkata"
site_timezone = pytz.timezone(timezone)
return self.server_time.astimezone(site_timezone)
- class ModifiableCycle:
- def __init__(self, items=()):
- self.deque = deque(items)
-
- def __iter__(self):
- return self
-
- def __next__(self):
- if not self.deque:
- raise StopIteration
- item = self.deque.popleft()
- self.deque.append(item)
- return item
-
- def delete_next(self):
- self.deque.popleft()
-
- def delete_prev(self):
- self.deque.pop()
-
def start(self):
"""Schedule backups for all Active sites based on their local timezones. Also trigger offsite backups once a day."""
sites_by_server = []
for server, sites in groupby(self.sites, lambda d: d.server):
sites_by_server.append((server, iter(list(sites))))
- sites_by_server_cycle = self.ModifiableCycle(sites_by_server)
+ sites_by_server_cycle = ModifiableCycle(sites_by_server)
self._take_backups_in_round_robin(sites_by_server_cycle)
def _take_backups_in_round_robin(self, sites_by_server_cycle: ModifiableCycle):
limit = min(len(self.sites), self.limit)
- for server, sites in sites_by_server_cycle:
+ for _server, sites in sites_by_server_cycle:
try:
site = next(sites)
while not self.backup(site):
@@ -263,13 +303,39 @@ def backup(self, site) -> bool:
"""Return true if backup was taken."""
try:
site_time = self.get_site_time(site)
- if self.is_backup_hour(site_time.hour):
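+			# Count today's failed attempts; sites over the limit are skipped below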
+ failed_backup_attempts_in_a_day = frappe.db.count(
+ "Site Backup",
+ {
+ "site": site.name,
+ "status": ("in", ["Failure", "Delivery Failure"]),
+ "physical": self.backup_type == "Physical",
+ "creation": [
+ ">=",
+ frappe.utils.add_days(None, -1),
+ ],
+ },
+ )
+ if (
+ self.is_backup_hour(site_time.hour)
+ and failed_backup_attempts_in_a_day <= self.max_failed_backup_attempts_in_a_day
+ ):
today = frappe.utils.getdate()
- offsite = self.take_offsite(site, today)
- with_files = offsite or not SiteBackup.file_backup_exists(site.name, today)
-
- frappe.get_doc("Site", site.name).backup(with_files=with_files, offsite=offsite)
+			# Offsite backup applies only to logical backups;
+			# physical backups cannot include site files.
+ offsite = self.backup_type == "Logical" and self.take_offsite(site, today)
+ with_files = self.backup_type == "Logical" and (
+ offsite or not SiteBackup.file_backup_exists(site.name, today)
+ )
+
+ frappe.get_doc("Site", site.name).backup(
+ with_files=with_files,
+ offsite=offsite,
+ physical=(self.backup_type == "Physical"),
+ deactivate_site_during_backup=(self.backup_type == "Physical"),
+ )
frappe.db.commit()
return True
return False
@@ -279,16 +345,49 @@ def backup(self, site) -> bool:
frappe.db.rollback()
-def schedule():
- scheduled_backup_job = ScheduledBackupJob()
+def schedule_logical_backups_for_sites_with_backup_time():
+ """
+ Schedule logical backups for sites with backup time.
+
+ Run this hourly only
+ """
+ sites = Site.get_sites_with_backup_time("Logical")
+ for site in sites:
+ site_doc: Site = frappe.get_doc("Site", site.name)
+ site_doc.backup(with_files=True, offsite=True, physical=False)
+ frappe.db.commit()
+
+
+def schedule_physical_backups_for_sites_with_backup_time():
+ """
+ Schedule physical backups for sites with backup time.
+
+ Run this hourly only
+ """
+ sites = Site.get_sites_with_backup_time("Physical")
+ for site in sites:
+ site_doc: Site = frappe.get_doc("Site", site.name)
+ site_doc.backup(with_files=False, offsite=False, physical=True, deactivate_site_during_backup=True)
+ frappe.db.commit()
+
+
+def schedule_logical_backups():
+ scheduled_backup_job = ScheduledBackupJob(backup_type="Logical")
+ scheduled_backup_job.start()
+
+
+def schedule_physical_backups():
+ scheduled_backup_job = ScheduledBackupJob(backup_type="Physical")
scheduled_backup_job.start()
def cleanup_offsite():
"""Delete expired (based on policy) offsite backups and mark em as Unavailable."""
- scheme = (
- frappe.db.get_single_value("Press Settings", "backup_rotation_scheme") or "FIFO"
- )
+ frappe.enqueue("press.press.doctype.site.backups._cleanup_offsite", queue="long", timeout=3600)
+
+
+def _cleanup_offsite():
+ scheme = frappe.db.get_single_value("Press Settings", "backup_rotation_scheme") or "FIFO"
if scheme == "FIFO":
rotation = FIFO()
elif scheme == "Grandfather-father-son":
@@ -302,3 +401,18 @@ def cleanup_local():
brs = BackupRotationScheme()
brs.expire_local_backups()
frappe.db.commit()
+
+
+def expire_physical():
+	"""Mark physical snapshots as expired (based on policy) and mark their backups as Unavailable."""
+ frappe.enqueue("press.press.doctype.site.backups._expire_physical_backups")
+
+
+def _expire_physical_backups():
+ scheme = frappe.db.get_single_value("Press Settings", "backup_rotation_scheme") or "FIFO"
+ if scheme == "FIFO":
+ rotation = FIFO()
+ elif scheme == "Grandfather-father-son":
+ rotation = GFS()
+ rotation.expire_physical_backups()
+ frappe.db.commit()
diff --git a/press/press/doctype/site/erpnext_site.py b/press/press/doctype/site/erpnext_site.py
index c8f43262f8c..9496c22ec5d 100644
--- a/press/press/doctype/site/erpnext_site.py
+++ b/press/press/doctype/site/erpnext_site.py
@@ -10,7 +10,7 @@
class ERPNextSite(Site):
- def __init__(self, site=None, account_request: AccountRequest = None):
+ def __init__(self, site=None, account_request: AccountRequest | None = None):
if site:
super().__init__("Site", site)
elif account_request:
@@ -24,9 +24,7 @@ def __init__(self, site=None, account_request: AccountRequest = None):
"team": "Administrator",
"account_request": account_request.name,
"subscription_plan": get_erpnext_plan(),
- "erpnext_consultant": ERPNextConsultant.get_one_for_country(
- account_request.country
- ),
+ "erpnext_consultant": ERPNextConsultant.get_one_for_country(account_request.country),
"trial_end_date": frappe.utils.add_days(None, 14),
}
)
@@ -38,9 +36,7 @@ def rename_pooled_site(self, account_request):
self.trial_end_date = frappe.utils.add_days(None, 14)
plan = get_erpnext_plan()
self._update_configuration(self.get_plan_config(plan), save=False)
- self.erpnext_consultant = ERPNextConsultant.get_one_for_country(
- account_request.country
- )
+ self.erpnext_consultant = ERPNextConsultant.get_one_for_country(account_request.country)
self.save(ignore_permissions=True)
self.create_subscription(plan)
diff --git a/press/press/doctype/site/patches/set_plan_limit_in_site_config.py b/press/press/doctype/site/patches/set_plan_limit_in_site_config.py
new file mode 100644
index 00000000000..4cd3d7a9ac3
--- /dev/null
+++ b/press/press/doctype/site/patches/set_plan_limit_in_site_config.py
@@ -0,0 +1,30 @@
+# Copyright (c) 2024, Frappe Technologies Pvt. Ltd. and Contributors
+# For license information, please see license.txt
+import frappe
+from tqdm import tqdm
+
+
+def execute():
+ key_name = "plan_limit"
+ if not frappe.db.exists("Site Config Key", {"key": key_name}):
+ frappe.get_doc(
+ {
+ "doctype": "Site Config Key",
+ "key": key_name,
+ "type": "JSON",
+ "internal": True,
+ }
+ ).insert(ignore_permissions=True)
+
+ non_archived_sites = frappe.get_all(
+ "Site", filters={"status": ("!=", "Archived")}, pluck="name"
+ )
+
+ for site_name in tqdm(non_archived_sites):
+ try:
+ site = frappe.get_doc("Site", site_name, for_update=True)
+ site.update_site_config(site.get_plan_config())
+ frappe.db.commit()
+ except Exception as e:
+ print(f"Couldn't set plan limit for site {site_name}: {e}")
+ frappe.db.rollback()
diff --git a/press/press/doctype/site/patches/set_status_wizard_check_next_retry_datetime_in_site.py b/press/press/doctype/site/patches/set_status_wizard_check_next_retry_datetime_in_site.py
new file mode 100644
index 00000000000..c9fc6172480
--- /dev/null
+++ b/press/press/doctype/site/patches/set_status_wizard_check_next_retry_datetime_in_site.py
@@ -0,0 +1,23 @@
+# Copyright (c) 2021, Frappe Technologies Pvt. Ltd. and Contributors
+# For license information, please see license.txt
+import frappe
+
+
+def execute():
+ frappe.reload_doctype("Site")
+ # set setup_wizard_status_check_next_retry_on to current datetime
+ # in saas sites that has setup_wizard_complete = false
+ # and setup_wizard_status_check_next_retry_on is null
+
+ frappe.db.sql(
+ """
+ UPDATE
+ tabSite s
+ SET
+ s.setup_wizard_status_check_next_retry_on = NOW()
+ WHERE
+ s.setup_wizard_complete = 0
+ and s.setup_wizard_status_check_next_retry_on is null
+ and s.status != 'Archived'
+ """
+ )
diff --git a/press/press/doctype/site/pool.py b/press/press/doctype/site/pool.py
index 1a7824a4997..b20626e9c59 100644
--- a/press/press/doctype/site/pool.py
+++ b/press/press/doctype/site/pool.py
@@ -1,23 +1,21 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2021, Frappe and contributors
# For license information, please see license.txt
import frappe
from frappe.model.naming import make_autoname
-from press.utils import log_error
+
from press.press.doctype.site.erpnext_site import (
- get_erpnext_bench,
get_erpnext_apps,
+ get_erpnext_bench,
get_erpnext_domain,
)
+from press.utils import log_error
class SitePool:
def __init__(self):
- self.site_count = frappe.db.count(
- "Site", filters={"is_standby": True, "status": "Active"}
- )
+ self.site_count = frappe.db.count("Site", filters={"is_standby": True, "status": "Active"})
self.pool_size = frappe.db.get_single_value("Press Settings", "standby_pool_size")
self.queue_size = frappe.db.get_single_value("Press Settings", "standby_queue_size")
@@ -27,7 +25,6 @@ def create(self):
sites_created = 0
while sites_created < self.queue_size:
self.create_one()
- frappe.db.commit()
sites_created += 1
def create_one(self):
diff --git a/press/press/doctype/site/saas_pool.py b/press/press/doctype/site/saas_pool.py
index 39e544ead91..c3c648c09fd 100644
--- a/press/press/doctype/site/saas_pool.py
+++ b/press/press/doctype/site/saas_pool.py
@@ -1,44 +1,52 @@
import frappe
from frappe.model.naming import make_autoname
-from press.utils import log_error
+
from press.press.doctype.site.saas_site import (
- get_saas_bench,
+ create_app_subscriptions,
+ get_pool_apps,
get_saas_apps,
+ get_saas_bench,
get_saas_domain,
- get_pool_apps,
- create_app_subscriptions,
set_site_in_subscription_docs,
)
+from press.utils import log_error
class SaasSitePool:
def __init__(self, app):
self.app = app
self.site_count = frappe.db.count(
- "Site", filters={"is_standby": True, "status": "Active", "standby_for": self.app}
+ "Site",
+ filters={
+ "is_standby": True,
+ "status": ["in", ["Active", "Pending", "Installing", "Updating", "Recovering"]],
+ "standby_for": self.app,
+ "hybrid_saas_pool": "",
+ },
)
self.saas_settings = frappe.get_doc("Saas Settings", app)
def create(self):
- if (
- self.saas_settings.enable_pooling
- and self.site_count < self.saas_settings.standby_pool_size
- ):
- sites_created = 0
- while sites_created < self.saas_settings.standby_queue_size:
- self.create_one()
- frappe.db.commit()
- sites_created += 1
+ if self.saas_settings.enable_pooling:
+ if self.site_count < self.saas_settings.standby_pool_size:
+ sites_created = 0
+ while sites_created < self.saas_settings.standby_queue_size:
+ self.create_one()
+ frappe.db.commit()
+ sites_created += 1
- def create_one(self):
- bench, apps, subdomain, domain = None, None, None, None
- try:
if frappe.db.get_value("Saas Settings", self.app, "enable_hybrid_pools"):
self.create_hybrid_pool_sites()
+
+ def create_one(self, pool_name: str = ""):
+ bench, apps, subdomain, domain = None, None, None, None
+ try:
domain = get_saas_domain(self.app)
bench = get_saas_bench(self.app)
subdomain = self.get_subdomain()
apps = get_saas_apps(self.app)
+ if pool_name:
+ apps.extend(get_pool_apps(pool_name))
site = frappe.get_doc(
{
"doctype": "Site",
@@ -46,7 +54,8 @@ def create_one(self):
"domain": domain,
"is_standby": True,
"standby_for": self.app,
- "team": "Administrator",
+ "hybrid_saas_pool": pool_name,
+ "team": frappe.get_value("Team", {"user": "Administrator"}, "name"),
"bench": bench,
"apps": [{"app": app} for app in apps],
}
@@ -68,28 +77,24 @@ def create_hybrid_pool_sites(self):
# create a Site according to Site Rules child table in each Hybrid Saas Pool
for pool_name in frappe.get_all("Hybrid Saas Pool", {"app": self.app}, pluck="name"):
# only has app rules for now, will add site config and other rules later
- pool_apps = get_pool_apps(pool_name)
- domain = get_saas_domain(self.app)
- bench = get_saas_bench(self.app)
- subdomain = self.get_subdomain()
- apps = get_saas_apps(self.app)
- apps.extend(pool_apps)
- site = frappe.get_doc(
+ hybrid_standby_count = frappe.db.count(
+ "Site",
{
- "doctype": "Site",
- "subdomain": subdomain,
- "domain": domain,
- "is_standby": True,
- "hybrid_saas_pool": pool_name,
+ "is_standby": 1,
"standby_for": self.app,
- "team": "Administrator",
- "bench": bench,
- "apps": [{"app": app} for app in apps],
- }
+ "hybrid_saas_pool": pool_name,
+ "status": ("in", ["Active", "Pending", "Installing", "Updating", "Recovering"]),
+ },
)
- subscription_docs = create_app_subscriptions(site, self.app)
- site.insert()
- set_site_in_subscription_docs(subscription_docs, site.name)
+
+			if hybrid_standby_count >= self.saas_settings.standby_pool_size:
+ continue
+
+ sites_created = 0
+ while sites_created < self.saas_settings.standby_queue_size:
+ self.create_one(pool_name)
+ frappe.db.commit()
+ sites_created += 1
def get_subdomain(self):
return make_autoname("standby-.########")
diff --git a/press/press/doctype/site/saas_site.py b/press/press/doctype/site/saas_site.py
index c24f9c06452..e0d01b59ad6 100644
--- a/press/press/doctype/site/saas_site.py
+++ b/press/press/doctype/site/saas_site.py
@@ -1,7 +1,10 @@
-import frappe
+import contextlib
import json
-from press.press.doctype.site.site import Site
+
+import frappe
+
from press.press.doctype.account_request.account_request import AccountRequest
+from press.press.doctype.site.site import Site
class SaasSite(Site):
@@ -9,7 +12,7 @@ def __init__(
self,
site=None,
app=None,
- account_request: AccountRequest = None,
+ account_request: AccountRequest | None = None,
hybrid_saas_pool=None,
subdomain=None,
):
@@ -50,8 +53,20 @@ def rename_pooled_site(self, account_request=None, subdomain=None):
self.trial_end_date = frappe.utils.add_days(None, 14)
plan = get_saas_site_plan(self.app)
self._update_configuration(self.get_plan_config(plan), save=False)
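+		# Merge the trial end date into any existing "subscription" site config value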
+ subscription_config = {}
+ for row in self.configuration:
+ if row.key == "subscription":
+ with contextlib.suppress(json.JSONDecodeError):
+ subscription_config = json.loads(row.value)
+ subscription_config.update(
+ {
+ "trial_end_date": self.trial_end_date.strftime("%Y-%m-%d"),
+ }
+ )
+ self._update_configuration({"subscription": subscription_config}, save=False)
self.save(ignore_permissions=True)
self.create_subscription(plan)
+ self.reload()
return self
@@ -77,6 +92,7 @@ def get_saas_bench(app):
pluck="name",
)
release_group = get_saas_group(app)
+ cluster = get_saas_cluster(app)
bench_servers = frappe.db.sql(
"""
SELECT
@@ -88,18 +104,16 @@ def get_saas_bench(app):
ON
bench.server = server.name
WHERE
- server.proxy_server in %s AND bench.status = "Active" AND bench.group = %s
+ server.proxy_server in %s AND server.cluster = %s AND bench.status = "Active" AND bench.group = %s
ORDER BY
server.use_for_new_sites DESC, bench.creation DESC
""",
- [proxy_servers, release_group],
+ [proxy_servers, cluster, release_group],
as_dict=True,
)
signup_servers = tuple([bs["server"] for bs in bench_servers])
- signup_server_sub_str = (
- tuple(signup_servers) if len(signup_servers) > 1 else f"('{signup_servers[0]}')"
- )
+ signup_server_sub_str = tuple(signup_servers) if len(signup_servers) > 1 else f"('{signup_servers[0]}')"
lowest_cpu_server = frappe.db.sql(
f"""
SELECT
@@ -111,7 +125,7 @@ def get_saas_bench(app):
FROM
tabSite site
LEFT JOIN
- tabPlan plan
+ `tabSite Plan` plan
ON
site.plan = plan.name
WHERE
@@ -123,13 +137,12 @@ def get_saas_bench(app):
LIMIT 1""",
as_dict=True,
)
- lowest_cpu_server = (
- lowest_cpu_server[0].server if lowest_cpu_server else signup_servers[0]
- )
+ lowest_cpu_server = lowest_cpu_server[0].server if lowest_cpu_server else signup_servers[0]
for bs in bench_servers:
if bs["server"] == lowest_cpu_server:
return bs["name"]
+ return None
def get_saas_plan(app):
@@ -174,16 +187,23 @@ def get_default_team_for_app(app):
def create_app_subscriptions(site, app):
marketplace_apps = (
- get_saas_apps(app)
- if frappe.db.get_value("Saas Settings", app, "multi_subscription")
- else [app]
+ get_saas_apps(app) if frappe.db.get_value("Saas Settings", app, "multi_subscription") else [app]
)
# create subscriptions
subscription_docs, custom_saas_config = get_app_subscriptions(marketplace_apps, app)
+ if site.trial_end_date:
+ # set trial end date in site config
+ subscription_saas_config: dict = custom_saas_config.get("subscription", {})
+ subscription_saas_config.update(
+ {
+ "trial_end_date": site.trial_end_date.strftime("%Y-%m-%d"),
+ }
+ )
+ custom_saas_config["subscription"] = subscription_saas_config
# set site config
- site_config = {f"sk_{s.app}": s.secret_key for s in subscription_docs}
+ site_config = {f"sk_{s.document_name}": s.secret_key for s in subscription_docs}
site_config.update(custom_saas_config)
site._update_configuration(site_config, save=False)
@@ -201,19 +221,20 @@ def get_app_subscriptions(apps=None, standby_for=None):
for app in apps:
free_plan = frappe.get_all(
- "Marketplace App Plan", {"enabled": 1, "is_free": 1, "app": app}, pluck="name"
+ "Marketplace App Plan",
+ {"enabled": 1, "price_usd": ("<=", 0), "app": app},
+ pluck="name",
)
if free_plan:
new_subscription = frappe.get_doc(
{
- "doctype": "Marketplace App Subscription",
- "marketplace_app_plan": get_saas_plan(app)
- if frappe.db.exists("Saas Settings", app)
- else free_plan[0],
- "app": app,
- "while_site_creation": True,
- "status": "Disabled",
- "team": "Administrator",
+ "doctype": "Subscription",
+ "document_type": "Marketplace App",
+ "document_name": app,
+ "plan_type": "Marketplace App Plan",
+ "plan": get_saas_plan(app) if frappe.db.exists("Saas Settings", app) else free_plan[0],
+ "enabled": 0,
+ "team": frappe.get_value("Team", {"user": "Administrator"}, "name"),
}
).insert(ignore_permissions=True)
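The hunk above moves off the app-specific `Marketplace App Subscription` doctype onto the generic `Subscription` doctype. Roughly, the old fields map like this (illustrative payloads with placeholder values, not live data):

```python
# Old, app-specific shape (removed above):
old_payload = {
    "doctype": "Marketplace App Subscription",
    "app": "frappe",                     # placeholder app name
    "status": "Disabled",
    "team": "Administrator",
}

# New, generic shape (added above): the billed document is named
# explicitly, and enabled=0 replaces status="Disabled".
new_payload = {
    "doctype": "Subscription",
    "document_type": "Marketplace App",
    "document_name": "frappe",           # placeholder app name
    "plan_type": "Marketplace App Plan",
    "enabled": 0,
}
```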
@@ -225,15 +246,11 @@ def get_app_subscriptions(apps=None, standby_for=None):
if app == standby_for:
secret_key = new_subscription.secret_key
- if standby_for in frappe.get_all(
- "Saas Settings", {"billing_type": "prepaid"}, pluck="name"
- ):
+ if standby_for in frappe.get_all("Saas Settings", {"billing_type": "prepaid"}, pluck="name"):
custom_saas_config.update(
{
"subscription": {"secret_key": secret_key},
- "app_include_js": [
- frappe.db.get_single_value("Press Settings", "app_include_script")
- ],
+ "app_include_js": [frappe.db.get_single_value("Press Settings", "app_include_script")],
}
)
diff --git a/press/press/doctype/site/site.js b/press/press/doctype/site/site.js
index cdd21b99b61..688ccc9ff3e 100644
--- a/press/press/doctype/site/site.js
+++ b/press/press/doctype/site/site.js
@@ -30,7 +30,7 @@ frappe.ui.form.on('Site', {
`,
);
- frm.add_web_link(`https://${frm.doc.name}`, __('Visit Site'));
+ frm.add_web_link(`https://${frm.doc.name}/apps`, __('Visit Site'));
frm.add_web_link(`/dashboard/sites/${frm.doc.name}`, __('Visit Dashboard'));
let site = frm.get_doc();
@@ -82,8 +82,42 @@ frappe.ui.form.on('Site', {
);
}
+ if (!site.is_monitoring_disabled) {
+ frm.add_custom_button(
+ __('Disable Monitoring'),
+ () => {
+ frappe.prompt(
+ {
+ fieldtype: 'Data',
+ label: 'Reason',
+ fieldname: 'reason',
+ reqd: 1,
+ },
+ ({ reason }) => {
+ frm
+ .call('disable_monitoring', {
+ reason,
+ })
+ .then((r) => frm.refresh());
+ },
+ __('Provide Reason'),
+ );
+ },
+ __('Actions'),
+ );
+ } else {
+ frm.add_custom_button(
+ __('Enable Monitoring'),
+ () => {
+ frappe.msgprint('Go to Dashboard to re-enable monitoring.');
+ },
+ __('Actions'),
+ );
+ }
+
[
[__('Backup'), 'backup'],
+ [__('Physical Backup'), 'physical_backup'],
[__('Sync Info'), 'sync_info'],
].forEach(([label, method]) => {
frm.add_custom_button(
@@ -95,39 +129,16 @@ frappe.ui.form.on('Site', {
);
});
[
- [__('Archive'), 'archive', frm.doc.status !== 'Archived'],
- [__('Cleanup after Archive'), 'cleanup_after_archive'],
+ [__('Sync Apps'), 'sync_apps'],
[__('Migrate'), 'migrate'],
- [__('Reinstall'), 'reinstall'],
- [__('Restore'), 'restore_site'],
- [__('Restore Tables'), 'restore_tables'],
[__('Update'), 'schedule_update'],
[__('Deactivate'), 'deactivate'],
[__('Activate'), 'activate', frm.doc.status !== 'Archived'],
[__('Reset Site Usage'), 'reset_site_usage'],
[__('Clear Cache'), 'clear_site_cache'],
+ [__('Optimize Tables'), 'optimize_tables'],
[__('Update Site Config'), 'update_site_config'],
- [
- __('Enable Database Access'),
- 'enable_database_access',
- !frm.doc.is_database_access_enabled,
- ],
- [
- __('Disable Database Access'),
- 'disable_database_access',
- frm.doc.is_database_access_enabled,
- ],
[__('Create DNS Record'), 'create_dns_record'],
- [
- __('Enable Database Write Access'),
- 'enable_read_write',
- frm.doc.database_access_mode == 'read_only',
- ],
- [
- __('Disable Database Write Access'),
- 'disable_read_write',
- frm.doc.database_access_mode == 'read_write',
- ],
[__('Run After Migrate Steps'), 'run_after_migrate_steps'],
[__('Retry Rename'), 'retry_rename'],
[
@@ -136,6 +147,19 @@ frappe.ui.form.on('Site', {
frm.doc.name.includes('.archived'),
],
[__('Update without Backup'), 'update_without_backup'],
+ [
+ __('Fetch bench from Agent'),
+ 'fetch_bench_from_agent',
+ frm.doc.status !== 'Archived',
+ ],
+ [
+ __('Set status based on Ping'),
+ 'set_status_based_on_ping',
+ !['Active', 'Archived', 'Inactive', 'Suspended'].includes(
+ frm.doc.status,
+ ),
+ ],
+ [__('Show Admin Password'), 'show_admin_password'],
].forEach(([label, method, condition]) => {
if (typeof condition === 'undefined' || condition) {
frm.add_custom_button(
@@ -151,6 +175,14 @@ frappe.ui.form.on('Site', {
}
});
+ frm.add_custom_button(
+ __('Update Skip Failing Patches'),
+ () => {
+ frm.call('schedule_update', { skip_failing_patches: true });
+ },
+ __('Actions'),
+ );
+
frm.add_custom_button(
__('Force Archive'),
() => {
@@ -190,29 +222,6 @@ frappe.ui.form.on('Site', {
});
frm.toggle_enable(['host_name'], frm.doc.status === 'Active');
- if (frm.doc.is_database_access_enabled) {
- frm.add_custom_button(
- __('Show Database Credentials'),
- () =>
- frm.call('get_database_credentials').then((r) => {
- let message = `Host: ${r.message.host}
-
-Port: ${r.message.port}
-
-Database: ${r.message.database}
-
-Username: ${r.message.username}
-
-Password: ${r.message.password}
-
-\`\`\`\nmysql -u ${r.message.username} -p${r.message.password} -h ${r.message.host} -P ${r.message.port} --ssl --ssl-verify-server-cert\n\`\`\``;
-
- frappe.msgprint(frappe.markdown(message), 'Database Credentials');
- }),
- __('Actions'),
- );
- }
-
frm.add_custom_button(
__('Replicate Site'),
() => {
@@ -241,6 +250,35 @@ Password: ${r.message.password}
__('Actions'),
);
+ frm.add_custom_button(
+ __('Update DNS Record'),
+ () => {
+ const dialog = new frappe.ui.Dialog({
+ title: __('Update DNS Record'),
+ fields: [
+ {
+ fieldtype: 'Data',
+ fieldname: 'value',
+ label: 'Value',
+ description: "Site's CNAME record will point to this value",
+ reqd: 1,
+ },
+ ],
+ primary_action(args) {
+ frm
+ .call('update_dns_record', {
+ value: args.value,
+ })
+ .then(() => {
+ dialog.hide();
+ });
+ },
+ });
+ dialog.show();
+ },
+ __('Dangerous Actions'),
+ );
+
frm.add_custom_button(
__('Move to Group'),
() => {
@@ -285,6 +323,146 @@ Password: ${r.message.password}
},
__('Actions'),
);
+
+ frm.add_custom_button(
+ __('Forcefully Remove Site'),
+ () => {
+ const dialog = new frappe.ui.Dialog({
+ title: __('Forcefully Remove Site'),
+ fields: [
+ {
+ fieldtype: 'Link',
+ options: 'Bench',
+ label: __('Bench'),
+ fieldname: 'bench',
+ reqd: 1,
+ get_query: () => {
+ return {
+ filters: [
+ ['name', '!=', frm.doc.bench],
+ ['status', '!=', 'Archived'],
+ ],
+ };
+ },
+ },
+ {
+ fieldtype: 'Check',
+ label: __("I know what I'm doing"),
+ fieldname: 'confirmation',
+ reqd: 1,
+ },
+ ],
+ });
+
+ dialog.set_primary_action(__('Forcefully Remove Site'), (args) => {
+ if (!args.confirmation) {
+ frappe.throw(__("Please confirm that you know what you're doing"));
+ }
+ frm
+ .call('forcefully_remove_site', {
+ bench: args.bench,
+ })
+ .then((r) => {
+ dialog.hide();
+ frm.refresh();
+ if (r.message.job) {
+ const message = `
+Removing site from **${r.message.bench}**.
+
+Track progress [here](https://${r.message.server}/agent/jobs/${r.message.job}).`;
+ frappe.msgprint(frappe.markdown(message), 'Removing Site');
+ } else {
+ const message = `
+Couldn't remove site from **${r.message.bench}**.
+\`\`\`
+${r.message.error}
+\`\`\``;
+ frappe.msgprint(
+ frappe.markdown(message),
+ 'Failed to Remove Site',
+ );
+ }
+ });
+ });
+
+ dialog.show();
+ },
+ __('Dangerous Actions'),
+ );
+
+ frm.add_custom_button(
+ __('Forcefully Move Site'),
+ () => {
+ const dialog = new frappe.ui.Dialog({
+ title: __('Forcefully Move Site'),
+ fields: [
+ {
+ fieldtype: 'Link',
+ options: 'Bench',
+ label: __('Bench'),
+ fieldname: 'bench',
+ reqd: 1,
+ get_query: () => {
+ return {
+ filters: [
+ ['name', '!=', frm.doc.bench],
+ ['status', '!=', 'Archived'],
+ ],
+ };
+ },
+ },
+ {
+ fieldtype: 'Check',
+ label: __("I know what I'm doing"),
+ fieldname: 'confirmation',
+ reqd: 1,
+ },
+ {
+ fieldtype: 'Check',
+ label: __('Deactivate'),
+ fieldname: 'deactivate',
+ },
+ ],
+ });
+
+ dialog.set_primary_action(__('Forcefully Move Site'), (args) => {
+ if (!args.confirmation) {
+ frappe.throw(__("Please confirm that you know what you're doing"));
+ }
+ frm
+ .call('move_to_bench', {
+ bench: args.bench,
+ deactivate: args.deactivate,
+ })
+ .then(() => {
+ dialog.hide();
+ frm.refresh();
+ });
+ });
+
+ dialog.show();
+ },
+ __('Dangerous Actions'),
+ );
+
+ [
+ [__('Reinstall'), 'reinstall'],
+ [__('Restore'), 'restore_site'],
+ [__('Restore Tables'), 'restore_tables'],
+ [__('Archive'), 'archive', frm.doc.status !== 'Archived'],
+ [__('Cleanup after Archive'), 'cleanup_after_archive'],
+ ].forEach(([label, method]) => {
+ frm.add_custom_button(
+ label,
+ () => {
+ frappe.confirm(
+ `Are you sure you want to perform this action on the site?`,
+ () => frm.call(method).then((r) => frm.refresh()),
+ );
+ },
+ __('Dangerous Actions'),
+ );
+ });
},
});
@@ -302,7 +480,7 @@ function login_as_admin(site_name, reason = null) {
console.log(site_name, res.message.sid);
if (res) {
window.open(
- `https://${site_name}/desk?sid=${res.message.sid}`,
+ `https://${site_name}/app?sid=${res.message.sid}`,
'_blank',
);
}
diff --git a/press/press/doctype/site/site.json b/press/press/doctype/site/site.json
index afb53b5c04d..c18220b723f 100644
--- a/press/press/doctype/site/site.json
+++ b/press/press/doctype/site/site.json
@@ -11,13 +11,14 @@
"status",
"status_before_update",
"server",
- "skip_auto_updates",
"archive_failed",
+ "creation_failed",
"column_break_3",
- "admin_password",
"bench",
"group",
"cluster",
+ "admin_password",
+ "additional_system_user_created",
"config_tab",
"hide_config",
"host_name",
@@ -27,23 +28,28 @@
"config",
"billing_tab",
"team",
- "setup_wizard_complete",
"plan",
"free",
- "column_break_15",
"staging",
+ "column_break_15",
"account_request",
"is_erpnext_setup",
"trial_end_date",
"erpnext_consultant",
- "section_break_6",
- "apps_tab",
- "apps",
- "_keys_removed_in_last_update",
+ "site_usage_section",
+ "disable_site_usage_exceed_check",
+ "site_usage_exceeded",
+ "site_usage_exceeded_on",
+ "site_usage_exceeded_last_checked_on",
+ "column_break_vngj",
+ "last_site_usage_warning_mail_sent_on",
"_site_usages",
+ "current_disk_usage",
"current_cpu_usage",
"current_database_usage",
- "current_disk_usage",
+ "apps_tab",
+ "apps",
+ "_keys_removed_in_last_update",
"deploy_section",
"timezone",
"column_break_29",
@@ -54,11 +60,26 @@
"column_break_34",
"remote_private_file",
"remote_public_file",
+ "backup_tab",
+ "logical_backup_section",
+ "skip_scheduled_logical_backups",
+ "schedule_logical_backup_at_custom_time",
+ "logical_backup_times",
+ "physical_backup_section",
+ "skip_scheduled_physical_backups",
+ "schedule_physical_backup_at_custom_time",
+ "physical_backup_times",
+ "flags1_section",
+ "allow_physical_backup_by_user",
"tab_break_46",
"notifications_section",
- "notify_email",
+ "communication_infos",
+ "monitoring_section",
+ "is_monitoring_disabled",
+ "reason_for_disabling_monitoring",
"auto_updates_section",
- "auto_updates_scheduled",
+ "skip_auto_updates",
+ "only_update_at_specified_time",
"auto_update_last_triggered_on",
"column_break_53",
"update_trigger_frequency",
@@ -67,19 +88,22 @@
"update_on_weekday",
"update_end_of_month",
"update_on_day_of_month",
+ "setup_wizard_status_section",
+ "setup_wizard_complete",
+ "column_break_vbgj",
+ "setup_wizard_status_check_retries",
+ "setup_wizard_status_check_next_retry_on",
"database_section",
- "is_database_access_enabled",
- "database_access_mode",
- "column_break_lfuz",
- "database_access_user",
- "database_access_password",
+ "database_access_connection_limit",
"saas_section",
"is_standby",
"standby_for",
+ "standby_for_product",
+ "hybrid_for",
+ "signup_time",
"column_break_63",
"hybrid_saas_pool",
- "backups_section",
- "skip_scheduled_backups",
+ "saas_communication_secret",
"tags_section",
"tags"
],
@@ -119,9 +143,10 @@
"in_list_view": 1,
"in_standard_filter": 1,
"label": "Status",
- "options": "Pending\nInstalling\nUpdating\nActive\nInactive\nBroken\nArchived\nSuspended",
+ "options": "Pending\nInstalling\nUpdating\nRecovering\nActive\nInactive\nBroken\nArchived\nSuspended",
"read_only": 1,
- "reqd": 1
+ "reqd": 1,
+ "search_index": 1
},
{
"fieldname": "admin_password",
@@ -136,12 +161,6 @@
"hide_days": 1,
"hide_seconds": 1
},
- {
- "fieldname": "section_break_6",
- "fieldtype": "Section Break",
- "hide_days": 1,
- "hide_seconds": 1
- },
{
"default": "{}",
"fieldname": "config",
@@ -158,7 +177,8 @@
"hide_days": 1,
"hide_seconds": 1,
"label": "Subdomain",
- "reqd": 1
+ "reqd": 1,
+ "search_index": 1
},
{
"fieldname": "team",
@@ -169,7 +189,8 @@
"in_standard_filter": 1,
"label": "Team",
"options": "Team",
- "reqd": 1
+ "reqd": 1,
+ "search_index": 1
},
{
"collapsible": 1,
@@ -265,7 +286,7 @@
"fieldtype": "Link",
"label": "Domain",
"options": "Root Domain",
- "set_only_once": 1
+ "search_index": 1
},
{
"fieldname": "_site_usages",
@@ -304,7 +325,8 @@
"label": "Release Group",
"options": "Release Group",
"read_only": 1,
- "reqd": 1
+ "reqd": 1,
+ "search_index": 1
},
{
"fieldname": "column_break_29",
@@ -351,7 +373,7 @@
"fieldname": "plan",
"fieldtype": "Link",
"label": "Plan",
- "options": "Plan",
+ "options": "Site Plan",
"read_only": 1
},
{
@@ -366,11 +388,6 @@
"fieldtype": "Check",
"label": "Skip Auto Updates"
},
- {
- "fieldname": "notify_email",
- "fieldtype": "Data",
- "label": "Notify Email"
- },
{
"collapsible": 1,
"fieldname": "notifications_section",
@@ -433,12 +450,6 @@
"fieldtype": "Time",
"label": "Update Trigger Time"
},
- {
- "default": "0",
- "fieldname": "auto_updates_scheduled",
- "fieldtype": "Check",
- "label": "Auto Updates Scheduled"
- },
{
"fieldname": "column_break_12",
"fieldtype": "Column Break"
@@ -455,13 +466,6 @@
"fieldtype": "Section Break",
"label": "Database Access"
},
- {
- "default": "0",
- "fieldname": "is_database_access_enabled",
- "fieldtype": "Check",
- "label": "Is Database Access Enabled",
- "read_only": 1
- },
{
"fieldname": "standby_for",
"fieldtype": "Link",
@@ -476,7 +480,7 @@
"collapsible": 1,
"fieldname": "saas_section",
"fieldtype": "Section Break",
- "label": "SaaS"
+ "label": "SaaS / Signup"
},
{
"fieldname": "column_break_63",
@@ -532,51 +536,212 @@
"label": "Hide Config"
},
{
- "fieldname": "database_access_password",
- "fieldtype": "Password",
- "label": "Database Access Password",
- "read_only": 1
+ "collapsible": 1,
+ "fieldname": "tags_section",
+ "fieldtype": "Section Break",
+ "label": "Tags"
},
{
- "fieldname": "database_access_mode",
- "fieldtype": "Select",
- "label": "Database Access Mode",
- "options": "\nread_only\nread_write",
- "read_only": 1
+ "fieldname": "tags",
+ "fieldtype": "Table",
+ "label": "Tags",
+ "options": "Resource Tag"
},
{
- "fieldname": "column_break_lfuz",
- "fieldtype": "Column Break"
+ "fieldname": "standby_for_product",
+ "fieldtype": "Link",
+ "label": "Standby for Product",
+ "options": "Product Trial",
+ "search_index": 1
},
{
- "fieldname": "database_access_user",
+ "default": "0",
+ "fieldname": "only_update_at_specified_time",
+ "fieldtype": "Check",
+ "label": "Only update at specified time"
+ },
+ {
+ "default": "0",
+ "fieldname": "additional_system_user_created",
+ "fieldtype": "Check",
+ "label": "Additional System User Created"
+ },
+ {
+ "description": "This key will be used to validate requests from saas site for billing & configuration purpose",
+ "fieldname": "saas_communication_secret",
"fieldtype": "Data",
- "label": "Database Access User",
- "read_only": 1
+ "label": "SaaS Communication Secret"
},
{
"collapsible": 1,
- "fieldname": "backups_section",
+ "fieldname": "setup_wizard_status_section",
"fieldtype": "Section Break",
- "label": "Backups"
+ "label": "Setup Wizard Status"
+ },
+ {
+ "fieldname": "column_break_vbgj",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "setup_wizard_status_check_retries",
+ "fieldtype": "Int",
+ "label": "Retries"
+ },
+ {
+ "fieldname": "setup_wizard_status_check_next_retry_on",
+ "fieldtype": "Datetime",
+ "label": "Next Retry On"
+ },
+ {
+ "default": "16",
+ "fieldname": "database_access_connection_limit",
+ "fieldtype": "Int",
+ "label": "Database Access Connection Limit"
+ },
+ {
+ "fieldname": "signup_time",
+ "fieldtype": "Datetime",
+ "label": "Signup Time"
+ },
+ {
+ "fieldname": "backup_tab",
+ "fieldtype": "Tab Break",
+ "label": "Backup"
},
{
"default": "0",
- "fieldname": "skip_scheduled_backups",
+ "fieldname": "allow_physical_backup_by_user",
"fieldtype": "Check",
- "label": "Skip Scheduled Backups"
+ "label": "Allow Physical Backup By User"
},
{
- "collapsible": 1,
- "fieldname": "tags_section",
+ "collapsible_depends_on": "eval: doc.schedule_logical_backup_at_custom_time",
+ "fieldname": "logical_backup_section",
"fieldtype": "Section Break",
- "label": "Tags"
+ "label": "Logical Backup"
},
{
- "fieldname": "tags",
+ "fieldname": "logical_backup_times",
"fieldtype": "Table",
- "label": "Tags",
- "options": "Resource Tag"
+ "label": "Logical Backup Times",
+ "options": "Site Backup Time"
+ },
+ {
+ "collapsible_depends_on": "eval: doc.schedule_physical_backup_at_custom_time",
+ "fieldname": "physical_backup_section",
+ "fieldtype": "Section Break",
+ "label": "Physical Backup"
+ },
+ {
+ "default": "0",
+ "fieldname": "schedule_physical_backup_at_custom_time",
+ "fieldtype": "Check",
+ "label": "Schedule Physical Backup at Custom Time"
+ },
+ {
+ "fieldname": "physical_backup_times",
+ "fieldtype": "Table",
+ "label": "Physical Backup Times",
+ "options": "Site Backup Time"
+ },
+ {
+ "fieldname": "flags1_section",
+ "fieldtype": "Section Break",
+ "label": "Flags"
+ },
+ {
+ "default": "0",
+ "fieldname": "schedule_logical_backup_at_custom_time",
+ "fieldtype": "Check",
+ "label": "Schedule Logical Backup at Custom Time"
+ },
+ {
+ "default": "0",
+ "fieldname": "skip_scheduled_logical_backups",
+ "fieldtype": "Check",
+ "label": "Skip Scheduled Logical Backups"
+ },
+ {
+ "default": "1",
+ "fieldname": "skip_scheduled_physical_backups",
+ "fieldtype": "Check",
+ "label": "Skip Scheduled Physical Backups"
+ },
+ {
+ "fieldname": "hybrid_for",
+ "fieldtype": "Link",
+ "label": "Hybrid For",
+ "options": "App"
+ },
+ {
+ "fieldname": "site_usage_section",
+ "fieldtype": "Section Break",
+ "label": "Site Usage"
+ },
+ {
+ "depends_on": "eval: doc.site_usage_exceeded",
+ "fieldname": "site_usage_exceeded_on",
+ "fieldtype": "Datetime",
+ "label": "Site Usage Exceeded On"
+ },
+ {
+ "fieldname": "column_break_vngj",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "site_usage_exceeded",
+ "fieldtype": "Check",
+ "label": "Site Usage Exceeded"
+ },
+ {
+ "depends_on": "eval: doc.site_usage_exceeded",
+ "fieldname": "last_site_usage_warning_mail_sent_on",
+ "fieldtype": "Datetime",
+ "label": "Last Site Usage Warning Mail Sent On"
+ },
+ {
+ "fieldname": "site_usage_exceeded_last_checked_on",
+ "fieldtype": "Datetime",
+ "label": "Site Usage Exceeded Last Checked On"
+ },
+ {
+ "default": "0",
+ "fieldname": "disable_site_usage_exceed_check",
+ "fieldtype": "Check",
+ "label": "Disable Site Usage Exceed Check"
+ },
+ {
+ "fieldname": "communication_infos",
+ "fieldtype": "Table",
+ "label": "Communication Infos",
+ "options": "Communication Info"
+ },
+ {
+ "default": "0",
+ "fieldname": "is_monitoring_disabled",
+ "fieldtype": "Check",
+ "label": "Is Monitoring Disabled",
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "monitoring_section",
+ "fieldtype": "Section Break",
+ "label": "Monitoring"
+ },
+ {
+ "fieldname": "reason_for_disabling_monitoring",
+ "fieldtype": "Data",
+ "label": "Reason for Disabling Monitoring",
+ "read_only": 1
+ },
+ {
+ "fieldname": "creation_failed",
+ "fieldtype": "Datetime",
+ "label": "Creation Failed",
+ "read_only": 1
}
],
"links": [
@@ -637,16 +802,26 @@
},
{
"group": "Related Documents",
- "link_doctype": "Central Site Migration",
+ "link_doctype": "Marketplace App Subscription",
"link_fieldname": "site"
},
{
"group": "Related Documents",
- "link_doctype": "Marketplace App Subscription",
+ "link_doctype": "Site Access Token",
+ "link_fieldname": "site"
+ },
+ {
+ "group": "Related Documents",
+ "link_doctype": "Site Database User",
+ "link_fieldname": "site"
+ },
+ {
+ "group": "Related Documents",
+ "link_doctype": "Physical Backup Restoration",
"link_fieldname": "site"
}
],
- "modified": "2023-07-02 00:08:36.031427",
+ "modified": "2025-12-12 22:26:41.654346",
"modified_by": "Administrator",
"module": "Press",
"name": "Site",
@@ -675,8 +850,18 @@
"read": 1,
"role": "Press Member",
"write": 1
+ },
+ {
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Support Agent",
+ "share": 1
}
],
+ "row_format": "Dynamic",
"sort_field": "creation",
"sort_order": "DESC",
"states": [],
diff --git a/press/press/doctype/site/site.py b/press/press/doctype/site/site.py
index 0826b2d3c0c..c1388f6ae6c 100644
--- a/press/press/doctype/site/site.py
+++ b/press/press/doctype/site/site.py
@@ -1,42 +1,410 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2021, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
+
import json
-import re
from collections import defaultdict
-from datetime import datetime
-from typing import Any, Dict, List
+from contextlib import suppress
+from datetime import datetime, timedelta
+from functools import cached_property, wraps
+from typing import Any, Literal
-import boto3
import dateutil.parser
import frappe
+import frappe.utils
+import pytz
import requests
+import rq
+from frappe import _, has_permission
from frappe.core.utils import find
-from frappe.frappeclient import FrappeClient
+from frappe.frappeclient import FrappeClient, FrappeException
from frappe.model.document import Document
from frappe.model.naming import append_number_if_name_exists
-from frappe.utils import cint, cstr, get_datetime
+from frappe.utils import (
+ add_to_date,
+ cint,
+ cstr,
+ flt,
+ get_datetime,
+ get_url,
+ now_datetime,
+ sbool,
+ time_diff_in_hours,
+)
+
+from press.access.actions import SiteActions
+from press.access.decorators import action_guard
+from press.access.support_access import has_support_access
+from press.exceptions import (
+ CannotChangePlan,
+ InsufficientSpaceOnServer,
+ SiteAlreadyArchived,
+ SiteUnderMaintenance,
+ VolumeResizeLimitError,
+)
+from press.guards import role_guard
+from press.marketplace.doctype.marketplace_app_plan.marketplace_app_plan import (
+ MarketplaceAppPlan,
+)
+from press.press.doctype.communication_info.communication_info import get_communication_info
+from press.press.doctype.server.server import Server
+from press.saas.doctype.product_trial.product_trial import create_free_app_subscription
+from press.utils.jobs import has_job_timeout_exceeded
+from press.utils.telemetry import capture
+from press.utils.webhook import create_webhook_event
try:
from frappe.utils import convert_utc_to_user_timezone
except ImportError:
- from frappe.utils import convert_utc_to_system_timezone as convert_utc_to_user_timezone
+ from frappe.utils import (
+ convert_utc_to_system_timezone as convert_utc_to_user_timezone,
+ )
+from typing import TYPE_CHECKING
+
+from frappe.permissions import is_system_user
from frappe.utils.password import get_decrypted_password
-from frappe.utils.user import is_system_user
-from press.agent import Agent
-from press.api.site import check_dns
+from press.agent import Agent, AgentRequestSkippedException
+from press.api.client import dashboard_whitelist
from press.overrides import get_permission_query_conditions_for_doctype
-from press.press.doctype.marketplace_app.marketplace_app import marketplace_app_hook
-from press.press.doctype.plan.plan import get_plan_config
+from press.press.doctype.marketplace_app.marketplace_app import (
+ get_plans_for_app,
+ marketplace_app_hook,
+)
+from press.press.doctype.resource_tag.tag_helpers import TagHelpers
+from press.press.doctype.server.server import is_dedicated_server
from press.press.doctype.site_activity.site_activity import log_site_activity
from press.press.doctype.site_analytics.site_analytics import create_site_analytics
-from press.utils import convert, get_client_blacklisted_keys, guess_type, log_error
+from press.press.doctype.site_plan.site_plan import UNLIMITED_PLANS, get_plan_config
+from press.press.report.mariadb_slow_queries.mariadb_slow_queries import (
+ get_doctype_name,
+)
+from press.utils import (
+ convert,
+ fmt_timedelta,
+ get_client_blacklisted_keys,
+ get_current_team,
+ get_last_doc,
+ guess_type,
+ human_readable,
+ is_list,
+ log_error,
+ unique,
+ validate_subdomain,
+)
+from press.utils.dns import _change_dns_record, check_dns_cname_a, create_dns_record
+
+if TYPE_CHECKING:
+ from datetime import datetime
+
+ from frappe.types import DF
+ from frappe.types.DF import Table
+
+ from press.press.doctype.account_request.account_request import AccountRequest
+ from press.press.doctype.agent_job.agent_job import AgentJob
+ from press.press.doctype.bench.bench import Bench
+ from press.press.doctype.bench_app.bench_app import BenchApp
+ from press.press.doctype.database_server.database_server import DatabaseServer
+ from press.press.doctype.deploy_candidate.deploy_candidate import DeployCandidate
+ from press.press.doctype.deploy_candidate_app.deploy_candidate_app import DeployCandidateApp
+ from press.press.doctype.release_group.release_group import ReleaseGroup
+ from press.press.doctype.server.server import BaseServer, Server
+ from press.press.doctype.site_backup.site_backup import SiteBackup
+ from press.press.doctype.site_domain.site_domain import SiteDomain
+ from press.press.doctype.tls_certificate.tls_certificate import TLSCertificate
+
+DOCTYPE_SERVER_TYPE_MAP = {
+ "Server": "Application",
+ "Database Server": "Database",
+ "Proxy Server": "Proxy",
+}
+
+ARCHIVE_AFTER_SUSPEND_DAYS = 21
+CREATION_FAILURE_RETENTION_DAYS = 14
+PRIVATE_BENCH_DOC = "https://docs.frappe.io/cloud/sites/move-site-to-private-bench"
+SERVER_SCRIPT_DISABLED_VERSION = (
+ 15 # version from which server scripts were disabled on public benches. No longer set in site
+)
+TRANSITORY_STATES = ["Updating", "Recovering", "Pending", "Installing"]
+
+
+class Site(Document, TagHelpers):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.communication_info.communication_info import CommunicationInfo
+ from press.press.doctype.resource_tag.resource_tag import ResourceTag
+ from press.press.doctype.site_app.site_app import SiteApp
+ from press.press.doctype.site_backup_time.site_backup_time import SiteBackupTime
+ from press.press.doctype.site_config.site_config import SiteConfig
+
+ _keys_removed_in_last_update: DF.Data | None
+ _site_usages: DF.Data | None
+ account_request: DF.Link | None
+ additional_system_user_created: DF.Check
+ admin_password: DF.Password | None
+ allow_physical_backup_by_user: DF.Check
+ apps: DF.Table[SiteApp]
+ archive_failed: DF.Check
+ auto_update_last_triggered_on: DF.Datetime | None
+ bench: DF.Link
+ cluster: DF.Link
+ communication_infos: DF.Table[CommunicationInfo]
+ config: DF.Code | None
+ configuration: DF.Table[SiteConfig]
+ creation_failed: DF.Datetime | None
+ current_cpu_usage: DF.Int
+ current_database_usage: DF.Int
+ current_disk_usage: DF.Int
+ database_access_connection_limit: DF.Int
+ database_name: DF.Data | None
+ disable_site_usage_exceed_check: DF.Check
+ domain: DF.Link | None
+ erpnext_consultant: DF.Link | None
+ free: DF.Check
+ group: DF.Link
+ hide_config: DF.Check
+ host_name: DF.Data | None
+ hybrid_for: DF.Link | None
+ hybrid_saas_pool: DF.Link | None
+ is_erpnext_setup: DF.Check
+ is_monitoring_disabled: DF.Check
+ is_standby: DF.Check
+ last_site_usage_warning_mail_sent_on: DF.Datetime | None
+ logical_backup_times: DF.Table[SiteBackupTime]
+ only_update_at_specified_time: DF.Check
+ physical_backup_times: DF.Table[SiteBackupTime]
+ plan: DF.Link | None
+ reason_for_disabling_monitoring: DF.Data | None
+ remote_config_file: DF.Link | None
+ remote_database_file: DF.Link | None
+ remote_private_file: DF.Link | None
+ remote_public_file: DF.Link | None
+ saas_communication_secret: DF.Data | None
+ schedule_logical_backup_at_custom_time: DF.Check
+ schedule_physical_backup_at_custom_time: DF.Check
+ server: DF.Link
+ setup_wizard_complete: DF.Check
+ setup_wizard_status_check_next_retry_on: DF.Datetime | None
+ setup_wizard_status_check_retries: DF.Int
+ signup_time: DF.Datetime | None
+ site_usage_exceeded: DF.Check
+ site_usage_exceeded_last_checked_on: DF.Datetime | None
+ site_usage_exceeded_on: DF.Datetime | None
+ skip_auto_updates: DF.Check
+ skip_failing_patches: DF.Check
+ skip_scheduled_logical_backups: DF.Check
+ skip_scheduled_physical_backups: DF.Check
+ staging: DF.Check
+ standby_for: DF.Link | None
+ standby_for_product: DF.Link | None
+ status: DF.Literal[
+ "Pending",
+ "Installing",
+ "Updating",
+ "Recovering",
+ "Active",
+ "Inactive",
+ "Broken",
+ "Archived",
+ "Suspended",
+ ]
+ status_before_update: DF.Data | None
+ subdomain: DF.Data
+ tags: DF.Table[ResourceTag]
+ team: DF.Link
+ timezone: DF.Data | None
+ trial_end_date: DF.Date | None
+ update_end_of_month: DF.Check
+ update_on_day_of_month: DF.Int
+ update_on_weekday: DF.Literal[
+ "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"
+ ]
+ update_trigger_frequency: DF.Literal["Daily", "Weekly", "Monthly"]
+ update_trigger_time: DF.Time | None
+ # end: auto-generated types
+
+ DOCTYPE = "Site"
+
+ dashboard_fields = (
+ "ip",
+ "status",
+ "group",
+ "team",
+ "plan",
+ "setup_wizard_complete",
+ "archive_failed",
+ "cluster",
+ "bench",
+ "group",
+ "database_access_connection_limit",
+ "trial_end_date",
+ "tags",
+ "server",
+ "host_name",
+ "skip_auto_updates",
+ "additional_system_user_created",
+ "label",
+ "signup_time",
+ "account_request",
+ "allow_physical_backup_by_user",
+ "site_usage_exceeded",
+ "is_monitoring_disabled",
+ "reason_for_disabling_monitoring",
+ "creation_failed",
+ )
+
+ @staticmethod
+ def get_list_query(query, filters=None, **list_args):
+ from press.press.doctype.site_update.site_update import (
+ benches_with_available_update,
+ )
+
+ Site = frappe.qb.DocType("Site")
+
+ status = filters.get("status")
+ if status == "Archived":
+ sites = query.where(Site.status == status).run(as_dict=1)
+ else:
+ benches_with_available_update = benches_with_available_update()
+ sites = query.where(Site.status != "Archived").select(Site.bench).run(as_dict=1)
+
+ for site in sites:
+ if site.bench in benches_with_available_update:
+ site.status = "Update Available"
+
+ return sites
+
+ @staticmethod
+ def on_not_found(name):
+ # If name is a custom domain then redirect to the site name
+ site_name = frappe.db.get_value("Site Domain", name, "site")
+ if site_name:
+ frappe.response.message = {
+ "redirect": f"/dashboard/sites/{site_name}",
+ }
+ raise
+
+ @property
+ def database_server_name(self) -> str:
+ return frappe.get_value("Server", self.server, "database_server")
+
+ @property
+ def app_server_agent(self) -> Agent:
+ return Agent(self.server)
+
+ @property
+ def database_server_agent(self) -> Agent:
+ return Agent(self.database_server_name, server_type="Database Server")
+
+ def get_doc(self, doc):
+ from press.api.client import get
+
+ group = frappe.db.get_value(
+ "Release Group",
+ self.group,
+ ["title", "public", "team", "central_bench", "version"],
+ as_dict=1,
+ )
+ doc.group_title = group.title
+ doc.version = group.version
+ doc.group_team = group.team
+ doc.group_public = group.public or group.central_bench
+ doc.latest_frappe_version = frappe.db.get_value(
+ "Frappe Version", {"status": "Stable", "public": True}, order_by="name desc"
+ )
+ doc.eol_versions = frappe.db.get_all(
+ "Frappe Version",
+ filters={"status": "End of Life"},
+ fields=["name"],
+ order_by="name desc",
+ pluck="name",
+ )
+ doc.owner_email = frappe.db.get_value("Team", self.team, "user")
+ doc.current_usage = self.current_usage
+ doc.current_plan = get("Site Plan", self.plan) if self.plan else None
+ doc.last_updated = self.last_updated
+ doc.creation_failure_retention_days = CREATION_FAILURE_RETENTION_DAYS
+ doc.has_scheduled_updates = bool(
+ frappe.db.exists("Site Update", {"site": self.name, "status": "Scheduled"})
+ )
+ doc.update_information = self.get_update_information()
+ doc.actions = self.get_actions()
+ server = frappe.get_value(
+ "Server", self.server, ["ip", "proxy_server", "team", "title", "provider"], as_dict=1
+ )
+ doc.cluster = frappe.db.get_value("Cluster", self.cluster, ["title", "image"], as_dict=1)
+ doc.outbound_ip = server.ip
+ doc.server_team = server.team
+ doc.server_title = server.title
+ doc.server_provider = server.provider
+ doc.plan_provider = server.provider
+ if not frappe.db.exists("Cloud Provider", server.provider):
+ # fallback for unlisted providers (Scaleway, Generic) to show available plans
+ doc.plan_provider = "AWS EC2"
+ doc.inbound_ip = self.inbound_ip
+ doc.is_dedicated_server = is_dedicated_server(self.server)
+ doc.suspension_reason = (
+ frappe.db.get_value("Site Activity", {"site": self.name, "action": "Suspend Site"}, "reason")
+ if self.status == "Suspended"
+ else None
+ )
+ doc.communication_infos = self.get_communication_infos()
+ if doc.owner == "Administrator":
+ doc.signup_by = frappe.db.get_value("Account Request", doc.account_request, "email")
+ if broken_domain_tls_certificate := frappe.db.get_value(
+ "Site Domain", {"site": self.name, "status": "Broken"}, "tls_certificate"
+ ):
+ doc.broken_domain_error, doc.tls_cert_retry_count = frappe.db.get_values(
+ "TLS Certificate", broken_domain_tls_certificate, ("error", "retry_count")
+ )[0]
+
+ return doc
+
+ def site_action(allowed_status: list[str], disallowed_message: str | dict[str, str] | None = None):
+ def outer_wrapper(func):
+ @wraps(func)
+ def wrapper(inst, *args, **kwargs):
+ user_type = frappe.session.data.user_type or frappe.get_cached_value(
+ "User", frappe.session.user, "user_type"
+ )
+ if user_type == "System User":
+ return func(inst, *args, **kwargs)
+ if has_support_access(inst.doctype, inst.name):
+ return func(inst, *args, **kwargs)
+
+ status, creation_failed = frappe.get_value(
+ inst.doctype, inst.name, ["status", "creation_failed"], for_update=True
+ )
+ action_name_refined = func.__name__.replace("_", " ")
+
+ if status not in allowed_status:
+ if disallowed_message and isinstance(disallowed_message, str):
+ frappe.throw(disallowed_message)
+ elif disallowed_message and status in disallowed_message:
+ custom_message = disallowed_message[status]
+ frappe.throw(custom_message)
+ else:
+ frappe.throw(
+ f"Site is in {frappe.bold(status.lower())} state. Your site have to be active to {frappe.bold(action_name_refined)}."
+ )
+
+ check_allowed_actions(creation_failed, func.__name__, action_name_refined)
+
+ return func(inst, *args, **kwargs)
+
+ return wrapper
+
+ return outer_wrapper
-class Site(Document):
def _get_site_name(self, subdomain: str):
"""Get full site domain name given subdomain."""
if not self.domain:
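The `site_action` decorator above lets system users and support-access holders through unconditionally, then gates everyone else on site status, with optional per-status override messages. A hypothetical declaration inside this module (the method body and messages are illustrative):

```python
# Hypothetical usage; "Active" is the only allowed state here, and
# suspended sites get a tailored message instead of the generic one.
@dashboard_whitelist()
@site_action(["Active"], {"Suspended": "Unsuspend the site before clearing its cache."})
def clear_site_cache(self):
    ...
```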
@@ -46,42 +414,70 @@ def _get_site_name(self, subdomain: str):
def autoname(self):
self.name = self._get_site_name(self.subdomain)
+ @role_guard.action()
def validate(self):
if self.has_value_changed("subdomain"):
self.validate_site_name()
+ self.validate_bench()
self.set_site_admin_password()
self.validate_installed_apps()
self.validate_host_name()
self.validate_site_config()
self.validate_auto_update_fields()
+ self.validate_site_plan()
+ self.validate_backup_times()
def before_insert(self):
+ if not self.bench and self.group:
+ if self.server and self.team != "Administrator": # Check to avoid standby sites
+ self.set_bench_for_server()
+ else:
+ self.set_latest_bench()
# initialize site.config based on plan
self._update_configuration(self.get_plan_config(), save=False)
- if not self.notify_email and self.team != "Administrator":
- self.notify_email = frappe.db.get_value("Team", self.team, "notify_email")
- def validate_site_name(self):
- site_regex = r"^[a-z0-9][a-z0-9-]*[a-z0-9]$"
- if not re.match(site_regex, self.subdomain):
+ if not self.setup_wizard_status_check_next_retry_on:
+ self.setup_wizard_status_check_next_retry_on = now_datetime()
+
+ if (
+ self.server
+ and frappe.get_value("Server", self.server, "enable_logical_replication_during_site_update")
+ and frappe.db.count("Site", {"server": self.server, "status": ("!=", "Archived")}) >= 1
+ ):
frappe.throw(
- "Subdomain contains invalid characters. Use lowercase"
- " characters, numbers and hyphens"
+ "Logical replication is enabled for this server. You can only deploy a single site on the server."
)
- if len(self.subdomain) > 32:
- frappe.throw("Subdomain too long. Use 32 or less characters")
- if len(self.subdomain) < 5:
- frappe.throw("Subdomain too short. Use 5 or more characters")
+ def validate_site_name(self):
+ validate_subdomain(self.subdomain)
def set_site_admin_password(self):
# set site.admin_password if doesn't exist
if not self.admin_password:
self.admin_password = frappe.generate_hash(length=16)
+ def validate_bench(self):
+ if (
+ self.status not in ("Broken", "Archived")
+ and frappe.db.get_value("Bench", self.bench, "status", for_update=True) == "Archived"
+ ):
+ frappe.throw(
+ f"Bench {self.bench} is not active. Please try again if you've deployed a new bench."
+ )
+
+ bench_group = frappe.db.get_value("Bench", self.bench, "group")
+ if bench_group != self.group:
+ frappe.throw(
+ f"Bench release group {bench_group} is not the same as site release group {self.group}."
+ )
+
+ bench_server = frappe.db.get_value("Bench", self.bench, "server")
+ if bench_server != self.server:
+ frappe.throw(f"Bench server {bench_server} is not the same as site server {self.server}.")
+
def validate_installed_apps(self):
# validate apps to be installed on site
- bench_apps = frappe.get_doc("Bench", self.bench).apps
+ bench_apps: Table[BenchApp] = frappe.get_doc("Bench", self.bench).apps
for app in self.apps:
if not find(bench_apps, lambda x: x.app == app.app):
frappe.throw(f"app {app.app} is not available on Bench {self.bench}.")
@@ -95,8 +491,13 @@ def validate_installed_apps(self):
# Install apps in the same order as bench
if self.is_new():
- bench_app_names = [app.app for app in bench_apps]
- self.apps.sort(key=lambda x: bench_app_names.index(x.app))
+ self.sort_apps(bench_apps)
+
+ def sort_apps(self, bench_apps: Table[BenchApp]):
+ bench_app_names = [app.app for app in bench_apps]
+ self.apps.sort(key=lambda x: bench_app_names.index(x.app))
+ for idx, app in enumerate(self.apps):
+ app.idx = idx + 1
def validate_host_name(self):
# set or update site.host_name
@@ -110,21 +511,133 @@ def validate_site_config(self):
# update site._keys_removed_in_last_update value
old_keys = json.loads(self.config)
new_keys = [x.key for x in self.configuration]
- self._keys_removed_in_last_update = json.dumps(
- [x for x in old_keys if x not in new_keys]
- )
+ self._keys_removed_in_last_update = json.dumps([x for x in old_keys if x not in new_keys])
# generate site.config from site.configuration
self.update_config_preview()
# create an agent request if config has been updated
# if not self.is_new() and self.has_value_changed("config"):
- # Agent(self.server).update_site_config(self)
+ # Agent(self.server).update_site_config(self)
def validate_auto_update_fields(self):
# Validate day of month
if not (1 <= self.update_on_day_of_month <= 31):
frappe.throw("Day of the month must be between 1 and 31 (included)!")
+ # If site is on public bench, don't allow to disable auto updates
+ if self.skip_auto_updates and self.is_group_public:
+ frappe.throw(
+ "Auto updates can't be disabled for sites on public benches! Please move to a private bench."
+ )
+
+ def validate_site_plan(self): # noqa: C901
+ if hasattr(self, "subscription_plan") and self.subscription_plan:
+ """
+ If `release_groups` in site plan is empty, then site can be deployed in any release group.
+ Otherwise, site can only be deployed in the clusters mentioned in the release groups.
+ """
+ release_groups = frappe.db.get_all(
+ "Site Plan Release Group",
+ pluck="release_group",
+ filters={
+ "parenttype": "Site Plan",
+ "parentfield": "release_groups",
+ "parent": self.subscription_plan,
+ },
+ )
+ clusters = frappe.db.get_all("Bench", pluck="cluster", filters={"group": ("in", release_groups)})
+ is_valid = len(clusters) == 0 or self.cluster in clusters
+ if not is_valid:
+ frappe.throw(f"In {self.subscription_plan}, you can't deploy site in {self.cluster} cluster")
+
+ """
+ If `allowed_apps` in site plan is empty, then site can be deployed with any apps.
+ Otherwise, site can only be deployed with the apps mentioned in the site plan.
+ """
+ allowed_apps = frappe.db.get_all(
+ "Site Plan Allowed App",
+ pluck="app",
+ filters={
+ "parenttype": "Site Plan",
+ "parentfield": "allowed_apps",
+ "parent": self.subscription_plan,
+ },
+ )
+ if allowed_apps:
+ selected_apps = [app.app for app in self.apps]
+
+ for app in selected_apps:
+ if app not in allowed_apps:
+ frappe.throw(f"In {self.subscription_plan}, you can't deploy site with {app} app")
+
+ plan = frappe.db.get_value(
+ "Site Plan",
+ self.subscription_plan,
+ ["dedicated_server_plan", "price_inr", "price_usd", "is_trial_plan"],
+ as_dict=True,
+ )
+ is_site_on_public_server = frappe.db.get_value("Server", self.server, "public")
+
+ # Don't allow free plan for non-system users
+ if not is_system_user():
+ is_plan_free = (plan.price_inr == 0 or plan.price_usd == 0) and not (
+ plan.dedicated_server_plan or plan.is_trial_plan
+ )
+ if is_plan_free:
+ frappe.throw("You can't select a free plan!")
+
+ # If site is on public server, don't allow unlimited plans
+ if is_site_on_public_server and plan.dedicated_server_plan:
+ self.subscription_plan = frappe.db.get_value(
+ "Site Plan",
+ {
+ "private_benches": 1,
+ "dedicated_server_plan": 0,
+ "document_type": "Site",
+ "price_inr": ["!=", 0],
+ },
+ order_by="price_inr asc",
+ )
+
+ # If site is on dedicated server, set unlimited plan
+ elif not plan.dedicated_server_plan and not is_site_on_public_server:
+ self.subscription_plan = frappe.db.get_value(
+ "Site Plan",
+ {
+ "dedicated_server_plan": 1,
+ "document_type": "Site",
+ "support_included": 0,
+ },
+ )
+
+ def validate_backup_times(self):
+ if self.schedule_logical_backup_at_custom_time and len(self.logical_backup_times) == 0:
+ frappe.throw(
+ "You are trying to enable logical backup schedule at custom time, but you have not set any backup times for it."
+ )
+
+ if self.schedule_physical_backup_at_custom_time and len(self.physical_backup_times) == 0:
+ frappe.throw(
+ "You are trying to enable physical backup schedule at custom time, but you have not set any backup times for it."
+ )
+
+ selected_backup_hours = [
+ (frappe.utils.get_time(x.backup_time).hour) for x in self.logical_backup_times
+ ] + [(frappe.utils.get_time(x.backup_time).hour) for x in self.physical_backup_times]
+
+ backup_hours = set()
+ for h in selected_backup_hours:
+ if h in backup_hours:
+ frappe.throw(f"Multiple backups have been scheduled at the same hour: {h}:00:00")
+ backup_hours.add(h)
+
+ def capture_signup_event(self, event: str):
+ team = frappe.get_doc("Team", self.team)
+ if frappe.db.count("Site", {"team": team.name}) <= 1 and team.account_request:
+ account_request: AccountRequest = frappe.get_doc("Account Request", team.account_request)
+ if not (account_request.is_saas_signup() or account_request.invited_by_parent_team):
+ capture(event, "fc_signup", team.user)
def on_update(self):
if self.status == "Active" and self.has_value_changed("host_name"):
@@ -134,11 +647,36 @@ def on_update(self):
self.update_subscription()
- if self.status not in ["Pending", "Archived", "Suspended"] and self.has_value_changed(
- "subdomain"
- ):
+ if self.has_value_changed("team"):
+ frappe.db.set_value("Site Domain", {"site": self.name}, "team", self.team)
+
+ if self.status not in [
+ "Pending",
+ "Archived",
+ "Suspended",
+ ] and (self.has_value_changed("subdomain") or self.has_value_changed("domain")):
self.rename(self._get_site_name(self.subdomain))
+ # Telemetry: Send event if first site status changed to Active
+ if self.status == "Active" and self.has_value_changed("status"):
+ self.capture_signup_event("first_site_status_changed_to_active")
+
+ if self.has_value_changed("status"):
+ create_site_status_update_webhook_event(self.name)
+
+ def generate_saas_communication_secret(self, create_agent_job=False, save=True):
+ if not self.standby_for and not self.standby_for_product:
+ return
+ if not self.saas_communication_secret:
+ self.saas_communication_secret = frappe.generate_hash(length=32)
+ config = {
+ "fc_communication_secret": self.saas_communication_secret,
+ }
+ if create_agent_job:
+ self.update_site_config(config)
+ else:
+ self._update_configuration(config=config, save=save)
+
def rename_upstream(self, new_name: str):
proxy_server = frappe.db.get_value("Server", self.server, "proxy_server")
agent = Agent(proxy_server, server_type="Proxy Server")
@@ -147,10 +685,25 @@ def rename_upstream(self, new_name: str):
)
agent.rename_upstream_site(self.server, self, new_name, site_domains)
+ def set_apps(self, apps: list):
+ self.apps = []
+ bench_apps = frappe.get_doc("Bench", self.bench).apps
+ for app in apps:
+ if not find(bench_apps, lambda x: x.app == app):
+ continue
+ self.append("apps", {"app": app})
+ self.save()
+
+ @frappe.whitelist()
+ def sync_apps(self):
+ agent = Agent(self.server)
+ apps_list = agent.get_site_apps(site=self)
+ self.set_apps(apps_list)
+
@frappe.whitelist()
def retry_rename(self):
"""Retry rename with current subdomain"""
- if not self.name == self._get_site_name(self.subdomain):
+ if self.name != self._get_site_name(self.subdomain):
self.rename(self._get_site_name(self.subdomain))
else:
frappe.throw("Please choose a different subdomain")
@@ -163,8 +716,21 @@ def retry_archive(self):
frappe.throw(f"Another site already exists in {self.bench} with name: {site_name}")
self.archive(site_name=site_name, reason="Retry Archive")
+ def check_duplicate_site(self):
+ if frappe.db.exists(
+ "Site",
+ {
+ "subdomain": self.subdomain,
+ "domain": self.domain,
+ "status": ("!=", "Archived"),
+ "name": ("!=", self.name),
+ },
+ ):
+ frappe.throw("Site with same subdomain already exists")
+
def rename(self, new_name: str):
- self.create_dns_record()
+ self.check_duplicate_site()
+ create_dns_record(doc=self, record_name=self._get_site_name(self.subdomain))
agent = Agent(self.server)
agent.rename_site(self, new_name)
self.rename_upstream(new_name)
@@ -173,14 +739,13 @@ def rename(self, new_name: str):
try:
# remove old dns record from route53 after rename
- domain = frappe.get_doc("Root Domain", self.domain)
proxy_server = frappe.get_value("Server", self.server, "proxy_server")
- self.remove_dns_record(domain, proxy_server, self.name)
+ self.remove_dns_record(proxy_server)
except Exception:
log_error("Removing Old Site from Route53 Failed")
def update_config_preview(self):
- """Regenrates site.config on each site.validate from the site.configuration child table data"""
+ """Regenerates site.config on each site.validate from the site.configuration child table data"""
new_config = {}
# Update from site.configuration
@@ -188,20 +753,21 @@ def update_config_preview(self):
# update internal flag from master
row.internal = frappe.db.get_value("Site Config Key", row.key, "internal")
key_type = row.type or row.get_type()
- if key_type == "Password":
- # we don't support password type yet!
- key_type = "String"
row.type = key_type
if key_type == "Number":
- key_value = (
- int(row.value) if isinstance(row.value, (float, int)) else json.loads(row.value)
- )
+ key_value = int(row.value) if isinstance(row.value, float | int) else json.loads(row.value)
elif key_type == "Boolean":
key_value = (
- row.value if isinstance(row.value, bool) else bool(json.loads(cstr(row.value)))
+ row.value if isinstance(row.value, bool) else bool(sbool(json.loads(cstr(row.value))))
)
elif key_type == "JSON":
+ """
+ Handle the old value for the `allow_cors` key
+ Previously it was of string type, now it is a JSON object.
+ """
+ if row.key == "allow_cors" and not is_list(row.value):
+ row.value = json.dumps([row.value])
key_value = json.loads(cstr(row.value))
else:
key_value = row.value
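The JSON branch above special-cases `allow_cors`, whose legacy value was a bare string; wrapping it in a one-element list before parsing keeps old configs working. Equivalent standalone logic (a sketch; `is_list` in the diff is the imported helper, approximated here):

```python
import json


def normalize_allow_cors(value):
    # Accept both the legacy bare-string form and the newer JSON-list form.
    try:
        parsed = json.loads(value)
    except (json.JSONDecodeError, TypeError):
        parsed = value  # legacy value that was never valid JSON
    return parsed if isinstance(parsed, list) else [parsed]
```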
@@ -210,27 +776,78 @@ def update_config_preview(self):
self.config = json.dumps(new_config, indent=4)
- def install_app(self, app):
- if not find(self.apps, lambda x: x.app == app):
- log_site_activity(self.name, "Install App")
- self.append("apps", {"app": app})
- agent = Agent(self.server)
- agent.install_app_site(self, app)
- self.status = "Pending"
- self.save()
+ def install_marketplace_conf(self, app: str, plan: str | None = None):
+ if plan:
+ MarketplaceAppPlan.create_marketplace_app_subscription(self.name, app, plan, self.team)
+ else:
+ create_free_app_subscription(app, self.name)
+ marketplace_app_hook(app=app, site=self, op="install")
+
+ def uninstall_marketplace_conf(self, app: str):
+ marketplace_app_hook(app=app, site=self, op="uninstall")
+
+ # disable marketplace plan if it exists
+ marketplace_app_name = frappe.db.get_value("Marketplace App", {"app": app})
+ app_subscription = frappe.db.exists(
+ "Subscription",
+ {
+ "team": self.team,
+ "site": self.name,
+ "document_type": "Marketplace App",
+ "document_name": marketplace_app_name,
+ },
+ )
+ if marketplace_app_name and app_subscription:
+ frappe.db.set_value("Subscription", app_subscription, "enabled", 0)
+
+ def check_marketplace_app_installable(self, plan: str | None = None):
+ if not plan:
+ return
+ if (
+ not frappe.db.get_value("Marketplace App Plan", plan, "price_usd") <= 0
+ and not frappe.local.team().can_install_paid_apps()
+ ):
+ frappe.throw(
+ "You cannot install a Paid app on Free Credits. Please buy credits before trying to install again."
+ )
+
+ # TODO: check if app is available and can be installed
+
+ @dashboard_whitelist()
+ @site_action(["Active"])
+ def install_app(self, app: str, plan: str | None = None) -> str | None:
+ self.check_marketplace_app_installable(plan)
+
+ if find(self.apps, lambda x: x.app == app):
+ return None
+
+ agent = Agent(self.server)
+ job = agent.install_app_site(self, app)
+ log_site_activity(self.name, "Install App", app, job.name)
+ self.status = "Pending"
+ self.save()
+ self.install_marketplace_conf(app, plan)
- marketplace_app_hook(app=app, site=self.name, op="install")
+ return job.name
- def uninstall_app(self, app):
- app_doc = find(self.apps, lambda x: x.app == app)
- log_site_activity(self.name, "Uninstall App")
- self.remove(app_doc)
+ @dashboard_whitelist()
+ @site_action(["Active"])
+ def uninstall_app(self, app: str, feedback: str = "") -> str:
+ from press.marketplace.doctype.marketplace_app_feedback.marketplace_app_feedback import (
+ collect_app_uninstall_feedback,
+ )
+
+ collect_app_uninstall_feedback(app, feedback, self.name)
agent = Agent(self.server)
- agent.uninstall_app_site(self, app_doc.app)
+ job = agent.uninstall_app_site(self, app)
+
+ log_site_activity(self.name, "Uninstall App", app, job.name)
+
+ self.uninstall_marketplace_conf(app)
self.status = "Pending"
self.save()
- marketplace_app_hook(app=app, site=self.name, op="uninstall")
+ return job.name
def _create_default_site_domain(self):
"""Create Site Domain with Site name."""
@@ -241,102 +858,192 @@ def _create_default_site_domain(self):
"domain": self.name,
"status": "Active",
"retry_count": 0,
- "dns_type": "A",
+ "dns_type": "CNAME",
}
).insert(ignore_if_duplicate=True)
def after_insert(self):
+ self.capture_signup_event("created_first_site")
+
+ if hasattr(self, "subscription_plan") and self.subscription_plan:
+ # create subscription
+ self.create_subscription(self.subscription_plan)
+ self.reload()
+
+ if hasattr(self, "app_plans") and self.app_plans:
+ for app, plan in self.app_plans.items():
+ MarketplaceAppPlan.create_marketplace_app_subscription(
+ self.name, app, plan["name"], self.team, True
+ )
+
# log activity
log_site_activity(self.name, "Create")
self._create_default_site_domain()
- self.create_dns_record()
+ create_dns_record(self, record_name=self._get_site_name(self.subdomain))
self.create_agent_request()
- @frappe.whitelist()
- def create_dns_record(self):
- """Check if site needs dns records and creates one."""
- domain = frappe.get_doc("Root Domain", self.domain)
- is_standalone = frappe.get_value("Server", self.server, "is_standalone")
- if self.cluster == domain.default_cluster and not is_standalone:
- return
- if is_standalone:
- self._change_dns_record("UPSERT", domain, self.server)
- else:
- proxy_server = frappe.get_value("Server", self.server, "proxy_server")
- self._change_dns_record("UPSERT", domain, proxy_server)
+ if hasattr(self, "share_details_consent") and self.share_details_consent:
+ # create partner lead
+ frappe.get_doc(
+ doctype="Site Partner Lead",
+ team=self.team,
+ site=self.name,
+ created_on=frappe.utils.now_datetime(),
+ ).insert(ignore_permissions=True)
- def remove_dns_record(self, domain: Document, proxy_server: str, site: str):
+ create_site_status_update_webhook_event(self.name)
+
+ def remove_dns_record(self, proxy_server: str):
"""Remove dns record of site pointing to proxy."""
- self._change_dns_record("DELETE", domain, proxy_server, site)
+ if self.status != "Archived":
+ self._create_default_site_domain()
+ domains = frappe.db.get_all(
+ "Site Domain", filters={"site": self.name}, fields=["domain"], pluck="domain"
+ )
+ for domain in domains:
+ root_domain = domain.split(".", 1)[1]
+ if bool(frappe.db.exists("Root Domain", root_domain)):
+ _change_dns_record(
+ method="DELETE",
+ domain=frappe.get_doc("Root Domain", root_domain),
+ proxy_server=proxy_server,
+ record_name=domain,
+ )
+
+ def is_this_version_or_above(self, version: int) -> bool:
+ group: ReleaseGroup = frappe.get_cached_doc("Release Group", self.group)
+ return group.is_this_version_or_above(version)
+
+ @property
+ def restore_space_required_on_app(self):
+ db_size, public_size, private_size = (
+ frappe.get_doc("Remote File", file_name).size if file_name else 0
+ for file_name in (
+ self.remote_database_file,
+ self.remote_public_file,
+ self.remote_private_file,
+ )
+ )
+ return self.get_restore_space_required_on_app(
+ db_file_size=db_size, public_file_size=public_size, private_file_size=private_size
+ )
- def _change_dns_record(
- self, method: str, domain: Document, proxy_server: str, site: str = None
+ @property
+ def restore_space_required_on_db(self):
+ if not self.remote_database_file:
+ return 0
+ db_size = frappe.get_doc("Remote File", self.remote_database_file).size
+ return self.get_restore_space_required_on_db(db_file_size=db_size)
+
+ def get_restore_space_required_on_app(
+ self, db_file_size: int = 0, public_file_size: int = 0, private_file_size: int = 0
+ ) -> int:
+ space_for_download = db_file_size + public_file_size + private_file_size
+ space_for_extracted_files = (
+ (0 if self.is_this_version_or_above(14) else (8 * db_file_size))
+ + public_file_size
+ + private_file_size
+ ) # 8 times db size for extraction; estimated
+ return space_for_download + space_for_extracted_files
+
+ def get_restore_space_required_on_db(self, db_file_size: int = 0) -> int:
+ """Returns the space required on the database server for restoration."""
+ return 8 * db_file_size * 2 # double for binlogs
+
+ def check_and_increase_disk(
+ self, server: "BaseServer", space_required: int, no_increase=False, purpose="create site"
):
- """
- Change dns record of site
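+		# Compare the estimated space requirement against free space on the data
+		# disk; public servers may get an automatic volume resize, everything else
+		# fails fast with an actionable error.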
+ mountpoint = server.guess_data_disk_mountpoint()
+ free_space = server.free_space(mountpoint)
+ if (diff := free_space - space_required) <= 0:
+ msg = f"Insufficient estimated space on {DOCTYPE_SERVER_TYPE_MAP[server.doctype]} server to {purpose}. Required: {human_readable(space_required)}, Available: {human_readable(free_space)} (Need {human_readable(abs(diff))} more)."
+ if server.public and not no_increase:
+ self.try_increasing_disk(server, mountpoint, diff, msg)
+ else:
+ frappe.throw(msg, InsufficientSpaceOnServer)
- method: CREATE | DELETE | UPSERT
- """
+ def try_increasing_disk(self, server: "BaseServer", mountpoint: str, diff: int, err_msg: str):
try:
- site_name = self._get_site_name(self.subdomain) if not site else site
- client = boto3.client(
- "route53",
- aws_access_key_id=domain.aws_access_key_id,
- aws_secret_access_key=domain.get_password("aws_secret_access_key"),
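+			# `diff` is the (negative) shortfall in bytes; convert it to whole GB
+			# before asking the server to grow its volume.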
+ server.calculated_increase_disk_size(
+				mountpoint=mountpoint, additional=cint(abs(diff) / 1024 / 1024 // 1024)
)
- zones = client.list_hosted_zones_by_name()["HostedZones"]
- hosted_zone = find(reversed(zones), lambda x: domain.name.endswith(x["Name"][:-1]))[
- "Id"
- ]
- client.change_resource_record_sets(
- ChangeBatch={
- "Changes": [
- {
- "Action": method,
- "ResourceRecordSet": {
- "Name": site_name,
- "Type": "CNAME",
- "TTL": 600,
- "ResourceRecords": [{"Value": proxy_server}],
- },
- }
- ]
- },
- HostedZoneId=hosted_zone,
+ except VolumeResizeLimitError:
+ frappe.throw(
+ f"{err_msg} Please wait {fmt_timedelta(server.time_to_wait_before_updating_volume)} before trying again.",
+ InsufficientSpaceOnServer,
)
- except Exception:
- log_error(
- "Route 53 Record Creation Error",
- domain=domain.name,
- site=site_name,
- proxy_server=proxy_server,
+
+ @property
+ def backup_space_required_on_app(self) -> int:
+ """Returns the space required on the app server for backup."""
+ db_size, public_size, private_size = (
+ frappe.get_doc("Remote File", file_name).size if file_name else 0
+ for file_name in (
+ self.remote_database_file,
+ self.remote_public_file,
+ self.remote_private_file,
)
+ )
+ return db_size + public_size + private_size
+
+ def check_space_on_server_for_backup(self):
+ provider = frappe.get_value("Cluster", self.cluster, "cloud_provider")
+ app: "Server" = frappe.get_doc("Server", self.server)
+ no_increase = True
+ if app.auto_increase_storage or (app.public and provider in ["AWS EC2", "OCI"]):
+ no_increase = False
+ self.check_and_increase_disk(
+ app, self.backup_space_required_on_app, no_increase=no_increase, purpose="backup site"
+ )
+
+ def check_space_on_server_for_restore(self):
+ app: Server = frappe.get_doc("Server", self.server)
+ self.check_and_increase_disk(app, self.restore_space_required_on_app)
+
+ if app.database_server:
+ db: DatabaseServer = frappe.get_doc("Database Server", app.database_server)
+ space_required = self.restore_space_required_on_db
+ if db.ip == app.ip:
+ space_required += self.restore_space_required_on_app
+ self.check_and_increase_disk(db, space_required)
def create_agent_request(self):
agent = Agent(self.server)
if self.remote_database_file:
agent.new_site_from_backup(self, skip_failing_patches=self.skip_failing_patches)
else:
- agent.new_site(self)
+			"""
+			If the site is being created for a SaaS / product trial,
+			create a system user with a password at the time of site creation.
+
+			If `ignore_additional_system_user_creation` is set, don't create the additional system user.
+			"""
+			if self.standby_for and not self.is_standby:
+ user_details = self.get_user_details()
+ if self.flags.get("ignore_additional_system_user_creation", False):
+ user_details = None
+ self.flags.new_site_agent_job_name = agent.new_site(self, create_user=user_details).name
+ else:
+ self.flags.new_site_agent_job_name = agent.new_site(self).name
- server = frappe.get_all(
- "Server", filters={"name": self.server}, fields=["proxy_server"], limit=1
- )[0]
+ server = frappe.get_all("Server", filters={"name": self.server}, fields=["proxy_server"], limit=1)[0]
agent = Agent(server.proxy_server, server_type="Proxy Server")
- agent.new_upstream_site(self.server, self.name)
+ agent.new_upstream_file(server=self.server, site=self.name)
- @frappe.whitelist()
+ @dashboard_whitelist()
+ @site_action(["Active", "Broken"])
def reinstall(self):
- log_site_activity(self.name, "Reinstall")
agent = Agent(self.server)
- agent.reinstall_site(self)
+ job = agent.reinstall_site(self)
+ log_site_activity(self.name, "Reinstall", job=job.name)
self.status = "Pending"
self.save()
+ return job.name
- @frappe.whitelist()
+ @dashboard_whitelist()
+ @site_action(["Active", "Broken"])
def migrate(self, skip_failing_patches=False):
- log_site_activity(self.name, "Migrate")
agent = Agent(self.server)
activate = True
if self.status in ("Inactive", "Suspended"):
@@ -347,7 +1054,8 @@ def migrate(self, skip_failing_patches=False):
"Suspended",
):
activate = False
- agent.migrate_site(self, skip_failing_patches=skip_failing_patches, activate=activate)
+ job = agent.migrate_site(self, skip_failing_patches=skip_failing_patches, activate=activate)
+ log_site_activity(self.name, "Migrate", job=job.name)
self.status = "Pending"
self.save()
@@ -370,7 +1078,10 @@ def last_migrate_failed(self):
if site_update.status == "Recovered":
migrate_site_step = frappe.get_all(
"Agent Job Step",
- filters={"step_name": "Migrate Site", "agent_job": site_update.update_job},
+ filters={
+ "step_name": "Migrate Site",
+ "agent_job": site_update.update_job,
+ },
fields=["status"],
limit=1,
)
@@ -388,80 +1099,259 @@ def restore_tables(self):
self.status = "Pending"
self.save()
- @frappe.whitelist()
+ @dashboard_whitelist()
def clear_site_cache(self):
- log_site_activity(self.name, "Clear Cache")
agent = Agent(self.server)
- agent.clear_site_cache(self)
+ job = agent.clear_site_cache(self)
- @frappe.whitelist()
+ log_site_activity(self.name, "Clear Cache", job=job.name)
+
+ @dashboard_whitelist()
+ @site_action(["Active", "Broken"])
def restore_site(self, skip_failing_patches=False):
- if not frappe.get_doc("Remote File", self.remote_database_file).exists():
- raise Exception(
- "Remote File {0} is unavailable on S3".format(self.remote_database_file)
- )
+ if (
+ self.remote_database_file
+ and not frappe.get_doc("Remote File", self.remote_database_file).exists()
+ ):
+ raise Exception(f"Remote File {self.remote_database_file} is unavailable on S3")
- log_site_activity(self.name, "Restore")
agent = Agent(self.server)
- agent.restore_site(self, skip_failing_patches=skip_failing_patches)
+ job = agent.restore_site(self, skip_failing_patches=skip_failing_patches)
+ log_site_activity(self.name, "Restore", job=job.name)
self.status = "Pending"
self.save()
+ return job.name
+
+ @dashboard_whitelist()
+ @site_action(["Active", "Broken"])
+ def restore_site_from_physical_backup(self, backup: str):
+ if frappe.db.get_single_value("Press Settings", "disable_physical_backup"):
+			frappe.throw("Currently, Physical Backup & Restoration is disabled system-wide. Try again later.")
+
+ frappe.db.set_value("Site", self.name, "status", "Pending")
+ # fetch database_name if not available
+ if not self.database_name:
+ self.sync_info()
+ self.reload()
+
+ doc = frappe.get_doc(
+ {
+ "doctype": "Physical Backup Restoration",
+ "site": self.name,
+ "status": "Pending",
+ "site_backup": backup,
+ "source_database": self.database_name,
+ "destination_database": self.database_name,
+ "destination_server": frappe.get_value("Server", self.server, "database_server"),
+ "deactivate_site_during_restoration": True,
+ "restore_specific_tables": False,
+ "tables_to_restore": "[]",
+ }
+ )
+ doc.insert(ignore_permissions=True)
+ doc.execute()
+
+ @dashboard_whitelist()
+ @site_action(["Active", "Broken"])
+ def restore_site_from_files(self, files, skip_failing_patches=False):
+ self.remote_database_file = files["database"]
+ self.remote_public_file = files["public"]
+ self.remote_private_file = files["private"]
+ self.save()
+ self.reload()
+ return self.restore_site(skip_failing_patches=skip_failing_patches)
+
+ @frappe.whitelist()
+ def physical_backup(self, for_site_update: bool = False):
+ return self.backup(physical=True, for_site_update=for_site_update)
+
+ @dashboard_whitelist()
+ def schedule_backup(self, with_files=False, physical=False):
+		"""
+		This function is meant to be called from the dashboard only.
+		It exposes only a few of the params that can be passed to the backup(...) function.
+		"""
+ if physical and not self.allow_physical_backup_by_user:
+ frappe.throw(_("Physical backup is not enabled for this site. Please reach out to support."))
+
+ if physical and frappe.db.get_single_value("Press Settings", "disable_physical_backup"):
+			frappe.throw(_("Physical backup is disabled system-wide. Please try again later."))
+ # Site deactivation required only for physical backup
+ return self.backup(with_files=with_files, physical=physical, deactivate_site_during_backup=physical)
@frappe.whitelist()
- def backup(self, with_files=False, offsite=False):
+ def backup(
+ self,
+ with_files=False,
+ offsite=False,
+ force=False,
+ physical=False,
+ for_site_update: bool = False,
+ deactivate_site_during_backup: bool = False,
+ ):
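+		# Cap post-suspension backups: allow at most 3 successful backups taken
+		# since the site was suspended.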
+ if (
+ self.status == "Suspended"
+ and frappe.db.count(
+ "Site Backup",
+ filters=dict(
+ site=self.name,
+ status="Success",
+ creation=(">=", get_suspended_time(self.name)),
+ ),
+ )
+ > 3
+ ):
+ frappe.throw("You cannot take more than 3 backups after site suspension")
+
return frappe.get_doc(
{
"doctype": "Site Backup",
"site": self.name,
"with_files": with_files,
"offsite": offsite,
+ "force": force,
+ "physical": physical,
+ "for_site_update": for_site_update,
+ "deactivate_site_during_backup": deactivate_site_during_backup,
}
).insert()
- @frappe.whitelist()
- def schedule_update(self, skip_failing_patches=False, skip_backups=False):
- log_site_activity(self.name, "Update")
+ @dashboard_whitelist()
+ def get_backup_download_link(self, backup, file):
+ from botocore.exceptions import ClientError
+
+ if file not in ["database", "public", "private", "config"]:
+ frappe.throw("Invalid file type")
+
+ try:
+ remote_file = frappe.db.get_value(
+ "Site Backup",
+ {"name": backup, "site": self.name},
+ f"remote_{file}_file",
+ )
+ return frappe.get_doc("Remote File", remote_file).download_link
+ except ClientError:
+ log_error(title="Offsite Backup Response Exception")
+
+ def site_migration_scheduled(self):
+ return frappe.db.get_value(
+ "Site Migration", {"site": self.name, "status": "Scheduled"}, "scheduled_time"
+ )
+
+ def site_update_scheduled(self):
+ return frappe.db.get_value(
+ "Site Update", {"site": self.name, "status": "Scheduled"}, "scheduled_time"
+ )
+
+ def check_move_scheduled(self):
+ if time := self.site_migration_scheduled():
+ frappe.throw(f"Site Migration is scheduled for {self.name} at {time}")
+ if time := self.site_update_scheduled():
+ frappe.throw(f"Site Update is scheduled for {self.name} at {time}")
+
+ def ready_for_move(self):
+ if self.status in TRANSITORY_STATES:
+			frappe.throw(f"Site is in {self.status} state. Cannot update.", SiteUnderMaintenance)
+		elif self.status == "Archived":
+			frappe.throw("Site is archived. Cannot update.", SiteAlreadyArchived)
+ self.check_move_scheduled()
+
self.status_before_update = self.status
self.status = "Pending"
self.save()
+
+ @dashboard_whitelist()
+ @site_action(["Active", "Inactive", "Suspended"])
+ def schedule_update(
+ self,
+ skip_failing_patches: bool = False,
+ skip_backups: bool = False,
+ physical_backup: bool = False,
+ scheduled_time: str | None = None,
+ ):
+ log_site_activity(self.name, "Update")
+
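+		# A `scheduled_time` creates the Site Update as "Scheduled", to be picked up
+		# later; without it the update starts right away as "Pending".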
doc = frappe.get_doc(
{
"doctype": "Site Update",
"site": self.name,
+ "backup_type": "Physical" if physical_backup else "Logical",
"skipped_failing_patches": skip_failing_patches,
"skipped_backups": skip_backups,
+ "status": "Scheduled" if scheduled_time else "Pending",
+ "scheduled_time": scheduled_time,
}
).insert()
return doc.name
+ @dashboard_whitelist()
+ def edit_scheduled_update(
+ self,
+ name,
+ skip_failing_patches: bool = False,
+ skip_backups: bool = False,
+ scheduled_time: str | None = None,
+ ):
+ doc = frappe.get_doc("Site Update", name)
+ doc.skipped_failing_patches = skip_failing_patches
+ doc.skipped_backups = skip_backups
+ doc.scheduled_time = scheduled_time
+ doc.save()
+ return doc.name
+
+ @dashboard_whitelist()
+ def cancel_scheduled_update(self, site_update: str):
+ try:
+ if (
+ _status := frappe.db.get_value(
+ "Site Update", site_update, "status", for_update=True, wait=False
+ )
+ ) != "Scheduled":
+ frappe.throw(f"Cannot cancel a Site Update with status {_status}")
+
+ except (frappe.QueryTimeoutError, frappe.QueryDeadlockError):
+ frappe.throw("The update is probably underway. Please reload/refresh to get the latest status.")
+
+		# use the document API so that doc permissions are applied
+ doc = frappe.get_doc("Site Update", site_update)
+ doc.status = "Cancelled"
+ doc.save()
+
@frappe.whitelist()
- def move_to_group(self, group, skip_failing_patches=False):
+ def move_to_group(self, group, skip_failing_patches=False, skip_backups=False):
log_site_activity(self.name, "Update")
- self.status_before_update = self.status
- self.status = "Pending"
- self.save()
+
return frappe.get_doc(
{
"doctype": "Site Update",
"site": self.name,
"destination_group": group,
"skipped_failing_patches": skip_failing_patches,
+ "skipped_backups": skip_backups,
+ "ignore_past_failures": True,
}
).insert()
@frappe.whitelist()
def move_to_bench(self, bench, deactivate=True, skip_failing_patches=False):
- log_site_activity(self.name, "Update")
- self.status_before_update = self.status
- self.status = "Pending"
- self.save()
+ frappe.only_for("System Manager")
+ self.ready_for_move()
+
+ if bench == self.bench:
+ frappe.throw("Site is already on the selected bench.")
+
agent = Agent(self.server)
- agent.move_site_to_bench(self, bench, deactivate, skip_failing_patches)
+ job = agent.move_site_to_bench(self, bench, deactivate, skip_failing_patches)
+ log_site_activity(self.name, "Update", job=job.name)
+
+ return job
- def reset_previous_status(self):
+ def reset_previous_status(self, fix_broken=False):
+ if self.status == "Archived":
+ return
self.status = self.status_before_update
self.status_before_update = None
- if not self.status:
+ if not self.status or (self.status == "Broken" and fix_broken):
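+			# Infer the status from the site's own HTTP response:
+			# 402 Payment Required -> Suspended, 503 Service Unavailable -> Inactive.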
status_map = {402: "Suspended", 503: "Inactive"}
try:
response = requests.get(f"https://{self.name}")
@@ -471,11 +1361,10 @@ def reset_previous_status(self):
self.save()
@frappe.whitelist()
+ @site_action(["Active"])
def update_without_backup(self):
- log_site_activity(self.name, "Update without Backup")
- self.status_before_update = self.status
- self.status = "Pending"
- self.save()
+ log_site_activity(self.name, "Update")
+
frappe.get_doc(
{
"doctype": "Site Update",
@@ -484,10 +1373,15 @@ def update_without_backup(self):
}
).insert()
- @frappe.whitelist()
+ @dashboard_whitelist()
+ @site_action(["Active"])
def add_domain(self, domain):
- domain = domain.lower()
- if check_dns(self.name, domain)["matched"]:
+ domain = domain.lower().strip(".")
+ response = check_dns_cname_a(self.name, domain)
+ if response["matched"]:
+ if frappe.db.exists("Site Domain", {"domain": domain}):
+ frappe.throw(f"The domain {frappe.bold(domain)} is already used by a site")
+
log_site_activity(self.name, "Add Domain")
frappe.get_doc(
{
@@ -495,46 +1389,78 @@ def add_domain(self, domain):
"status": "Pending",
"site": self.name,
"domain": domain,
- "dns_type": "CNAME",
- "ssl": False,
+ "dns_type": response["type"],
+ "dns_response": json.dumps(response, indent=4, default=str),
}
).insert()
- def get_config_value_for_key(self, key: str) -> Any:
- """
- Get site config value configuration child table for given key.
-
- :returns: None if key not in config.
- """
+ def add_domain_for_product_site(self, domain):
+ domain = domain.lower().strip(".")
+ log_site_activity(self.name, "Add Domain")
+ create_dns_record(doc=self, record_name=domain)
+ frappe.get_doc(
+ {
+ "doctype": "Site Domain",
+ "status": "Pending",
+ "site": self.name,
+ "domain": domain,
+ "dns_type": "CNAME",
+ }
+ ).insert(ignore_if_duplicate=True)
+
+ @frappe.whitelist()
+ def create_dns_record(self):
+ self._create_default_site_domain()
+ domains = frappe.db.get_all(
+ "Site Domain", filters={"site": self.name}, fields=["domain"], pluck="domain"
+ )
+ for domain in domains:
+			if frappe.db.exists("Root Domain", domain.split(".", 1)[1]):
+ create_dns_record(doc=self, record_name=domain)
+
+ @frappe.whitelist()
+ def update_dns_record(self, value):
+ domain = frappe.get_doc("Root Domain", self.domain)
+ record_name = self._get_site_name(self.subdomain)
+ _change_dns_record("UPSERT", domain, value, record_name)
+
+ def get_config_value_for_key(self, key: str) -> Any:
+ """
+		Get site config value from the configuration child table for the given key.
+
+ :returns: None if key not in config.
+ """
key_obj = find(self.configuration, lambda x: x.key == key)
if key_obj:
return json.loads(key_obj.get("value"))
return None
def add_domain_to_config(self, domain: str):
- domains = self.get_config_value_for_key("domains") or []
- domains.append(domain)
- self._update_configuration({"domains": domains})
+ domains = set(self.get_config_value_for_key("domains") or [])
+ domains.add(domain)
+ self._update_configuration({"domains": list(domains)})
agent = Agent(self.server)
agent.add_domain(self, domain)
def remove_domain_from_config(self, domain):
- domains = self.get_config_value_for_key("domains")
- domains.remove(domain)
- self._update_configuration({"domains": domains})
+ domains = set(self.get_config_value_for_key("domains") or [])
+ if domain not in domains:
+ return
+ domains.discard(domain)
+ self._update_configuration({"domains": list(domains)})
agent = Agent(self.server)
agent.remove_domain(self, domain)
+ @dashboard_whitelist()
+ @site_action(["Active"])
def remove_domain(self, domain):
if domain == self.name:
- raise Exception("Cannot delete default site_domain")
- site_domain = frappe.get_all(
- "Site Domain", filters={"site": self.name, "domain": domain}
- )[0]
- site_domain = frappe.delete_doc("Site Domain", site_domain.name)
+			frappe.throw("Cannot delete the default site domain")
+ site_domain = frappe.get_all("Site Domain", filters={"site": self.name, "domain": domain})[0]
+ frappe.delete_doc("Site Domain", site_domain.name)
def retry_add_domain(self, domain):
- if check_dns(self.name, domain)["matched"]:
+ if check_dns_cname_a(self.name, domain)["matched"]:
site_domain = frappe.get_all(
"Site Domain",
filters={
@@ -548,9 +1474,7 @@ def retry_add_domain(self, domain):
site_domain.retry()
def _check_if_domain_belongs_to_site(self, domain: str):
- if not frappe.db.exists(
- {"doctype": "Site Domain", "site": self.name, "domain": domain}
- ):
+ if not frappe.db.exists({"doctype": "Site Domain", "site": self.name, "domain": domain}):
frappe.throw(
msg=f"Site Domain {domain} for site {self.name} does not exist",
exc=frappe.exceptions.LinkValidationError,
@@ -559,21 +1483,20 @@ def _check_if_domain_belongs_to_site(self, domain: str):
def _check_if_domain_is_active(self, domain: str):
status = frappe.get_value("Site Domain", domain, "status")
if status != "Active":
- frappe.throw(
- msg="Only active domains can be primary", exc=frappe.LinkValidationError
- )
+ frappe.throw(msg="Only active domains can be primary", exc=frappe.LinkValidationError)
def _validate_host_name(self):
"""Perform checks for primary domain."""
self._check_if_domain_belongs_to_site(self.host_name)
self._check_if_domain_is_active(self.host_name)
+ @dashboard_whitelist()
def set_host_name(self, domain: str):
"""Set host_name/primary domain of site."""
self.host_name = domain
self.save()
- def _get_redirected_domains(self) -> List[str]:
+ def _get_redirected_domains(self) -> list[str]:
"""Get list of redirected site domains for site."""
return frappe.get_all(
"Site Domain",
@@ -585,52 +1508,65 @@ def _update_redirects_for_all_site_domains(self):
domains = self._get_redirected_domains()
if domains:
return self.set_redirects_in_proxy(domains)
+ return None
def _remove_redirects_for_all_site_domains(self):
domains = self._get_redirected_domains()
if domains:
self.unset_redirects_in_proxy(domains)
- def set_redirects_in_proxy(self, domains: List[str]):
- target = self.host_name
- proxy_server = frappe.db.get_value("Server", self.server, "proxy_server")
- agent = Agent(proxy_server, server_type="Proxy Server")
+ def set_redirects_in_proxy(self, domains: list[str]):
+ target = str(self.host_name)
+ if self.is_on_standalone:
+ agent = Agent(self.server)
+ else:
+ proxy_server = frappe.db.get_value("Server", self.server, "proxy_server")
+ agent = Agent(proxy_server, server_type="Proxy Server")
return agent.setup_redirects(self.name, domains, target)
- def unset_redirects_in_proxy(self, domains: List[str]):
- proxy_server = frappe.db.get_value("Server", self.server, "proxy_server")
- agent = Agent(proxy_server, server_type="Proxy Server")
+ def unset_redirects_in_proxy(self, domains: list[str]):
+ if self.is_on_standalone:
+ agent = Agent(self.server)
+ else:
+ proxy_server = frappe.db.get_value("Server", self.server, "proxy_server")
+ agent = Agent(proxy_server, server_type="Proxy Server")
agent.remove_redirects(self.name, domains)
+ @dashboard_whitelist()
def set_redirect(self, domain: str):
"""Enable redirect to primary for domain."""
self._check_if_domain_belongs_to_site(domain)
- site_domain = frappe.get_doc("Site Domain", domain)
+ site_domain: SiteDomain = frappe.get_doc("Site Domain", domain)
site_domain.setup_redirect()
+ @dashboard_whitelist()
def unset_redirect(self, domain: str):
"""Disable redirect to primary for domain."""
self._check_if_domain_belongs_to_site(domain)
- site_domain = frappe.get_doc("Site Domain", domain)
+ site_domain: SiteDomain = frappe.get_doc("Site Domain", domain)
site_domain.remove_redirect()
- @frappe.whitelist()
+ @dashboard_whitelist()
+ @site_action(["Active", "Broken", "Inactive", "Suspended"])
def archive(self, site_name=None, reason=None, force=False):
- log_site_activity(self.name, "Archive", reason)
agent = Agent(self.server)
self.status = "Pending"
self.save()
- agent.archive_site(self, site_name, force)
+ job = agent.archive_site(self, site_name, force)
+ log_site_activity(self.name, "Archive", reason, job.name)
- server = frappe.get_all(
- "Server", filters={"name": self.server}, fields=["proxy_server"], limit=1
- )[0]
+ server = frappe.get_all("Server", filters={"name": self.server}, fields=["proxy_server"], limit=1)[0]
agent = Agent(server.proxy_server, server_type="Proxy Server")
- agent.remove_upstream_site(self.server, self.name, site_name)
+ agent.remove_upstream_file(
+ server=self.server,
+ site=self.name,
+ site_name=site_name,
+ )
self.db_set("host_name", None)
+ self.delete_physical_backups()
self.delete_offsite_backups()
frappe.db.set_value(
"Site Backup",
@@ -641,28 +1577,59 @@ def archive(self, site_name=None, reason=None, force=False):
self.disable_subscription()
self.disable_marketplace_subscriptions()
+ self.archive_site_database_users()
+
@frappe.whitelist()
def cleanup_after_archive(self):
site_cleanup_after_archive(self.name)
+ def delete_physical_backups(self):
+ log_site_activity(self.name, "Drop Physical Backups")
+
+ site_db_snapshots = frappe.get_all(
+ "Site Backup",
+ filters={
+ "site": self.name,
+ "physical": True,
+ "files_availability": "Available",
+ "for_site_update": False,
+ },
+ pluck="database_snapshot",
+ order_by="creation desc",
+ )
+
+ for snapshot in site_db_snapshots:
+			# Lock the row: for a Pending snapshot, the background sync job could
+			# otherwise cause a timestamp mismatch or document version error
+ frappe.get_doc("Virtual Disk Snapshot", snapshot, for_update=True).delete_snapshot()
+
def delete_offsite_backups(self):
- from press.press.doctype.remote_file.remote_file import delete_remote_backup_objects
+ from press.press.doctype.remote_file.remote_file import (
+ delete_remote_backup_objects,
+ )
log_site_activity(self.name, "Drop Offsite Backups")
sites_remote_files = []
site_backups = frappe.get_all(
"Site Backup",
- filters={"site": self.name, "offsite": True, "files_availability": "Available"},
+ filters={
+ "site": self.name,
+ "offsite": True,
+ "physical": False,
+ "files_availability": "Available",
+ },
pluck="name",
order_by="creation desc",
- )[
- 1:
- ] # Keep latest backup
+ )[1:] # Keep latest backup
for backup_files in frappe.get_all(
"Site Backup",
filters={"name": ("in", site_backups)},
- fields=["remote_database_file", "remote_public_file", "remote_private_file"],
+ fields=[
+ "remote_database_file",
+ "remote_public_file",
+ "remote_private_file",
+ ],
as_list=True,
order_by="creation desc",
ignore_ifnull=True,
@@ -670,7 +1637,7 @@ def delete_offsite_backups(self):
sites_remote_files += backup_files
if not sites_remote_files:
- return
+ return None
frappe.db.set_value(
"Site Backup",
@@ -681,28 +1648,173 @@ def delete_offsite_backups(self):
return delete_remote_backup_objects(sites_remote_files)
+ @dashboard_whitelist()
+ def send_change_team_request(self, team_mail_id: str, reason: str):
+ """Send email to team to accept site transfer request"""
+
+ if self.team != get_current_team():
+ frappe.throw(
+ "You should belong to the team owning the site to initiate a site ownership transfer."
+ )
+
+ if not frappe.db.exists("Team", {"user": team_mail_id, "enabled": 1}):
+ frappe.throw("No Active Team record found.")
+
+ old_team = frappe.db.get_value("Team", self.team, "user")
+
+ if old_team == team_mail_id:
+ frappe.throw(f"Site is already owned by the team {team_mail_id}")
+
+ key = frappe.generate_hash("Site Transfer Link", 20)
+ frappe.get_doc(
+ {
+ "doctype": "Team Change",
+ "document_type": "Site",
+ "document_name": self.name,
+ "to_team": frappe.db.get_value("Team", {"user": team_mail_id, "enabled": 1}),
+ "from_team": self.team,
+ "reason": reason,
+ "key": key,
+ }
+ ).insert()
+
+ link = get_url(f"/api/method/press.api.site.confirm_site_transfer?key={key}")
+
+ if frappe.conf.developer_mode:
+ print(f"\nSite transfer link for {team_mail_id}\n{link}\n")
+
+ frappe.sendmail(
+ recipients=team_mail_id,
+ subject="Transfer Site Ownership Confirmation",
+ template="transfer_team_confirmation",
+ args={
+ "name": self.host_name or self.name,
+ "type": "site",
+ "old_team": old_team,
+ "new_team": team_mail_id,
+ "transfer_url": link,
+ },
+ )
+
+ @dashboard_whitelist()
+ @site_action(["Active", "Broken"])
+ @action_guard(SiteActions.LoginAsAdmin)
+ def login_as_admin(self, reason=None):
+ sid = self.login(reason=reason)
+ return f"https://{self.host_name or self.name}/app?sid={sid}"
+
+ @dashboard_whitelist()
+ @site_action(["Active"])
+ def login_as_team(self, reason=None):
+ if self.additional_system_user_created:
+ team_user = frappe.db.get_value("Team", self.team, "user")
+ sid = self.get_login_sid(user=team_user)
+ if self.standby_for_product and self.is_setup_wizard_complete:
+ redirect_route = (
+ frappe.db.get_value("Product Trial", self.standby_for_product, "redirect_to_after_login")
+ or "/app"
+ )
+ else:
+ redirect_route = "/app"
+ return f"https://{self.host_name or self.name}{redirect_route}?sid={sid}"
+
+ frappe.throw("No additional system user created for this site")
+ return None
+
+ @site_action(["Active"])
+ def login_as_user(self, user_email, reason=None):
+ try:
+ sid = self.get_login_sid(user=user_email)
+ if self.standby_for_product:
+ redirect_route = (
+ frappe.db.get_value("Product Trial", self.standby_for_product, "redirect_to_after_login")
+ or "/app"
+ )
+ else:
+ redirect_route = "/app"
+ return f"https://{self.host_name or self.name}{redirect_route}?sid={sid}"
+ except Exception as e:
+ frappe.throw(str(e))
+
+ @frappe.whitelist()
def login(self, reason=None):
log_site_activity(self.name, "Login as Administrator", reason=reason)
return self.get_login_sid()
- def get_connection_as_admin(self):
- password = get_decrypted_password("Site", self.name, "admin_password")
- conn = FrappeClient(f"https://{self.name}", "Administrator", password)
+ def create_user_with_team_info(self):
+ team_user = frappe.db.get_value("Team", self.team, "user")
+ user = frappe.get_doc("User", team_user)
+ return self.create_user(user.email, user.first_name or "", user.last_name or "")
+
+ def create_user(self, email, first_name, last_name, password=None):
+ if self.additional_system_user_created:
+ return None
+ agent = Agent(self.server)
+ return agent.create_user(self, email, first_name, last_name, password)
- return conn
+ @frappe.whitelist()
+ def show_admin_password(self):
+ frappe.msgprint(self.get_password("admin_password"), title="Password", indicator="green")
- def get_login_sid(self):
+ def get_connection_as_admin(self):
password = get_decrypted_password("Site", self.name, "admin_password")
- response = requests.post(
- f"https://{self.name}/api/method/login",
- data={"usr": "Administrator", "pwd": password},
- )
- sid = response.cookies.get("sid")
- if not sid:
+ return FrappeClient(f"https://{self.name}", "Administrator", password)
+
+ def get_sid_from_agent(self, user: str) -> str | None:
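+		# Ask the agent on the app server to mint a session id for `user`,
+		# translating the most common failure modes into actionable errors.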
+ try:
agent = Agent(self.server)
- sid = agent.get_site_sid(self)
+ return agent.get_site_sid(self, user)
+ except requests.HTTPError as e:
+ if "validate_ip_address" in str(e):
+ frappe.throw(
+					f"Login with {user}'s credentials is IP restricted. Please remove the restriction and try again.",
+ frappe.ValidationError,
+ )
+ elif f"User {user} does not exist" in str(e):
+ frappe.throw(f"User {user} does not exist in the site", frappe.ValidationError)
+ elif "certificate has expired" in str(e):
+ frappe.throw(
+ "SSL certificate for the site has expired. Please check the domains tab.",
+ frappe.ValidationError,
+ )
+ elif "no space left on device" in str(e):
+ frappe.throw(
+ "Site is unresponsive due to no space left on device. Please contact support.",
+ frappe.ValidationError,
+ )
+ elif frappe.db.exists(
+ "Incident",
+ {
+ "server": self.server,
+ "status": ("not in", ["Resolved", "Auto-Resolved", "Press-Resolved"]),
+ },
+ ):
+ frappe.throw(
+ "Server appears to be unresponsive. Please try again in some time.",
+ frappe.ValidationError,
+ )
+ else:
+ raise e
+ except AgentRequestSkippedException:
+ frappe.throw(
+ "Server is unresponsive. Please try again in some time.",
+ frappe.ValidationError,
+ )
+ return None
+
+ def get_login_sid(self, user: str = "Administrator"):
+ sid = None
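+		# For Administrator, first try logging in directly with the stored admin
+		# password; for other users (or if that fails) fall back to the agent.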
+ if user == "Administrator":
+ password = get_decrypted_password("Site", self.name, "admin_password")
+ response = requests.post(
+ f"https://{self.name}/api/method/login",
+ data={"usr": user, "pwd": password},
+ )
+ sid = response.cookies.get("sid")
if not sid or sid == "Guest":
- frappe.throw("Could not login as Administrator", frappe.ValidationError)
+ sid = self.get_sid_from_agent(user)
+ if not sid or sid == "Guest":
+ frappe.throw(f"Could not login as {user}", frappe.ValidationError)
return sid
def fetch_info(self):
@@ -711,6 +1823,8 @@ def fetch_info(self):
def fetch_analytics(self):
agent = Agent(self.server)
+ if agent.should_skip_requests():
+ return None
return agent.get_site_analytics(self)
def get_disk_usages(self):
@@ -721,21 +1835,21 @@ def get_disk_usages(self):
return {
"database": last_usage.database,
+ "database_free": last_usage.database_free,
"backups": last_usage.backups,
"public": last_usage.public,
"private": last_usage.private,
+ "creation": last_usage.creation,
}
- def _sync_config_info(self, fetched_config: Dict) -> bool:
+ def _sync_config_info(self, fetched_config: dict) -> bool:
"""Update site doc config with the fetched_config values.
:fetched_config: Generally data passed is the config part of the agent info response
:returns: True if value has changed
"""
config = {
- key: fetched_config[key]
- for key in fetched_config
- if key not in get_client_blacklisted_keys()
+ key: fetched_config[key] for key in fetched_config if key not in get_client_blacklisted_keys()
}
new_config = {**json.loads(self.config or "{}"), **config}
current_config = json.dumps(new_config, indent=4)
@@ -745,52 +1859,55 @@ def _sync_config_info(self, fetched_config: Dict) -> bool:
return True
return False
- def _sync_usage_info(self, fetched_usage: Dict):
+ def _sync_usage_info(self, fetched_usage: dict):
"""Generate a Site Usage doc for the site using the fetched_usage data.
:fetched_usage: Requires backups, database, public, private keys with Numeric values
"""
- def _insert_usage(usage: dict):
- current_usages = self.get_disk_usages()
- site_usage_data = {
- "site": self.name,
- "backups": usage["backups"],
- "database": usage["database"],
- "public": usage["public"],
- "private": usage["private"],
- }
-
- same_as_last_usage = (
- current_usages["backups"] == site_usage_data["backups"]
- and current_usages["database"] == site_usage_data["database"]
- and current_usages["public"] == site_usage_data["public"]
- and current_usages["private"] == site_usage_data["private"]
- )
+ if isinstance(fetched_usage, list):
+ for usage in fetched_usage:
+ self._insert_site_usage(usage)
+ else:
+ self._insert_site_usage(fetched_usage)
+
+ def _insert_site_usage(self, usage: dict):
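+		# Skip the insert when the reading is identical to the last recorded usage,
+		# already exists for the same timestamp, or is older than the latest record.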
+ current_usages = self.get_disk_usages()
+ site_usage_data = {
+ "site": self.name,
+ "backups": usage["backups"],
+ "database": usage["database"],
+ "database_free": usage.get("database_free", 0),
+ "database_free_tables": json.dumps(usage.get("database_free_tables", []), indent=1),
+ "public": usage["public"],
+ "private": usage["private"],
+ }
- if same_as_last_usage:
- return
+ same_as_last_usage = (
+ current_usages["backups"] == site_usage_data["backups"]
+ and current_usages["database"] == site_usage_data["database"]
+ and current_usages["public"] == site_usage_data["public"]
+ and current_usages["private"] == site_usage_data["private"]
+			and current_usages["database_free"] == site_usage_data["database_free"]
+ )
- equivalent_site_time = None
- if usage.get("timestamp"):
- equivalent_site_time = convert_utc_to_user_timezone(
- dateutil.parser.parse(usage["timestamp"])
- ).replace(tzinfo=None)
- if frappe.db.exists(
- "Site Usage", {"site": self.name, "creation": equivalent_site_time}
- ):
- return
+ if same_as_last_usage:
+ return
- site_usage = frappe.get_doc({"doctype": "Site Usage", **site_usage_data}).insert()
+ equivalent_site_time = None
+ if usage.get("timestamp"):
+ equivalent_site_time = convert_utc_to_user_timezone(
+ dateutil.parser.parse(usage["timestamp"])
+ ).replace(tzinfo=None)
+ if frappe.db.exists("Site Usage", {"site": self.name, "creation": equivalent_site_time}):
+ return
+		if equivalent_site_time and current_usages["creation"] and equivalent_site_time < current_usages["creation"]:
+ return
- if equivalent_site_time:
- site_usage.db_set("creation", equivalent_site_time)
+ site_usage = frappe.get_doc({"doctype": "Site Usage", **site_usage_data}).insert()
- if isinstance(fetched_usage, list):
- for usage in fetched_usage:
- _insert_usage(usage)
- else:
- _insert_usage(fetched_usage)
+ if equivalent_site_time:
+ site_usage.db_set("creation", equivalent_site_time)
def _sync_timezone_info(self, timezone: str) -> bool:
"""Update site doc timezone with the passed value of timezone.
@@ -798,6 +1915,14 @@ def _sync_timezone_info(self, timezone: str) -> bool:
:timezone: Timezone passed in part of the agent info response
:returns: True if value has changed
"""
+ # Validate timezone string
+ # Empty string is fine, since we default to IST
+ if timezone:
+ try:
+ pytz.timezone(timezone)
+ except pytz.exceptions.UnknownTimeZoneError:
+ return False
+
if self.timezone != timezone:
self.timezone = timezone
return True
@@ -816,6 +1941,9 @@ def sync_info(self, data=None):
if not data:
data = self.fetch_info()
+ if not data:
+ return
+
fetched_usage = data["usage"]
fetched_config = data["config"]
fetched_timezone = data["timezone"]
@@ -831,22 +1959,167 @@ def sync_info(self, data=None):
def sync_analytics(self, analytics=None):
if not analytics:
analytics = self.fetch_analytics()
- create_site_analytics(self.name, analytics)
+ if analytics:
+ create_site_analytics(self.name, analytics)
+
+ def create_sync_user_webhook(self):
+ """
+ Create 3 webhook records in the site to sync the user with press
+ - One for user record creation
+ - One for user record update
+ - One for user record deletion
+ """
+ conn = self.get_connection_as_admin()
+ doctype_data = {
+ "doctype": "Webhook",
+ "webhook_doctype": "User",
+ "enabled": 1,
+ "request_url": "https://frappecloud.com/api/method/press.api.site_login.sync_product_site_user",
+ "request_method": "POST",
+ "request_structure": "JSON",
+ "webhook_json": """{ "user_info": { "email": "{{doc.email}}", "enabled": "{{doc.enabled}}" } }""",
+ "webhook_headers": [
+ {"key": "x-site", "value": self.name},
+ {"key": "Content-Type", "value": "application/json"},
+ {"key": "x-site-token", "value": self.saas_communication_secret},
+ ],
+ }
+
+ webhook_data = [
+ {
+ "name": "Sync User records with Frappe Cloud on create",
+ "webhook_docevent": "after_insert",
+ },
+ {
+ "name": "Sync User records with Frappe Cloud on update",
+ "webhook_docevent": "on_update",
+ "condition": """doc.has_value_changed("enabled")""",
+ },
+ {
+ "name": "Sync User records with Frappe Cloud on delete",
+ "webhook_docevent": "on_trash",
+ },
+ ]
+
+ for webhook in webhook_data:
+ try:
+ conn.insert({**doctype_data, **webhook})
+ except FrappeException as ex:
+ if "frappe.exceptions.DuplicateEntryError" not in str(ex):
+ raise ex
+
+ def sync_users_to_product_site(self, analytics=None):
+ from press.press.doctype.site_user.site_user import create_user_for_product_site
+
+ if self.is_standby:
+ return
+ if not analytics:
+ analytics = self.fetch_analytics()
+ if analytics:
+ create_user_for_product_site(self.name, analytics)
+
+ def prefill_setup_wizard(self, system_settings_payload: dict, user_payload: dict):
+		"""Prefill the setup wizard with the given payloads.
+
+		:param system_settings_payload: values used to initialize System Settings.
+		:param user_payload: details of the first user to create.
+		"""
+ if self.setup_wizard_complete or not system_settings_payload or not user_payload:
+ return
+
+ conn = self.get_connection_as_admin()
+ method = "frappe.desk.page.setup_wizard.setup_wizard.initialize_system_settings_and_user"
+ params = {"system_settings_data": system_settings_payload, "user_data": user_payload}
+ conn.post_api(method, params)
+ self.db_set("additional_system_user_created", 1)
+ @dashboard_whitelist()
def is_setup_wizard_complete(self):
if self.setup_wizard_complete:
return True
sid = self.get_login_sid()
conn = FrappeClient(f"https://{self.name}?sid={sid}")
- value = conn.get_value("System Settings", "setup_complete", "System Settings")
- if value:
- setup_complete = cint(value["setup_complete"])
- self.setup_wizard_complete = setup_complete
- self.save()
- return setup_complete
- def _set_configuration(self, config):
+ try:
+ value = conn.get_value("System Settings", "setup_complete", "System Settings")
+ except json.JSONDecodeError:
+ # the proxy might be down or network failure
+ # that's why the response is blank and get_value try to parse the json
+ # and raise json.JSONDecodeError
+ return False
+ except Exception:
+ if self.ping().status_code == requests.codes.ok:
+ # Site is up but setup status fetch failed
+ log_error("Fetching Setup Status Failed", doc=self)
+ return False
+
+ setup_complete = cint(value["setup_complete"])
+ if not setup_complete:
+ return False
+
+ self.reload()
+ self.setup_wizard_complete = 1
+
+ self.team = (
+ frappe.db.get_value(
+ "Team",
+ {"user": frappe.db.get_value("Account Request", self.account_request, "email")},
+ "name",
+ )
+ if self.team == "Administrator"
+ else self.team
+ )
+
+ self.save()
+
+ # Telemetry: Capture event for setup wizard completion
+ if self.setup_wizard_complete:
+ self.capture_signup_event("first_site_setup_wizard_completed")
+
+ return setup_complete
+
+ def fetch_setup_wizard_complete_status(self):
+ with suppress(Exception):
+ # max retries = 18, backoff time = 10s, with exponential backoff it will try for 30 days
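+			# e.g. first retry after 20s, then 40s, 80s, ... doubling until the final
+			# wait (10 * 2^18 s) is roughly 30 days.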
+ if self.setup_wizard_status_check_retries >= 18:
+ return
+ is_completed = self.is_setup_wizard_complete()
+ if not is_completed:
+ self.setup_wizard_status_check_retries += 1
+ exponential_backoff_duration = 10 * (2**self.setup_wizard_status_check_retries)
+ self.setup_wizard_status_check_next_retry_on = add_to_date(
+ now_datetime(), seconds=exponential_backoff_duration
+ )
+ self.save()
+
+ @frappe.whitelist()
+ def set_status_based_on_ping(self):
+ if self.status in ("Active", "Archived", "Inactive", "Suspended"):
+ return
+ try:
+ response = self.ping()
+ except Exception:
+ return
+ else:
+ if response.status_code == requests.codes.ok:
+ self.status = "Active"
+ self.save()
+
+ def is_responsive(self):
+ try:
+ response = self.ping()
+ if response.status_code != requests.codes.ok:
+ return False
+ if response.json().get("message") != "pong":
+ return False
+ return True
+ except Exception:
+ return False
+
+ def ping(self):
+ return requests.get(f"https://{self.name}/api/method/ping", timeout=5)
+
+ def _set_configuration(self, config: list[dict]):
"""Similar to _update_configuration but will replace full configuration at once
This is necessary because when you update site config from the UI, you can update the key,
update the value, remove the key. All of this can be handled by setting the full configuration at once.
@@ -854,9 +2127,7 @@ def _set_configuration(self, config):
Args:
config (list): List of dicts with key, value, and type
"""
- blacklisted_config = [
- x for x in self.configuration if x.key in get_client_blacklisted_keys()
- ]
+ blacklisted_config = [x for x in self.configuration if x.key in get_client_blacklisted_keys()]
self.configuration = []
# Maintain keys that aren't accessible to Dashboard user
@@ -870,6 +2141,9 @@ def _set_configuration(self, config):
value = json.dumps(d.value)
else:
value = d.value
+ # Value is mandatory, skip None and empty strings
+ if value is None or cstr(value).strip() == "":
+ continue
self.append("configuration", {"key": d.key, "value": value, "type": d.type})
self.save()
@@ -879,21 +2153,119 @@ def _update_configuration(self, config, save=True):
Args:
config (dict): Python dict for any suitable frappe.conf
"""
- keys = {x.key: i for i, x in enumerate(self.configuration)}
+ existing_keys = {x.key: i for i, x in enumerate(self.configuration)}
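+		# Prefer the type registered in the Site Config Key doctype, falling back to
+		# guessing from the Python value; empty values are skipped since the child
+		# table requires a value.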
for key, value in config.items():
- if key in keys:
- self.configuration[keys[key]].value = convert(value)
- self.configuration[keys[key]].type = guess_type(value)
+ _type = frappe.get_value("Site Config Key", {"key": key}, "type") or guess_type(value)
+ converted_value = convert(value)
+ if converted_value is None or cstr(converted_value).strip() == "":
+ continue
+ if key in existing_keys:
+ self.configuration[existing_keys[key]].value = converted_value
+ self.configuration[existing_keys[key]].type = _type
else:
self.append(
- "configuration", {"key": key, "value": convert(value), "type": guess_type(value)}
+ "configuration",
+ {"key": key, "value": converted_value, "type": _type},
)
if save:
self.save()
+ def check_server_script_enabled_on_public_bench(self, key: str):
+ if (
+ key == "server_script_enabled"
+ and self.is_group_public
+ and self.is_this_version_or_above(SERVER_SCRIPT_DISABLED_VERSION)
+ ):
+ frappe.throw(
+				"You cannot enable server scripts on public benches. Please move to a private bench."
+ )
+
+ def validate_encryption_key(self, key: str, value: Any):
+ if key != "encryption_key":
+ return
+ from cryptography.fernet import Fernet, InvalidToken
+
+ try:
+ Fernet(value)
+ except (ValueError, InvalidToken):
+ frappe.throw(
+ _(
+ "This is not a valid encryption key. Please copy it exactly. Read here if you have lost the encryption key."
+ )
+ )
+
+ @dashboard_whitelist()
+ @site_action(["Active"])
+ def update_config(self, config=None):
+ """Updates site.configuration, meant for dashboard and API users"""
+ if config is None:
+ return
+ # config = {"key1": value1, "key2": value2}
+ config = frappe.parse_json(config)
+
+ sanitized_config = {}
+ for key, value in config.items():
+ if key in get_client_blacklisted_keys():
+ frappe.throw(_(f"The key {key} is blacklisted or internal and cannot be updated"))
+ self.check_server_script_enabled_on_public_bench(key)
+ self.validate_encryption_key(key, value)
+
+ _type = self._site_config_key_type(key, value)
+
+ if _type == "Number":
+ value = flt(value)
+ elif _type == "Boolean":
+ value = bool(sbool(value))
+ elif _type == "JSON":
+ value = frappe.parse_json(value)
+ elif _type == "Password" and value == "*******":
+ value = frappe.get_value("Site Config", {"key": key, "parent": self.name}, "value")
+ sanitized_config[key] = value
+
+ self.update_site_config(sanitized_config)
+
+ def _site_config_key_type(self, key, value):
+ if frappe.db.exists("Site Config Key", key):
+ return frappe.db.get_value("Site Config Key", key, "type")
+
+ if isinstance(value, dict | list):
+ return "JSON"
+ if isinstance(value, bool):
+ return "Boolean"
+ if isinstance(value, int | float):
+ return "Number"
+ return "String"
+
+ @dashboard_whitelist()
+ @site_action(["Active"])
+ def delete_config(self, key):
+ """Deletes a key from site configuration, meant for dashboard and API users"""
+ if key in get_client_blacklisted_keys():
+ return None
+
+ updated_config = []
+ for row in self.configuration:
+ if row.key != key and not row.internal:
+ updated_config.append({"key": row.key, "value": row.value, "type": row.type})
+
+ return self.update_site_config(updated_config)
+
+ def delete_multiple_config(self, keys: list[str]):
+ # relies on self._keys_removed_in_last_update in self.validate
+ # used by https://frappecloud.com/app/marketplace-app/email_delivery_service
+ config_list: list[dict] = []
+ for row in self.configuration:
+ config = {}
+ if row.key not in keys and not row.internal:
+ config["key"] = row.key
+ config["value"] = row.value
+ config["type"] = row.type
+ config_list.append(config)
+ self.update_site_config(config_list)
+
@frappe.whitelist()
- def update_site_config(self, config=None):
+ def update_site_config(self, config=None) -> AgentJob:
"""Updates site.configuration, site.config and runs site.save which initiates an Agent Request
This checks for the blacklisted config keys via Frappe Validations, but not for internal usages.
Don't expose this directly to an external API. Pass through `press.utils.sanitize_config` or use
@@ -929,6 +2301,118 @@ def update_subscription(self):
subscription.team = self.team
subscription.save(ignore_permissions=True)
+ @frappe.whitelist()
+ def disable_monitoring(self, reason=None):
+ if self.is_monitoring_disabled:
+ return
+
+ self.is_monitoring_disabled = True
+ if not reason:
+ reason = f"Monitoring disabled by user ({frappe.session.user})"
+ self.reason_for_disabling_monitoring = reason
+ self.save()
+
+ log_site_activity(
+ self.name, "Disable Monitoring And Alerts", reason=self.reason_for_disabling_monitoring
+ )
+ frappe.msgprint("Monitoring has been disabled")
+
+ @dashboard_whitelist()
+ def enable_monitoring(self): # noqa: C901
+ if not self.is_monitoring_disabled:
+ frappe.throw("Monitoring is already enabled")
+
+ if self.status != "Active":
+			frappe.throw("Make sure the site is Active before trying to enable monitoring")
+
+ # Check ping before enabling monitoring
+ result = {"enabled": False, "reason": "", "solution": ""}
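+		# The caller receives a structured result: whether monitoring was enabled
+		# and, if not, a human-readable reason plus a suggested fix.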
+
+ # First validate DNS records
+ dns_result = check_dns_cname_a(self.name, self.host_name, throw_error=False)
+ if not dns_result.get("valid"):
+			msg = f"DNS records of {self.host_name} are not pointing correctly\n"
+ msg += f" Type: {dns_result.get('exc_type')}\n"
+ msg += f" Details: {dns_result.get('exc_message')}\n"
+
+ dns_record_exists = dns_result.get("A", {}).get("exists") or dns_result.get("CNAME", {}).get(
+ "exists"
+ )
+ if dns_record_exists:
+ msg += "Current DNS Records:\n"
+ if dns_result.get("A", {}).get("exists"):
+ msg += f" A: {', '.join(dns_result.get('A').get('answer'))}\n"
+
+ if dns_result.get("CNAME", {}).get("exists"):
+ msg += f" CNAME: {', '.join(dns_result.get('CNAME').get('answer'))}\n"
+ else:
+				msg += f"No correct DNS records found for {self.host_name}\n"
+
+ solution = "Required DNS Records:\n"
+ solution += f" A record with value {self.inbound_ip}\n"
+ solution += f" Or, CNAME record with value {self.name}\n"
+ solution += (
+ "Please check with your Domain Registrar / DNS provider to add the required records.\n"
+ )
+
+ result.update(
+ {
+ "enabled": False,
+ "reason": msg,
+ "solution": solution,
+ }
+ )
+ return result
+
+ # Send ping request
+ try:
+ resp = requests.get(f"https://{self.host_name}/api/method/ping", timeout=5, verify=True)
+ is_pingable = resp.status_code == 200
+ if not is_pingable:
+ result.update(
+ {
+ "enabled": False,
+ "reason": f"Site not pingable, status code: {resp.status_code}",
+						"solution": "Please ensure the site is up and try again. If you are still facing issues, please contact support.",
+ }
+ )
+ return result
+ except requests.exceptions.SSLError:
+ result.update(
+ {
+ "enabled": False,
+ "reason": "SSL Certificate Error",
+ "solution": f"Try removing and adding {self.host_name} domain again. If you are still facing issues, please contact support.",
+ }
+ )
+ return result
+ except requests.exceptions.Timeout as e:
+ result.update(
+ {
+ "enabled": False,
+					"reason": f"Timeout Error:\n{e}",
+					"solution": "Please ensure the site is up and try again. If you are still facing issues, please contact support.",
+ }
+ )
+ return result
+
+ log_site_activity(self.name, "Enable Monitoring And Alerts")
+
+ self.is_monitoring_disabled = False
+ self.reason_for_disabling_monitoring = ""
+ self.save()
+ result["enabled"] = True
+ return result
+
+ def is_site_pingable(self):
+ try:
+ response = self.ping()
+ if response.status_code == requests.codes.ok:
+ return True
+ except Exception:
+ return False
+ return False
+
def enable_subscription(self):
subscription = self.subscription
if subscription:
@@ -950,6 +2434,11 @@ def disable_marketplace_subscriptions(self):
subscription_doc = frappe.get_doc("Marketplace App Subscription", subscription)
subscription_doc.disable()
+ subscriptions = frappe.get_all("Subscription", {"site": self.name, "enabled": 1}, pluck="name")
+ for subscription in subscriptions:
+ subscription_doc = frappe.get_doc("Subscription", subscription)
+ subscription_doc.disable()
+
def can_change_plan(self, ignore_card_setup):
if is_system_user(frappe.session.user):
return
@@ -958,37 +2447,42 @@ def can_change_plan(self, ignore_card_setup):
# ignore card setup for prepaid app payments
return
+ if bool(frappe.db.get_value("Cluster", self.cluster, "hybrid")):
+ # skip validation if site is on hybrid server
+ return
+
team = frappe.get_doc("Team", self.team)
- if team.is_defaulter():
- frappe.throw("Cannot change plan because you have unpaid invoices")
+ if team.parent_team:
+ team = frappe.get_doc("Team", team.parent_team)
- if team.payment_mode == "Partner Credits" and (
- not team.get_available_partner_credits() > 0
- ):
- frappe.throw("Cannot change plan because you don't have sufficient partner credits")
+ if team.payment_mode == "Paid By Partner" and team.billing_team:
+ team = frappe.get_doc("Team", team.billing_team)
- if team.payment_mode != "Partner Credits" and not (
- team.default_payment_method or team.get_balance()
- ):
+ trial_plans = frappe.get_all("Site Plan", {"is_trial_plan": 1, "enabled": 1}, pluck="name")
+ if (
+ not (team.default_payment_method or team.get_balance()) and self.plan in trial_plans
+ ) or not team.payment_mode:
frappe.throw(
- "Cannot change plan because you haven't added a card and not have enough balance"
+			"Cannot change plan because you haven't added a card and don't have enough balance",
+ CannotChangePlan,
)
+ # TODO: rename to change_plan and remove the need for ignore_card_setup param
+ @dashboard_whitelist()
+ def set_plan(self, plan):
+ from press.api.site import validate_plan
+
+ validate_plan(self.server, plan)
+ self.change_plan(plan)
+
def change_plan(self, plan, ignore_card_setup=False):
self.can_change_plan(ignore_card_setup)
+ self.reset_disk_usage_exceeded_status(save=False)
plan_config = self.get_plan_config(plan)
- if (
- frappe.db.exists(
- "Marketplace App Subscription", {"status": "Active", "site": self.name}
- )
- and self.trial_end_date
- ):
- plan_config["app_include_js"] = []
-
self._update_configuration(plan_config)
- frappe.get_doc(
+ ret = frappe.get_doc(
{
"doctype": "Site Plan Change",
"site": self.name,
@@ -1009,56 +2503,95 @@ def change_plan(self, plan, ignore_card_setup=False):
self.trial_end_date = ""
self.save()
- def unsuspend_if_applicable(self):
- try:
- usage = frappe.get_last_doc("Site Usage", {"site": self.name})
- except frappe.DoesNotExistError:
- # If no doc is found, it means the site was created a few moments before
- # team was suspended, potentially due to failure in payment. Don't unsuspend
- # site in that case. team.unsuspend_sites should handle that, then.
- return
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "revoke_database_access_on_plan_change",
+ enqueue_after_commit=True,
+ )
+ return ret
- plan_name = self.plan
- # get plan from subscription
- if not plan_name:
- subscription = self.subscription
- if not subscription:
- return
- plan_name = subscription.plan
+ def archive_site_database_users(self):
+ db_users = frappe.get_all(
+ "Site Database User",
+ filters={
+ "site": self.name,
+ "status": ("!=", "Archived"),
+ },
+ pluck="name",
+ )
- plan = frappe.get_doc("Plan", plan_name)
+ for db_user in db_users:
+ frappe.get_doc("Site Database User", db_user).archive(
+ raise_error=False, skip_remove_db_user_step=True
+ )
- disk_usage = usage.public + usage.private
- if usage.database < plan.max_database_usage and disk_usage < plan.max_storage_usage:
- self.current_database_usage = (usage.database / plan.max_database_usage) * 100
- self.current_disk_usage = (
- (usage.public + usage.private) / plan.max_storage_usage
- ) * 100
- self.unsuspend(reason="Plan Upgraded")
+ def revoke_database_access_on_plan_change(self):
+ # If the new plan doesn't have database access, disable it
+ if frappe.db.get_value("Site Plan", self.plan, "database_access"):
+ return
- @frappe.whitelist()
+ self.archive_site_database_users()
+
+ def unsuspend_if_applicable(self):
+ if self.site_usage_exceeded:
+ self.reset_disk_usage_exceeded_status()
+ else:
+ self.unsuspend("Plan Upgraded")
+
+ @dashboard_whitelist()
+ @site_action(["Active", "Broken"])
def deactivate(self):
+ plan = frappe.db.get_value("Site Plan", self.plan, ["is_frappe_plan", "is_trial_plan"], as_dict=True)
+ if self.plan and plan.is_trial_plan:
+ frappe.throw(_("Cannot deactivate site on a trial plan"))
+
+ if self.plan and plan.is_frappe_plan:
+ frappe.throw(_("Cannot deactivate site on a Frappe plan"))
+
log_site_activity(self.name, "Deactivate Site")
self.status = "Inactive"
self.update_site_config({"maintenance_mode": 1})
self.update_site_status_on_proxy("deactivated")
- @frappe.whitelist()
+ @dashboard_whitelist()
+ @site_action(
+ ["Inactive", "Broken"],
+		disallowed_message="You can activate only an inactive or broken site",
+ )
def activate(self):
log_site_activity(self.name, "Activate Site")
- self.status = "Active"
+ if self.status == "Suspended":
+ self.reset_disk_usage_exceeded_status()
+ # If site was broken, check if it's responsive before marking it as active
+ self.status = "Broken" if (self.status == "Broken" and not self.is_responsive()) else "Active"
self.update_site_config({"maintenance_mode": 0})
self.update_site_status_on_proxy("activated")
self.reactivate_app_subscriptions()
@frappe.whitelist()
- def suspend(self, reason=None):
+ def suspend(self, reason=None, skip_reload=False):
log_site_activity(self.name, "Suspend Site", reason)
self.status = "Suspended"
self.update_site_config({"maintenance_mode": 1})
- self.update_site_status_on_proxy("suspended")
+ self.update_site_status_on_proxy("suspended", skip_reload=skip_reload)
self.deactivate_app_subscriptions()
+ if self.standby_for_product:
+ from press.saas.doctype.product_trial.product_trial import send_suspend_mail
+
+ send_suspend_mail(self.name, self.standby_for_product)
+
+ if self.site_usage_exceeded:
+ frappe.sendmail(
+ recipients=get_communication_info("Email", "Site Activity", "Site", self.name),
+ subject=f"Action Required: Site {self.host_name} suspended",
+ template="site_suspend_due_to_exceeding_disk_usage",
+ args={
+ "subject": f"Site {self.host_name} has been suspended",
+ },
+ )
+
def deactivate_app_subscriptions(self):
frappe.db.set_value(
"Marketplace App Subscription",
@@ -1074,6 +2607,7 @@ def reactivate_app_subscriptions(self):
)
@frappe.whitelist()
+	@site_action(["Suspended"], disallowed_message="You can unsuspend only a suspended site.")
def unsuspend(self, reason=None):
log_site_activity(self.name, "Unsuspend Site", reason)
self.status = "Active"
@@ -1086,10 +2620,29 @@ def reset_site_usage(self):
agent = Agent(self.server)
agent.reset_site_usage(self)
- def update_site_status_on_proxy(self, status):
+ def update_site_status_on_proxy(self, status, skip_reload=False):
proxy_server = frappe.db.get_value("Server", self.server, "proxy_server")
agent = Agent(proxy_server, server_type="Proxy Server")
- agent.update_site_status(self.server, self.name, status)
+ agent.update_site_status(self.server, self.name, status, skip_reload=skip_reload)
+
+ def get_user_details(self):
+ if frappe.db.get_value("Team", self.team, "user") == "Administrator" and self.account_request:
+ ar = frappe.get_doc("Account Request", self.account_request)
+ user_email = ar.email
+ user_first_name = ar.first_name
+ user_last_name = ar.last_name
+ else:
+ user_email = frappe.db.get_value("Team", self.team, "user")
+ user = frappe.db.get_value(
+ "User", {"email": user_email}, ["first_name", "last_name"], as_dict=True
+ )
+ user_first_name = user.first_name if (user and user.first_name) else ""
+ user_last_name = user.last_name if (user and user.last_name) else ""
+ return {
+ "email": user_email,
+ "first_name": user_first_name or "",
+ "last_name": user_last_name or "",
+ }
def setup_erpnext(self):
account_request = frappe.get_doc("Account Request", self.account_request)
@@ -1113,9 +2666,7 @@ def setup_erpnext(self):
@property
def subscription(self):
- name = frappe.db.get_value(
- "Subscription", {"document_type": "Site", "document_name": self.name}
- )
+ name = frappe.db.get_value("Subscription", {"document_type": "Site", "document_name": self.name})
return frappe.get_doc("Subscription", name) if name else None
def can_charge_for_subscription(self, subscription=None):
@@ -1125,17 +2676,146 @@ def can_charge_for_subscription(self, subscription=None):
and self.team
and self.team != "Administrator"
and not self.free
- and (
- today > get_datetime(self.trial_end_date).date() if self.trial_end_date else True
- )
+ and (today > get_datetime(self.trial_end_date).date() if self.trial_end_date else True)
)
- def get_plan_config(self, plan=None):
+ def get_plan_name(self, plan=None):
if not plan:
plan = self.subscription_plan if hasattr(self, "subscription_plan") else self.plan
- if not plan:
- return {}
- return get_plan_config(plan)
+ if plan and not isinstance(plan, str):
+ frappe.throw("Site.subscription_plan must be a string")
+ return plan
+
+ def get_plan_config(self, plan=None):
+ plan = self.get_plan_name(plan)
+ config = get_plan_config(plan)
+ if plan in UNLIMITED_PLANS:
+ # PERF: do not enable usage tracking on unlimited sites.
+ config["rate_limit"] = {}
+ return config
+
+ def _get_benches_for_(self, proxy_servers, release_group_names=None):
+ from pypika.terms import PseudoColumn
+
+ benches = frappe.qb.DocType("Bench")
+ servers = frappe.qb.DocType("Server")
+
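+ # Pick a single active bench, preferring the site's own cluster, then servers
+ # marked for new sites, then the most recently created bench.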
+ bench_query = (
+ frappe.qb.from_(benches)
+ .select(
+ benches.name,
+ benches.server,
+ benches.group,
+ benches.cluster,
+ PseudoColumn(f"`tabBench`.`cluster` = '{self.cluster}' `in_primary_cluster`"),
+ )
+ .left_join(servers)
+ .on(benches.server == servers.name)
+ .where(servers.proxy_server.isin(proxy_servers))
+ .where(benches.status == "Active")
+ .orderby(PseudoColumn("in_primary_cluster"), order=frappe.qb.desc)
+ .orderby(servers.use_for_new_sites, order=frappe.qb.desc)
+ .orderby(benches.creation, order=frappe.qb.desc)
+ .limit(1)
+ )
+ if release_group_names:
+ groups = frappe.qb.DocType("Release Group")
+ bench_query = (
+ bench_query.where(benches.group.isin(release_group_names))
+ .join(groups)
+ .on(benches.group == groups.name)
+ .where(groups.version == self.version)
+ )
+ else:
+ restricted_release_group_names = frappe.db.get_all(
+ "Site Plan Release Group",
+ pluck="release_group",
+ filters={"parenttype": "Site Plan", "parentfield": "release_groups"},
+ )
+ if self.group in restricted_release_group_names:
+ frappe.throw(f"Site can't be deployed on this release group {self.group} due to restrictions")
+ bench_query = bench_query.where(benches.group == self.group)
+ if self.server:
+ bench_query = bench_query.where(servers.name == self.server)
+ return bench_query.run(as_dict=True)
+
+ def set_bench_for_server(self):
+ if not self.server:
+ return
+
+ server_details = frappe.db.get_value("Server", self.server, ["public", "team"], as_dict=True)
+
+ if not server_details:
+ frappe.throw(f"Server {self.server} not found")
+
+ if server_details.team != get_current_team():
+ frappe.throw("You don't have permission to deploy on this server")
+
+ bench = frappe.db.get_value(
+ "Bench",
+ {"group": self.group, "status": "Active", "server": self.server},
+ ["name", "cluster"],
+ as_dict=True,
+ )
+
+ if not bench:
+ frappe.throw(
+ f"No active bench available for group {self.group} on server {self.server}. "
+ "Please contact support."
+ )
+
+ self.bench = bench.name
+ if self.cluster != bench.cluster:
+ frappe.throw(f"Site cannot be deployed on {self.cluster} yet. Please contact support.")
+
+ def set_latest_bench(self):
+ if not (self.domain and self.cluster and self.group):
+ frappe.throw("domain, cluster and group are required to create site")
+
+ proxy_servers_names = frappe.db.get_all(
+ "Proxy Server Domain", {"domain": self.domain}, pluck="parent"
+ )
+ proxy_servers = frappe.db.get_all(
+ "Proxy Server",
+ {"status": "Active", "name": ("in", proxy_servers_names)},
+ pluck="name",
+ )
+ if not proxy_servers:
+ frappe.throw(
+ f"No active proxy servers found for domain {self.domain}. Please contact support.",
+ )
+
+ """
+ For restricted plans, just choose any bench from the release groups and clusters combination
+ For others, don't allow to deploy on those specific release group benches, choose anything except that
+ """
+
+ release_group_names = []
+ if self.get_plan_name():
+ release_group_names = frappe.db.get_all(
+ "Site Plan Release Group",
+ pluck="release_group",
+ filters={
+ "parenttype": "Site Plan",
+ "parentfield": "release_groups",
+ "parent": self.get_plan_name(),
+ },
+ )
+
+ benches = self._get_benches_for_(
+ proxy_servers,
+ release_group_names,
+ )
+ if len(benches) == 0:
+ frappe.throw("No bench available to deploy this site")
+
+ self.bench = benches[0].name
+ self.server = benches[0].server
+ if release_group_names:
+ self.group = benches[0].group
+ if self.cluster != benches[0].cluster:
+ frappe.throw(f"Site cannot be deployed on {self.cluster} yet. Please contact support.")
def _create_initial_site_plan_change(self, plan):
frappe.get_doc(
@@ -1149,65 +2829,199 @@ def _create_initial_site_plan_change(self, plan):
}
).insert(ignore_permissions=True)
- @frappe.whitelist()
- def enable_database_access(self, mode="read_only"):
- if not frappe.db.get_value("Plan", self.plan, "database_access"):
- frappe.throw(f"Database Access is not available on {self.plan} plan")
- log_site_activity(self.name, "Enable Database Access")
-
- server_agent = Agent(self.server)
- credentials = server_agent.create_database_access_credentials(self, mode)
- self.database_access_mode = mode
- self.database_access_user = credentials["user"]
- self.database_access_password = credentials["password"]
- self.save()
+ def check_db_access_enabling(self):
+ if frappe.db.get_value(
+ "Agent Job",
+ filters={
+ "site": self.name,
+ "job_type": "Add User to ProxySQL",
+ "status": ["in", ["Running", "Pending"]],
+ },
+ for_update=True,
+ ):
+ frappe.throw("Database Access is already being enabled on this site. Please check after a while.")
+
+ def get_auto_update_info(self):
+ fields = [
+ "auto_updates_scheduled",
+ "auto_update_last_triggered_on",
+ "update_trigger_frequency",
+ "update_trigger_time",
+ "update_on_weekday",
+ "update_end_of_month",
+ "update_on_day_of_month",
+ ]
+ return {field: self.get(field) for field in fields}
- proxy_server = frappe.db.get_value("Server", self.server, "proxy_server")
- agent = Agent(proxy_server, server_type="Proxy Server")
+ def get_update_information(self):
+ from press.press.doctype.site_update.site_update import (
+ benches_with_available_update,
+ )
- database_server_name = frappe.db.get_value("Server", self.server, "database_server")
- database_server = frappe.get_doc("Database Server", database_server_name)
+ out = frappe._dict()
+ out.update_available = self.bench in benches_with_available_update(site=self.name)
+ if not out.update_available:
+ return out
- return agent.add_proxysql_user(
- self,
- credentials["database"],
- credentials["user"],
- credentials["password"],
- database_server,
+ bench: "Bench" = frappe.get_doc("Bench", self.bench)
+ source = bench.candidate
+ destinations = frappe.get_all(
+ "Deploy Candidate Difference",
+ filters={"source": source},
+ limit=1,
+ pluck="destination",
)
+ if not destinations:
+ out.update_available = False
+ return out
- @frappe.whitelist()
- def disable_database_access(self):
- log_site_activity(self.name, "Disable Database Access")
+ destination = destinations[0]
- server_agent = Agent(self.server)
- server_agent.revoke_database_access_credentials(self)
+ destination_candidate: "DeployCandidate" = frappe.get_doc("Deploy Candidate", destination)
- proxy_server = frappe.db.get_value("Server", self.server, "proxy_server")
- agent = Agent(proxy_server, server_type="Proxy Server")
+ current_apps = bench.apps
+ next_apps = destination_candidate.apps
+ out.apps = get_updates_between_current_and_next_apps(current_apps, next_apps)
- user = self.database_access_user
+ out.installed_apps = self.apps
+ out.update_available = any([app["update_available"] for app in out.apps])
+ return out
- self.database_access_mode = None
- self.database_access_user = None
- self.database_access_password = None
- self.save()
- return agent.remove_proxysql_user(self, user)
+ def fetch_running_optimize_tables_job(self):
+ return frappe.db.exists(
+ "Agent Job",
+ {
+ "site": self.name,
+ "job_type": "Optimize Tables",
+ "status": ["in", ["Undelivered", "Running", "Pending"]],
+ },
+ )
- @frappe.whitelist()
- def get_database_credentials(self):
- proxy_server = frappe.db.get_value("Server", self.server, "proxy_server")
- config = self.fetch_info()["config"]
+ @dashboard_whitelist()
+ def optimize_tables(self, ignore_checks: bool = True, tables: list[str] | None = None):
+ # check for a running `Optimize Tables` agent job before triggering a new one
+ if not ignore_checks and (job := self.fetch_running_optimize_tables_job()):
+ return {
+ "success": True,
+ "message": "Optimize Tables job is already running on this site.",
+ "job_name": job,
+ }
+ agent = Agent(self.server)
+ job_name = agent.optimize_tables(self, tables).name
return {
- "host": proxy_server,
- "port": 3306,
- "database": config["db_name"],
- "username": self.database_access_user,
- "password": self.get_password("database_access_password"),
- "mode": self.database_access_mode,
+ "success": True,
+ "message": "Optimize Tables has been triggered on this site.",
+ "job_name": job_name,
}
+ @dashboard_whitelist()
+ def get_database_performance_report(self):
+ from press.press.report.mariadb_slow_queries.mariadb_slow_queries import get_data as get_slow_queries
+
+ agent = Agent(self.server)
+ # fetch slow queries of last 7 days
+ slow_queries = get_slow_queries(
+ frappe._dict(
+ {
+ "database": self.database_name,
+ "start_datetime": frappe.utils.add_to_date(None, days=-7),
+ "stop_datetime": frappe.utils.now_datetime(),
+ "search_pattern": ".*",
+ "max_lines": 2000,
+ "normalize_queries": True,
+ }
+ )
+ )
+ # convert all the float to int
+ for query in slow_queries:
+ for key, value in query.items():
+ if isinstance(value, float):
+ query[key] = int(value)
+
+ # Sort by duration
+ slow_queries.sort(key=lambda x: x["duration"], reverse=True)
+
+ is_performance_schema_enabled = False
+ if database_server := frappe.db.get_value("Server", self.server, "database_server"):
+ is_performance_schema_enabled = frappe.db.get_value(
+ "Database Server",
+ database_server,
+ "is_performance_schema_enabled",
+ )
+ result = None
+ if is_performance_schema_enabled:
+ with suppress(Exception):
+ # fetching the performance report can fail for large tables or when the database holds locks
+ result = agent.get_summarized_performance_report_of_database(self)
+ # remove `parent` & `creation` indexes from unused_indexes
+ result["unused_indexes"] = [
+ index
+ for index in result.get("unused_indexes", [])
+ if index["index_name"] not in ["parent", "creation"]
+ ]
+
+ if not result:
+ result = {}
+ result["unused_indexes"] = []
+ result["redundant_indexes"] = []
+ result["top_10_time_consuming_queries"] = []
+ result["top_10_queries_with_full_table_scan"] = []
+
+ # sort the slow queries by `rows_examined`
+ result["slow_queries"] = sorted(slow_queries, key=lambda x: x["rows_examined"], reverse=True)
+ result["is_performance_schema_enabled"] = is_performance_schema_enabled
+ return result
+
+ def check_if_disk_usage_exceeded(self, save=True): # noqa: C901
+ if self.disable_site_usage_exceed_check:
+ # Flag to disable disk usage exceeded check
+ return
+
+ if self.free or frappe.get_cached_value("Team", self.team, "free_account"):
+ # Ignore for free sites and teams
+ return
+ if not frappe.db.get_value("Server", self.server, "public"):
+ # Don't check disk usage for dedicated servers
+ return
+
+ # Usage is exceeded when database or disk usage crosses 120% of the plan limit
+ disk_usage_exceeded = self.current_database_usage > 120 or self.current_disk_usage > 120
+ # If usage is back within limits but the site is still flagged as exceeded
+ if not disk_usage_exceeded and self.site_usage_exceeded:
+ # Reset site usage exceeded flags
+ self.reset_disk_usage_exceeded_status(save=save)
+ return
+
+ # If that's detected previously as well, just update the last checked time
+ if disk_usage_exceeded and self.site_usage_exceeded:
+ self.site_usage_exceeded_last_checked_on = now_datetime()
+ if save:
+ self.save()
+ return
+
+ if disk_usage_exceeded and not self.site_usage_exceeded:
+ # If disk usage exceeded, set the flags
+ self.site_usage_exceeded = True
+ self.site_usage_exceeded_on = now_datetime()
+ self.site_usage_exceeded_last_checked_on = now_datetime()
+ if save:
+ self.save()
+
+ def reset_disk_usage_exceeded_status(self, save=True):
+ self.site_usage_exceeded = False
+ self.site_usage_exceeded_on = None
+ self.site_usage_exceeded_last_checked_on = None
+ self.last_site_usage_warning_mail_sent_on = None
+
+ if self.status == "Suspended":
+ self.unsuspend(reason="Disk usage issue resolved")
+ elif self.status_before_update == "Suspended":
+ self.status_before_update = "Active"
+
+ if save:
+ self.save()
+
@property
def server_logs(self):
return Agent(self.server).get(f"benches/{self.bench}/sites/{self.name}/logs")
@@ -1215,16 +3029,23 @@ def server_logs(self):
def get_server_log(self, log):
return Agent(self.server).get(f"benches/{self.bench}/sites/{self.name}/logs/{log}")
+ def get_server_log_for_log_browser(self, log):
+ return Agent(self.server).get(f"benches/{self.bench}/sites/{self.name}/logs_v2/{log}")
+
@property
def has_paid(self) -> bool:
"""Has the site been paid for by customer."""
invoice_items = frappe.get_all(
"Invoice Item",
- {"document_type": self.doctype, "document_name": self.name, "Amount": (">", 0)},
+ {
+ "document_type": self.doctype,
+ "document_name": self.name,
+ "Amount": (">", 0),
+ },
pluck="parent",
)
today = frappe.utils.getdate()
- today_last_month = today.replace(month=today.month - 1)
+ today_last_month = frappe.utils.add_to_date(today, months=-1)
last_month_last_date = frappe.utils.get_last_day(today_last_month)
return frappe.db.exists(
"Invoice",
@@ -1236,19 +3057,136 @@ def has_paid(self) -> bool:
},
)
+ @property
+ def inbound_ip(self):
+ server = frappe.db.get_value(
+ "Server",
+ self.server,
+ ["ip", "is_standalone", "proxy_server", "team"],
+ as_dict=True,
+ )
+ if server.is_standalone:
+ ip = server.ip
+ else:
+ ip = frappe.db.get_value("Proxy Server", server.proxy_server, "ip")
+ return ip
+
+ @property
+ def current_usage(self):
+ from press.api.analytics import get_current_cpu_usage
+
+ result = frappe.db.get_all(
+ "Site Usage",
+ fields=["database", "public", "private"],
+ filters={"site": self.name},
+ order_by="creation desc",
+ limit=1,
+ )
+ usage = result[0] if result else {}
+
+ # number of hours until cpu usage resets
+ now = frappe.utils.now_datetime()
+ today_end = now.replace(hour=23, minute=59, second=59)
+ hours_left_today = flt(time_diff_in_hours(today_end, now), 2)
+
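+ # Assumed unit: the analytics CPU counter is in microseconds, so dividing by 3.6e9 yields hours.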
+ return {
+ "cpu": flt(get_current_cpu_usage(self.name) / (3.6 * (10**9)), 5),
+ "storage": usage.get("public", 0) + usage.get("private", 0),
+ "database": usage.get("database", 0),
+ "hours_until_cpu_usage_resets": hours_left_today,
+ }
+
+ @property
+ def last_updated(self):
+ result = frappe.db.get_all(
+ "Site Activity",
+ filters={"site": self.name, "action": "Update"},
+ order_by="creation desc",
+ limit=1,
+ pluck="creation",
+ )
+ return result[0] if result else None
+
@classmethod
- def get_sites_for_backup(cls, interval: int):
- sites = cls.get_sites_without_backup_in_interval(interval)
+ def get_sites_with_backup_time(cls, backup_type: Literal["Logical", "Physical"]) -> list[dict]:
+ site_backup_times = frappe.qb.DocType("Site Backup Time")
+ site_filters: dict[str, Any] = {"status": "Active"}
+ if backup_type == "Logical":
+ site_filters.update(
+ {
+ "skip_scheduled_logical_backups": 0,
+ "schedule_logical_backup_at_custom_time": 1,
+ }
+ )
+ elif backup_type == "Physical":
+ site_filters.update(
+ {
+ "skip_scheduled_physical_backups": 0,
+ "schedule_physical_backup_at_custom_time": 1,
+ }
+ )
+
+ sites = frappe.get_all("Site", filters=site_filters, pluck="name")
+ if not sites:
+ return []
+
+ query = (
+ frappe.qb.from_(site_backup_times)
+ .select(site_backup_times.parent.as_("name"), site_backup_times.backup_time)
+ .where(site_backup_times.parent.isin(sites))
+ )
+
+ if backup_type == "Logical":
+ query = query.where(site_backup_times.parentfield == "logical_backup_times")
+ elif backup_type == "Physical":
+ query = query.where(site_backup_times.parentfield == "physical_backup_times")
+
+ # Only consider backup times within the current hour,
+ # i.e. between current_hr:00:00 and current_hr:59:59
+ current_hr = frappe.utils.get_datetime().hour
+ query = query.where(
+ (site_backup_times.backup_time >= f"{current_hr}:00:00")
+ & (site_backup_times.backup_time <= f"{current_hr}:59:59")
+ )
+
+ return query.run(as_dict=True)
+
+ @classmethod
+ def get_sites_for_backup(
+ cls, interval: int, backup_type: Literal["Logical", "Physical"] = "Logical"
+ ) -> list[dict]:
+ sites = cls.get_sites_without_backup_in_interval(interval, backup_type)
+ servers_with_backups = frappe.get_all(
+ "Server",
+ {"status": "Active", "skip_scheduled_backups": False},
+ pluck="name",
+ )
+ filters: dict[str, Any] = {
+ "name": ("in", sites),
+ "server": ("in", servers_with_backups),
+ }
+
+ if backup_type == "Logical":
+ filters["skip_scheduled_logical_backups"] = False
+ filters["schedule_logical_backup_at_custom_time"] = False
+ elif backup_type == "Physical":
+ filters["skip_scheduled_physical_backups"] = False
+ filters["schedule_physical_backup_at_custom_time"] = False
+
return frappe.get_all(
"Site",
- {"name": ("in", sites), "skip_scheduled_backups": False},
+ filters,
["name", "timezone", "server"],
order_by="server",
ignore_ifnull=True,
)
@classmethod
- def get_sites_without_backup_in_interval(cls, interval: int) -> List[str]:
+ def get_sites_without_backup_in_interval(
+ cls, interval: int, backup_type: Literal["Logical", "Physical"] = "Logical"
+ ) -> list[str]:
"""Return active sites that haven't had backup taken in interval hours."""
interval_hrs_ago = frappe.utils.add_to_date(None, hours=-interval)
all_sites = set(
@@ -1265,61 +3203,674 @@ def get_sites_without_backup_in_interval(cls, interval: int) -> List[str]:
)
return list(
all_sites
- - set(cls.get_sites_with_backup_in_interval(interval_hrs_ago))
- - set(cls.get_sites_with_pending_backups(interval_hrs_ago))
+ - set(cls.get_sites_with_backup_in_interval(interval_hrs_ago, backup_type))
+ - set(cls.get_sites_with_pending_backups(interval_hrs_ago, backup_type))
)
# TODO: query using creation time of account request for actual new sites <03-09-21, Balamurali M> #
@classmethod
- def get_sites_with_pending_backups(cls, interval_hrs_ago: datetime) -> List[str]:
+ def get_sites_with_pending_backups(
+ cls, interval_hrs_ago: datetime, backup_type: Literal["Logical", "Physical"] = "Logical"
+ ) -> list[str]:
return frappe.get_all(
"Site Backup",
{
"status": ("in", ["Running", "Pending"]),
"creation": (">=", interval_hrs_ago),
+ "physical": bool(backup_type == "Physical"),
+ },
+ pluck="site",
+ )
+
+ @classmethod
+ def get_sites_with_backup_in_interval(
+ cls, interval_hrs_ago, backup_type: Literal["Logical", "Physical"] = "Logical"
+ ) -> list[str]:
+ return frappe.get_all(
+ "Site Backup",
+ {
+ "creation": (">", interval_hrs_ago),
+ "status": ("!=", "Failure"),
+ "owner": "Administrator",
+ "physical": bool(backup_type == "Physical"),
},
pluck="site",
+ ignore_ifnull=True,
+ )
+
+ @classmethod
+ def exists(cls, subdomain, domain) -> bool:
+ """Check if subdomain is available"""
+ banned_domains = frappe.get_all("Blocked Domain", {"block_for_all": 1}, pluck="name")
+ if banned_domains and subdomain in banned_domains:
+ return True
+ return bool(
+ frappe.db.exists("Blocked Domain", {"name": subdomain, "root_domain": domain})
+ or frappe.db.exists(
+ "Site",
+ {
+ "subdomain": subdomain,
+ "domain": domain,
+ "status": ("!=", "Archived"),
+ },
+ )
+ or frappe.db.exists(
+ "Site Domain",
+ f"{subdomain}.{domain}",
+ )
+ )
+
+ @frappe.whitelist()
+ def run_after_migrate_steps(self):
+ agent = Agent(self.server)
+ agent.run_after_migrate_steps(self)
+
+ @cached_property
+ def is_group_public(self):
+ return bool(frappe.get_cached_value("Release Group", self.group, "public"))
+
+ @frappe.whitelist()
+ def get_actions(self):
+ actions = [
+ {
+ "action": "Activate site",
+ "description": "Activate site to make it accessible on the internet",
+ "button_label": "Activate",
+ "condition": self.status in ["Inactive", "Broken"],
+ "doc_method": "activate",
+ },
+ {
+ "action": "Manage database users",
+ "description": "Manage users and permissions for your site database",
+ "button_label": "Manage",
+ "doc_method": "dummy",
+ "condition": not self.hybrid_site and has_permission("Site Database User"),
+ },
+ {
+ "action": "Notification Settings",
+ "description": "Manage notification settings for your site",
+ "button_label": "Manage",
+ "doc_method": "dummy",
+ },
+ {
+ "action": "Schedule backup",
+ "description": "Schedule a backup for this site",
+ "button_label": "Schedule",
+ "doc_method": "schedule_backup",
+ },
+ {
+ "action": "Transfer site",
+ "description": "Transfer ownership of this site to another team",
+ "button_label": "Transfer",
+ "doc_method": "send_change_team_request",
+ },
+ {
+ "action": "Version upgrade",
+ "description": "Upgrade your site to a major version",
+ "button_label": "Upgrade",
+ "doc_method": "upgrade",
+ "condition": self.status in ["Active", "Broken", "Inactive"],
+ },
+ {
+ "action": "Change region",
+ "description": "Move your site to a different region",
+ "button_label": "Change",
+ "doc_method": "change_region",
+ "condition": self.status in ["Active", "Broken", "Inactive"],
+ },
+ {
+ "action": "Change bench group",
+ "description": "Move your site to a different bench group",
+ "button_label": "Change",
+ "doc_method": "change_bench",
+ "condition": self.status in ["Active", "Broken", "Inactive"],
+ },
+ {
+ "action": "Change server",
+ "description": "Move your site to a different server",
+ "button_label": "Change",
+ "doc_method": "change_server",
+ "condition": self.status in ["Active", "Broken", "Inactive"] and not self.is_group_public,
+ },
+ {
+ "action": "Clear cache",
+ "description": "Clear cache on your site",
+ "button_label": "Clear",
+ "doc_method": "clear_site_cache",
+ },
+ {
+ "action": "Deactivate site",
+ "description": "Deactivating will put the site in maintenance mode and make it inaccessible",
+ "button_label": "Deactivate",
+ "condition": self.status == "Active",
+ "doc_method": "deactivate",
+ },
+ {
+ "action": "Migrate site",
+ "description": "Run bench migrate command on your site",
+ "button_label": "Migrate",
+ "doc_method": "migrate",
+ "group": "Dangerous Actions",
+ },
+ {
+ "action": "Restore with files",
+ "description": "Restore with database, public and private files",
+ "button_label": "Restore",
+ "doc_method": "restore_site_from_files",
+ "group": "Dangerous Actions",
+ },
+ {
+ "action": "Restore from an existing site",
+ "description": "Restore with database, public and private files from another site",
+ "button_label": "Restore",
+ "doc_method": "restore_site_from_files",
+ "group": "Dangerous Actions",
+ },
+ {
+ "action": "Reset site",
+ "description": "Reset your site database to a clean state",
+ "button_label": "Reset",
+ "doc_method": "reinstall",
+ "group": "Dangerous Actions",
+ },
+ {
+ "action": "Drop site",
+ "description": "When you drop your site, all site data is deleted forever",
+ "button_label": "Drop",
+ "doc_method": "archive",
+ "group": "Dangerous Actions",
+ },
+ ]
+
+ return [d for d in actions if d.get("condition", True)]
+
+ @property
+ def hybrid_site(self) -> bool:
+ return bool(frappe.get_cached_value("Server", self.server, "is_self_hosted"))
+
+ @property
+ def pending_for_long(self) -> bool:
+ if self.status != "Pending":
+ return False
+ return (frappe.utils.now_datetime() - self.modified).total_seconds() > 60 * 60 * 4 # 4 hours
+
+ @frappe.whitelist()
+ def fetch_bench_from_agent(self):
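+ # Ask the agent which benches actually contain this site; update the record
+ # only when exactly one bench matches.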
+ agent = Agent(self.server)
+ benches_with_this_site = []
+ for bench in agent.get("server")["benches"].values():
+ if self.name in bench["sites"]:
+ benches_with_this_site.append(bench["name"])
+ if len(benches_with_this_site) == 1:
+ frappe.db.set_value("Site", self.name, "bench", benches_with_this_site[0])
+
+ @cached_property
+ def is_on_dedicated_plan(self):
+ return bool(frappe.db.get_value("Site Plan", self.plan, "dedicated_server_plan"))
+
+ @frappe.whitelist()
+ def forcefully_remove_site(self, bench):
+ """Bypass all agent/press callbacks and just remove this site from the target bench/server"""
+ from press.utils import get_mariadb_root_password
+
+ frappe.only_for("System Manager")
+
+ if bench == self.bench:
+ frappe.throw("Use Archive Site action to remove site from current bench")
+
+ # Mimic archive_site method in the agent.py
+ server = frappe.db.get_value("Bench", bench, ["server"])
+ data = {
+ "mariadb_root_password": get_mariadb_root_password(self),
+ "force": True,
+ }
+
+ response = {"server": server, "bench": bench}
+ agent = Agent(server)
+ result = agent.request("POST", f"benches/{bench}/sites/{self.name}/archive", data, raises=False)
+ if "job" in result:
+ job = result["job"]
+ response["job"] = job
+ else:
+ response["error"] = result["error"]
+ self.add_comment(
+ text=f"{frappe.session.user} attempted to forcefully remove site from {bench}.{json.dumps(response, indent=1)} "
+ )
+ return response
+
+ @dashboard_whitelist()
+ def fetch_database_table_schema(self, reload=False):
+ """
+ Store dump in redis cache
+ """
+ key_for_schema = f"database_table_schema__data:{self.name}"
+ key_for_schema_status = (
+ f"database_table_schema__status:{self.name}" # 1 - loading, 2 - done, None - not available
+ )
+
+ if reload:
+ frappe.cache().delete_value(key_for_schema)
+ frappe.cache().delete_value(key_for_schema_status)
+
+ status = cint(frappe.cache().get_value(key_for_schema_status))
+ if status:
+ if status == 1:
+ return {
+ "loading": True,
+ "data": [],
+ }
+ if status == 2:
+ return {
+ "loading": False,
+ "data": json.loads(frappe.cache().get_value(key_for_schema)),
+ }
+
+ # Check whether an agent job was created within the last 5 minutes and is still pending/running,
+ # to prevent duplicate agent job creation due to a race condition
+ if not frappe.db.exists(
+ "Agent Job",
+ {
+ "job_type": "Fetch Database Table Schema",
+ "site": self.name,
+ "status": ["in", ["Undelivered", "Pending", "Running"]],
+ "creation": (">", frappe.utils.add_to_date(None, minutes=-5)),
+ },
+ ):
+ # create the agent job and put it in loading state
+ frappe.cache().set_value(key_for_schema_status, 1, expires_in_sec=600)
+ Agent(self.server).fetch_database_table_schema(
+ self, include_index_info=True, include_table_size=True
+ )
+ return {
+ "loading": True,
+ "data": [],
+ }
+
+ @dashboard_whitelist()
+ def fetch_database_processes(self):
+ agent = Agent(self.server)
+ if agent.should_skip_requests():
+ return None
+ return agent.fetch_database_processes(self)
+
+ @dashboard_whitelist()
+ def kill_database_process(self, id):
+ agent = Agent(self.server)
+ if agent.should_skip_requests():
+ return None
+ processes = agent.fetch_database_processes(self)
+ if not processes:
+ return None
+ is_found_pid = False
+ for process in processes:
+ if str(process["id"]) == str(id):
+ is_found_pid = True
+ break
+ if not is_found_pid:
+ return None
+ return agent.kill_database_process(self, id)
+
+ @dashboard_whitelist()
+ def run_sql_query_in_database(self, query: str, commit: bool):
+ if not query:
+ return {"success": False, "output": "SQL Query cannot be empty"}
+ doc = frappe.get_doc(
+ {
+ "doctype": "SQL Playground Log",
+ "site": self.name,
+ "team": self.team,
+ "query": query,
+ "committed": commit,
+ }
+ )
+ response = Agent(self.server).run_sql_query_in_database(self, query, commit)
+ doc.is_successful = response.get("success", False)
+ doc.insert(ignore_permissions=True)
+ return response
+
+ @dashboard_whitelist()
+ def suggest_database_indexes(self):
+ from press.press.report.mariadb_slow_queries.mariadb_slow_queries import get_data as get_slow_queries
+
+ existing_agent_job_name = frappe.db.exists(
+ "Agent Job",
+ {
+ "site": self.name,
+ "status": ("not in", ("Failure", "Delivery Failure")),
+ "job_type": "Analyze Slow Queries",
+ "creation": (
+ ">",
+ frappe.utils.add_to_date(None, minutes=-30),
+ ),
+ "retry_count": 0,
+ },
+ )
+
+ if existing_agent_job_name:
+ existing_agent_job = frappe.get_doc("Agent Job", existing_agent_job_name)
+ if existing_agent_job.status == "Success":
+ return {
+ "loading": False,
+ "data": json.loads(existing_agent_job.data).get("result", []),
+ }
+ return {
+ "loading": True,
+ "data": [],
+ }
+
+ # fetch slow queries of last 7 days
+ slow_queries = get_slow_queries(
+ frappe._dict(
+ {
+ "database": self.database_name,
+ "start_datetime": frappe.utils.add_to_date(None, days=-7),
+ "stop_datetime": frappe.utils.now_datetime(),
+ "search_pattern": ".*",
+ "max_lines": 1000,
+ "normalize_queries": True,
+ }
+ )
+ )
+ slow_queries = [{"example": x["example"], "normalized": x["query"]} for x in slow_queries]
+ if len(slow_queries) == 0:
+ return {
+ "loading": False,
+ "data": [],
+ }
+ agent = Agent(self.server)
+ agent.analyze_slow_queries(self, slow_queries)
+
+ return {
+ "loading": True,
+ "data": [],
+ }
+
+ @dashboard_whitelist()
+ def add_database_index(self, table, column):
+ record = frappe.db.exists(
+ "Agent Job",
+ {
+ "site": self.name,
+ "status": ["in", ["Undelivered", "Running", "Pending"]],
+ "job_type": "Add Database Index",
+ },
+ )
+ if record:
+ return {
+ "success": False,
+ "message": "There is already a job running for adding database index. Please wait until finished.",
+ "job_name": record,
+ }
+ doctype = get_doctype_name(table)
+ agent = Agent(self.server)
+ job = agent.add_database_index(self, doctype=doctype, columns=[column])
+ return {
+ "success": True,
+ "message": "Database index will be added on site.",
+ "job_name": job.name,
+ }
+
+ @dashboard_whitelist()
+ @site_action(["Active"])
+ def fetch_certificate(self, domain: str):
+ tls_certificate: TLSCertificate = frappe.get_last_doc("TLS Certificate", {"domain": domain})
+ tls_certificate.obtain_certificate()
+
+ def fetch_database_name(self):
+ if not self.database_name:
+ synced = self._sync_config_info()
+ if not synced:
+ frappe.throw("Unable to fetch database name. Please try again.")
+ self.save()
+ return self.database_name
+
+ def is_binlog_indexer_running(self):
+ return bool(
+ frappe.db.get_value("Database Server", self.database_server_name, "is_binlog_indexer_running")
+ )
+
+ def is_binlog_indexing_enabled(self):
+ return bool(
+ frappe.db.get_value(
+ "Database Server", self.database_server_name, "enable_binlog_indexing", cache=True
+ )
+ )
+
+ @dashboard_whitelist()
+ def binlog_indexing_service_status(self):
+ hosted_on_shared_server = bool(
+ frappe.db.get_value("Database Server", self.database_server_name, "public", cache=True)
+ )
+ data = {
+ "enabled": self.is_binlog_indexing_enabled(),
+ "indexer_running": self.is_binlog_indexer_running(),
+ "database_server": self.database_server_name,
+ "hosted_on_shared_server": hosted_on_shared_server,
+ "database_server_memory": 0
+ if hosted_on_shared_server
+ else frappe.db.get_value("Database Server", self.database_server_name, "ram", cache=True),
+ }
+ # If the site is hosted on a shared server, only allow `System User` to view the binlog indexing service status
+ if hosted_on_shared_server and not frappe.local.system_user():
+ data["enabled"] = False
+
+ # Turn off hosted_on_shared_server flag if the user is System User
+ if frappe.local.system_user():
+ data["hosted_on_shared_server"] = False
+ data["database_server_memory"] = (
+ frappe.db.get_value("Database Server", self.database_server_name, "ram", cache=True),
+ )
+
+ return data
+
+ @dashboard_whitelist()
+ def fetch_binlog_timeline( # noqa: C901
+ self,
+ start: int,
+ end: int,
+ table: str | None = None,
+ query_type: str | None = None,
+ event_size_comparator: Literal["gt", "lt"] | None = None,
+ event_size: int | None = None,
+ ):
+ if (not self.is_binlog_indexing_enabled()) or (self.is_binlog_indexer_running()):
+ frappe.throw("Binlog indexing service is not enabled or in maintenance.")
+
+ if start >= end:
+ frappe.throw("Invalid time range. Start time must be less than end time.")
+
+ data = self.database_server_agent.get_binlogs_timeline(
+ start=start,
+ end=end,
+ table=table,
+ type=query_type,
+ database=self.fetch_database_name(),
+ event_size_comparator=event_size_comparator,
+ event_size=event_size,
+ )
+
+ start_timestamp = data.get("start_timestamp")
+ end_timestamp = data.get("end_timestamp")
+ interval = data.get("interval")
+ dataset = {}
+ time_series = []
+ blank_data = {
+ "INSERT": 0,
+ "UPDATE": 0,
+ "DELETE": 0,
+ "SELECT": 0,
+ "OTHER": 0,
+ }
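+ # Build fixed-interval buckets across the returned window, each seeded with zero counts per query type.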
+ current_timestamp = start_timestamp
+ while current_timestamp < end_timestamp:
+ dataset[current_timestamp] = blank_data.copy()
+ time_series.append(current_timestamp)
+ current_timestamp += interval
+
+ if current_timestamp == end_timestamp:
+ time_series.append(end_timestamp)
+ dataset[current_timestamp] = blank_data.copy()
+ elif len(time_series) > 0 and time_series[-1] != end_timestamp:
+ dataset[end_timestamp] = blank_data.copy()
+
+ # TODO optimize this loop
+ if len(time_series) > 1:
+ for i in range(len(time_series) - 1):
+ start_timestamp = time_series[i]
+ end_timestamp = time_series[i + 1]
+ key = f"{start_timestamp}:{end_timestamp}"
+ if key not in data["results"]:
+ continue
+
+ query_data: dict = data["results"][key]
+ dataset[start_timestamp]["INSERT"] = query_data.get("INSERT", 0)
+ dataset[start_timestamp]["UPDATE"] = query_data.get("UPDATE", 0)
+ dataset[start_timestamp]["DELETE"] = query_data.get("DELETE", 0)
+ dataset[start_timestamp]["SELECT"] = query_data.get("SELECT", 0)
+ dataset[start_timestamp]["OTHER"] = query_data.get("OTHER", 0)
+
+ # Convert dataset to list of dicts
+ converted_dataset = []
+ for timestamp in sorted(dataset.keys()):
+ entry = {"timestamp": timestamp}
+ entry.update(dataset[timestamp])
+ converted_dataset.append(entry)
+
+ return {
+ "dataset": converted_dataset,
+ "tables": sorted(data.get("tables", [])),
+ }
+
+ @dashboard_whitelist()
+ def search_binlogs(
+ self,
+ start: int,
+ end: int,
+ query_type: str | None = None,
+ table: str | None = None,
+ search_string: str | None = None,
+ event_size_comparator: Literal["gt", "lt"] | None = None,
+ event_size: int | None = None,
+ ):
+ if (not self.is_binlog_indexing_enabled()) or (self.is_binlog_indexer_running()):
+ frappe.throw("Binlog indexing service is not enabled or in maintenance.")
+
+ if start >= end:
+ frappe.throw("Invalid time range. Start time must be less than end time.")
+
+ if (end - start) > 60 * 60 * 6:
+ frappe.throw("Binlog search is limited to 6 hours. Please select a smaller time range.")
+
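+ # Treat empty filter strings from the dashboard as "no filter"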
+ if not table:
+ table = None
+ if not search_string:
+ search_string = None
+
+ return self.database_server_agent.search_binlogs(
+ start=start,
+ end=end,
+ type=query_type,
+ database=self.fetch_database_name(),
+ table=table,
+ search_str=search_string,
+ event_size_comparator=event_size_comparator,
+ event_size=event_size,
+ )
+
+ @dashboard_whitelist()
+ def fetch_queries_from_binlog(self, row_ids: dict[str, list[int]]):
+ # Don't allow to fetch more than 100 rows at a time
+ total_row_ids = sum(len(v) for v in row_ids.values())
+ if total_row_ids > 100:
+ frappe.throw("Cannot fetch more than 100 rows at a time from binlog.")
+
+ return self.database_server_agent.get_binlog_queries(
+ row_ids=row_ids, database=self.fetch_database_name()
+ )
+
+ @dashboard_whitelist()
+ def get_communication_infos(self):
+ return (
+ [{"channel": c.channel, "type": c.type, "value": c.value} for c in self.communication_infos]
+ if hasattr(self, "communication_infos")
+ else []
)
- @classmethod
- def get_sites_with_backup_in_interval(cls, interval_hrs_ago) -> List[str]:
- return frappe.get_all(
- "Site Backup",
- {
- "creation": (">=", interval_hrs_ago),
- "status": ("!=", "Failure"),
- "owner": "Administrator",
- },
- pluck="site",
- ignore_ifnull=True,
+ @dashboard_whitelist()
+ def update_communication_infos(self, values: list[dict]):
+ from press.press.doctype.communication_info.communication_info import (
+ update_communication_infos as update_infos,
)
- @classmethod
- def exists(cls, subdomain, domain) -> bool:
- """Check if subdomain is available"""
- banned_domains = frappe.get_all("Blocked Domain", {"block_for_all": 1}, pluck="name")
- if banned_domains and subdomain in banned_domains:
- return True
- else:
- return bool(
- frappe.db.exists("Blocked Domain", {"name": subdomain, "root_domain": domain})
- or frappe.db.exists(
- "Site", {"subdomain": subdomain, "domain": domain, "status": ("!=", "Archived")}
- )
+ update_infos("Site", self.name, values)
+
+ @property
+ def recent_offsite_backups_(self):
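+ # Base query for offsite backups with files taken in the last 24 hours; callers narrow it by status.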
+ site_backups = frappe.qb.DocType("Site Backup")
+ return (
+ frappe.qb.from_(site_backups)
+ .select(site_backups.name)
+ .where(
+ (site_backups.site == self.name)
+ & (site_backups.with_files == 1)
+ & (site_backups.offsite == 1)
+ & (site_backups.creation > frappe.utils.add_to_date(frappe.utils.now(), days=-1))
)
+ )
- @frappe.whitelist()
- def run_after_migrate_steps(self):
- agent = Agent(self.server)
- agent.run_after_migrate_steps(self)
+ @property
+ def recent_offsite_backup_exists(self):
+ site_backups = frappe.qb.DocType("Site Backup")
+ return self.recent_offsite_backups_.where(
+ (site_backups.status == "Success") & (site_backups.files_availability == "Available")
+ ).run()
- @frappe.whitelist()
- def enable_read_write(self):
- self.enable_database_access("read_write")
+ @property
+ def recent_offsite_backup_pending(self):
+ site_backups = frappe.qb.DocType("Site Backup")
+ return self.recent_offsite_backups_.where(site_backups.status.isin(["Pending", "Running"])).run()
- @frappe.whitelist()
- def disable_read_write(self):
- self.enable_database_access("read_only")
+ @property
+ def is_on_standalone(self):
+ return bool(frappe.db.get_value("Server", self.server, "is_standalone"))
+
+ @cached_property
+ def last_backup(self) -> SiteBackup | None:
+ return get_last_doc(
+ "Site Backup",
+ {
+ "site": self.name,
+ "with_files": True,
+ "offsite": True,
+ "status": "Success",
+ "files_availability": "Available",
+ },
+ )
+
+ def get_estimated_duration_for_server_change(self) -> str | None:
+ """2x backup duration (backup + restore) in seconds"""
+ last_backup = self.last_backup
+ if not last_backup:
+ return None
+ d: timedelta = frappe.get_value("Agent Job", last_backup.job, "duration")
+ if not d:
+ return None
+ return str(timedelta(seconds=round(d.total_seconds() * 2)))
+
+
+def check_allowed_actions(creation_failed, function_name, action_name_refined):
+ allowed_actions_after_creation_failure = [
+ "restore_site_from_physical_backup",
+ "restore_site_from_files",
+ "restore_site",
+ "archive",
+ ]
+ if creation_failed and function_name not in allowed_actions_after_creation_failure:
+ frappe.throw(
+ _(
+ "Site action '{0}' is blocked because site creation has failed. Please restore from a backup or drop this site to create a new one."
+ ).format(frappe.bold(action_name_refined))
+ )
def site_cleanup_after_archive(site):
@@ -1328,19 +3879,18 @@ def site_cleanup_after_archive(site):
release_name(site)
-def delete_site_subdomain(site):
- site_doc = frappe.get_doc("Site", site)
- domain = frappe.get_doc("Root Domain", site_doc.domain)
- is_standalone = frappe.get_value("Server", site_doc.server, "is_standalone")
- if is_standalone:
- proxy_server = site_doc.server
+def delete_site_subdomain(site_name):
+ site: Site = frappe.get_doc("Site", site_name)
+ if site.is_on_standalone:
+ proxy_server = site.server
else:
- proxy_server = frappe.get_value("Server", site_doc.server, "proxy_server")
- site_doc.remove_dns_record(domain, proxy_server, site)
+ proxy_server = frappe.get_value("Server", site.server, "proxy_server")
+ site.remove_dns_record(proxy_server)
def delete_site_domains(site):
domains = frappe.get_all("Site Domain", {"site": site})
+ frappe.db.set_value("Site", site, "host_name", None)
for domain in domains:
frappe.delete_doc("Site Domain", domain.name)
@@ -1353,7 +3903,55 @@ def release_name(name):
frappe.rename_doc("Site", name, new_name)
-def process_new_site_job_update(job):
+def process_fetch_database_table_schema_job_update(job):
+ key_for_schema = f"database_table_schema__data:{job.site}"
+ key_for_schema_status = (
+ f"database_table_schema__status:{job.site}" # 1 - loading, 2 - done, None - not available
+ )
+
+ if job.status in ["Failure", "Delivery Failure"]:
+ frappe.cache().delete_value(key_for_schema)
+ frappe.cache().delete_value(key_for_schema_status)
+ return
+
+ if job.status == "Success":
+ """
+ Support old agent versions
+ Remove this once all agents are updated
+ """
+ data = json.loads(job.data)
+ is_old_agent = False
+
+ if len(data) > 0 and isinstance(data[next(iter(data.keys()))], list):
+ is_old_agent = True
+
+ if is_old_agent:
+ data_copy = data.copy()
+ data = {}
+ for key, value in data_copy.items():
+ data[key] = {
+ "columns": value,
+ "size": {
+ "data_length": 0,
+ "index_length": 0,
+ "data_free": 0,
+ "total_size": 0,
+ }, # old agent api doesn't have size info
+ }
+ for column in data[key]["columns"]:
+ column["index_info"] = {
+ "index_usage": {x: 0 for x in column["indexes"]}, # just fill some dummy value
+ "indexes": column["indexes"],
+ "is_indexed": len(column["indexes"]) > 0,
+ }
+
+ frappe.cache().set_value(key_for_schema, json.dumps(data), expires_in_sec=6000)
+ frappe.cache().set_value(key_for_schema_status, 2, expires_in_sec=6000)
+
+
+def process_new_site_job_update(job): # noqa: C901
+ site_status = frappe.get_value("Site", job.site, "status", for_update=True)
+
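+ # Site creation runs as a pair of agent jobs (the site job plus "Add Site to Upstream");
+ # the site's status is derived from the statuses of both.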
other_job_types = {
"Add Site to Upstream": ("New Site", "New Site from Backup"),
"New Site": ("Add Site to Upstream",),
@@ -1361,21 +3959,25 @@ def process_new_site_job_update(job):
}[job.job_type]
first = job.status
- second = frappe.get_all(
+ second = frappe.get_value(
"Agent Job",
- fields=["status"],
- filters={"job_type": ("in", other_job_types), "site": job.site},
- )[0].status
+ {"job_type": ("in", other_job_types), "site": job.site},
+ "status",
+ for_update=True,
+ )
backup_tests = frappe.get_all(
- "Backup Restoration Test", dict(test_site=job.site, status="Running"), pluck="name"
+ "Backup Restoration Test",
+ dict(test_site=job.site, status="Running"),
+ pluck="name",
)
if "Success" == first == second:
updated_status = "Active"
- marketplace_app_hook(site=job.site, op="install")
- elif "Failure" in (first, second):
+ marketplace_app_hook(site=Site("Site", job.site), op="install")
+ elif "Failure" in (first, second) or "Delivery Failure" in (first, second):
updated_status = "Broken"
+ frappe.db.set_value("Site", job.site, "creation_failed", frappe.utils.now())
elif "Running" in (first, second):
updated_status = "Installing"
else:
@@ -1388,15 +3990,109 @@ def process_new_site_job_update(job):
"Pending": "Running",
}
- site_status = frappe.get_value("Site", job.site, "status")
if updated_status != site_status:
if backup_tests:
frappe.db.set_value(
- "Backup Restoration Test", backup_tests[0], "status", status_map[updated_status]
+ "Backup Restoration Test",
+ backup_tests[0],
+ "status",
+ status_map[updated_status],
)
frappe.db.commit()
frappe.db.set_value("Site", job.site, "status", updated_status)
+ create_site_status_update_webhook_event(job.site)
+
+ if job.status == "Success":
+ request_data = json.loads(job.request_data)
+ if "create_user" in request_data:
+ frappe.db.set_value("Site", job.site, "additional_system_user_created", True)
+ frappe.db.commit()
+
+ # Update in product trial request
+ if job.job_type in ("New Site", "Add Site to Upstream") and updated_status in (
+ "Active",
+ "Broken",
+ ):
+ update_product_trial_request_status_based_on_site_status(
+ job.site, updated_status == "Active", job.data
+ )
+
+ # check if new bench related to a site group deploy
+ site_group_deploy = frappe.db.get_value(
+ "Site Group Deploy",
+ {
+ "site": job.site,
+ "status": "Creating Site",
+ },
+ )
+ if site_group_deploy:
+ frappe.get_doc("Site Group Deploy", site_group_deploy).update_site_group_deploy_on_process_job(job)
+
+
+def update_product_trial_request_status_based_on_site_status(site, is_site_active, error=None):
+ records = frappe.get_list("Product Trial Request", filters={"site": site}, fields=["name"])
+ if not records:
+ return
+ product_trial_request = frappe.get_doc("Product Trial Request", records[0].name, for_update=True)
+ if is_site_active:
+ product_trial_request.prefill_setup_wizard_data()
+ product_trial_request.status = "Site Created"
+ product_trial_request.save(ignore_permissions=True)
+ else:
+ product_trial_request.status = "Error"
+ product_trial_request.error = error
+ product_trial_request.save(ignore_permissions=True)
+
+
+def process_complete_setup_wizard_job_update(job):
+ records = frappe.get_list("Product Trial Request", filters={"site": job.site}, fields=["name"])
+ if not records:
+ return
+ product_trial_request = frappe.get_doc("Product Trial Request", records[0].name, for_update=True)
+ if job.status == "Success":
+ frappe.db.set_value("Site", job.site, "additional_system_user_created", True)
+ if frappe.get_all("Site Domain", filters={"site": job.site, "status": ["!=", "Active"]}):
+ product_trial_request.status = "Adding Domain"
+ else:
+ product_trial_request.status = "Site Created"
+ product_trial_request.site_creation_completed_on = now_datetime()
+ product_trial_request.save(ignore_permissions=True)
+ elif job.status in ("Failure", "Delivery Failure"):
+ product_trial_request.status = "Error"
+ product_trial_request.save(ignore_permissions=True)
+
+
+def process_add_domain_job_update(job):
+ records = frappe.get_list("Product Trial Request", filters={"site": job.site}, fields=["name"])
+ if not records:
+ return
+
+ product_trial_request = frappe.get_doc("Product Trial Request", records[0].name, for_update=True)
+ if job.status == "Success":
+ if product_trial_request.status == "Site Created":
+ return
+
+ product_trial_request.status = "Site Created"
+ product_trial_request.site_creation_completed_on = now_datetime()
+
+ product_trial_request.save(ignore_permissions=True)
+
+ site_domain = json.loads(job.request_data).get("domain")
+ site = Site("Site", job.site)
+ auto_generated_domain = site.host_name
+ site.host_name = site_domain
+ site.save()
+ site.set_redirect(auto_generated_domain)
+
+ elif job.status in ("Failure", "Delivery Failure"):
+ # retry once to avoid a race condition
+ if job.status == "Failure" and int(job.retry_count) < 1:
+ job.db_set("retry_count", job.retry_count + 1)
+ job.retry_in_place()
+ else:
+ product_trial_request.status = "Error"
+ product_trial_request.save(ignore_permissions=True)
def get_remove_step_status(job):
@@ -1409,19 +4105,58 @@ def get_remove_step_status(job):
"Agent Job Step",
{"step_name": remove_step_name, "agent_job": job.name},
"status",
+ for_update=True,
+ )
+
+
+def get_backup_restoration_tests(site: str) -> list[str]:
+ return frappe.get_all(
+ "Backup Restoration Test",
+ dict(test_site=site, status=("in", ("Success", "Archive Failed"))),
+ pluck="name",
)
-def process_archive_site_job_update(job):
+def update_backup_restoration_test(site: str, status: str):
+ backup_tests = get_backup_restoration_tests(site)
+
+ if not backup_tests:
+ return
+ if status == "Archived":
+ frappe.db.set_value(
+ "Backup Restoration Test",
+ backup_tests[0],
+ "status",
+ "Archive Successful",
+ )
+ elif status == "Broken":
+ frappe.db.set_value("Backup Restoration Test", backup_tests[0], "status", "Archive Failed")
+
+
+def process_archive_site_job_update(job: "AgentJob"): # noqa: C901
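+ # Archive jobs reported against secondary servers don't drive site status; bail out early.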
+ with suppress(Exception):
+ is_secondary_server = frappe.db.get_value("Server", job.upstream, "is_secondary")
+ if is_secondary_server:
+ return
+
+ site_status = frappe.get_value("Site", job.site, "status", for_update=True)
+
other_job_type = {
"Remove Site from Upstream": "Archive Site",
"Archive Site": "Remove Site from Upstream",
}[job.job_type]
- other_job = frappe.get_last_doc(
- "Agent Job",
- filters={"job_type": other_job_type, "site": job.site},
- )
+ try:
+ other_job = frappe.get_last_doc(
+ "Agent Job",
+ filters={"job_type": other_job_type, "site": job.site},
+ for_update=True,
+ )
+
+ except frappe.DoesNotExistError:
+ # Site is already renamed, the other job beat us to it
+ # Our work is done
+ return
first = get_remove_step_status(job)
second = get_remove_step_status(other_job)
@@ -1434,31 +4169,97 @@ def process_archive_site_job_update(job):
updated_status = "Archived"
elif "Failure" in (first, second):
updated_status = "Broken"
+ elif "Delivery Failure" == first == second:
+ updated_status = "Active"
+ elif "Delivery Failure" in (first, second):
+ updated_status = "Broken"
else:
updated_status = "Pending"
- site_status = frappe.get_value("Site", job.site, "status")
if updated_status != site_status:
frappe.db.set_value(
"Site",
job.site,
{"status": updated_status, "archive_failed": updated_status != "Archived"},
)
+ update_backup_restoration_test(job.site, updated_status)
if updated_status == "Archived":
site_cleanup_after_archive(job.site)
def process_install_app_site_job_update(job):
updated_status = {
- "Pending": "Active",
+ "Pending": "Pending",
+ "Running": "Installing",
+ "Success": "Active",
+ "Failure": "Active",
+ "Delivery Failure": "Active",
+ }[job.status]
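+ # Failures map back to "Active": the site itself stays usable even if the app install fails.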
+
+ site_status = frappe.get_value("Site", job.site, "status")
+ if updated_status != site_status:
+ site: Site = frappe.get_doc("Site", job.site)
+ site.sync_apps()
+ frappe.db.set_value("Site", job.site, "status", updated_status)
+ create_site_status_update_webhook_event(job.site)
+
+
+def process_uninstall_app_site_job_update(job):
+ updated_status = {
+ "Pending": "Pending",
"Running": "Installing",
"Success": "Active",
"Failure": "Active",
+ "Delivery Failure": "Active",
}[job.status]
site_status = frappe.get_value("Site", job.site, "status")
if updated_status != site_status:
+ site: Site = frappe.get_doc("Site", job.site)
+ site.sync_apps()
+ frappe.db.set_value("Site", job.site, "status", updated_status)
+ create_site_status_update_webhook_event(job.site)
+
+
+def process_marketplace_hooks_for_backup_restore(apps_from_backup: set[str], site: Site):
+ site_apps = set([app.app for app in site.apps])
+ apps_to_uninstall = site_apps - apps_from_backup
+ for app in apps_from_backup:
+ if (
+ frappe.get_cached_value("Marketplace App", app, "subscription_type") == "Free"
+ ): # like india_compliance; no need to check subscription
+ marketplace_app_hook(app=app, site=site, op="install")
+ for app in apps_to_uninstall:
+ if (
+ frappe.get_cached_value("Marketplace App", app, "subscription_type") == "Free"
+ ): # like india_compliance; no need to check subscription
+ marketplace_app_hook(app=app, site=site, op="uninstall")
+
+
+def process_restore_job_update(job, force=False):
+ """
+ force: sync the apps table even if the site status is unchanged
+ """
+ updated_status = {
+ "Pending": "Pending",
+ "Running": "Installing",
+ "Success": "Active",
+ "Failure": "Broken",
+ "Delivery Failure": "Active",
+ }[job.status]
+
+ site_status = frappe.get_value("Site", job.site, "status")
+ if force or updated_status != site_status:
+ if job.status == "Success":
+ apps_from_backup: list[str] = [line.split()[0] for line in job.output.splitlines() if line]
+ site = Site("Site", job.site)
+ process_marketplace_hooks_for_backup_restore(set(apps_from_backup), site)
+ site.set_apps(apps_from_backup)
+ frappe.db.set_value("Site", job.site, "creation_failed", None)
+ elif job.status == "Failure":
+ frappe.db.set_value("Site", job.site, "creation_failed", frappe.utils.now())
frappe.db.set_value("Site", job.site, "status", updated_status)
+ create_site_status_update_webhook_event(job.site)
def process_reinstall_site_job_update(job):
@@ -1467,11 +4268,17 @@ def process_reinstall_site_job_update(job):
"Running": "Installing",
"Success": "Active",
"Failure": "Broken",
+ "Delivery Failure": "Active",
}[job.status]
site_status = frappe.get_value("Site", job.site, "status")
if updated_status != site_status:
frappe.db.set_value("Site", job.site, "status", updated_status)
+ create_site_status_update_webhook_event(job.site)
+ if job.status == "Success":
+ frappe.db.set_value("Site", job.site, "setup_wizard_complete", 0)
+ frappe.db.set_value("Site", job.site, "database_name", None)
+ frappe.db.set_value("Site", job.site, "additional_system_user_created", False)
def process_migrate_site_job_update(job):
@@ -1480,16 +4287,18 @@ def process_migrate_site_job_update(job):
"Running": "Updating",
"Success": "Active",
"Failure": "Broken",
+ "Delivery Failure": "Active",
}[job.status]
if updated_status == "Active":
- site = frappe.get_doc("Site", job.site)
+ site: Site = frappe.get_doc("Site", job.site)
if site.status_before_update:
- site.reset_previous_status()
+ site.reset_previous_status(fix_broken=True)
return
site_status = frappe.get_value("Site", job.site, "status")
if updated_status != site_status:
frappe.db.set_value("Site", job.site, "status", updated_status)
+ create_site_status_update_webhook_event(job.site)
def get_rename_step_status(job):
@@ -1502,19 +4311,34 @@ def get_rename_step_status(job):
"Agent Job Step",
{"step_name": rename_step_name, "agent_job": job.name},
"status",
+ for_update=True,
)
-def process_rename_site_job_update(job):
+def process_rename_site_job_update(job): # noqa: C901
+ site_status = frappe.get_value("Site", job.site, "status", for_update=True)
+
other_job_type = {
"Rename Site": "Rename Site on Upstream",
"Rename Site on Upstream": "Rename Site",
}[job.job_type]
- other_job = frappe.get_last_doc(
- "Agent Job",
- filters={"job_type": other_job_type, "site": job.site},
- )
+ if job.job_type == "Rename Site" and job.status == "Success":
+ request_data = json.loads(job.request_data)
+ if "create_user" in request_data:
+ frappe.db.set_value("Site", job.site, "additional_system_user_created", True)
+
+ try:
+ other_job = frappe.get_last_doc(
+ "Agent Job",
+ filters={"job_type": other_job_type, "site": job.site},
+ for_update=True,
+ )
+ except frappe.DoesNotExistError:
+ # Site is already renamed, the other job beat us to it
+ # Our work is done
+ return
+
first = get_rename_step_status(job)
second = get_rename_step_status(other_job)
@@ -1523,30 +4347,21 @@ def process_rename_site_job_update(job):
# update job obj with new name
job.reload()
updated_status = "Active"
- from press.press.doctype.site.pool import create as create_pooled_sites
-
- create_pooled_sites()
elif "Failure" in (first, second):
updated_status = "Broken"
+ elif "Delivery Failure" == first == second:
+ updated_status = "Active"
+ elif "Delivery Failure" in (first, second):
+ updated_status = "Broken"
elif "Running" in (first, second):
updated_status = "Updating"
else:
updated_status = "Pending"
- site_status = frappe.get_value("Site", job.site, "status")
if updated_status != site_status:
frappe.db.set_value("Site", job.site, "status", updated_status)
-
-
-def process_add_proxysql_user_job_update(job):
- if job.status == "Success":
- frappe.db.set_value("Site", job.site, "is_database_access_enabled", True)
-
-
-def process_remove_proxysql_user_job_update(job):
- if job.status == "Success":
- frappe.db.set_value("Site", job.site, "is_database_access_enabled", False)
+ create_site_status_update_webhook_event(job.site)
def process_move_site_to_bench_job_update(job):
@@ -1560,17 +4375,20 @@ def process_move_site_to_bench_job_update(job):
dest_group = frappe.db.get_value("Bench", dest_bench, "group")
move_site_step_status = frappe.db.get_value(
- "Agent Job Step", {"step_name": "Move Site", "agent_job": job.name}, "status"
+ "Agent Job Step",
+ {"step_name": "Move Site", "agent_job": job.name},
+ "status",
)
if move_site_step_status == "Success":
frappe.db.set_value("Site", job.site, "bench", dest_bench)
frappe.db.set_value("Site", job.site, "group", dest_group)
if updated_status:
frappe.db.set_value("Site", job.site, "status", updated_status)
+ create_site_status_update_webhook_event(job.site)
return
if job.status == "Success":
site = frappe.get_doc("Site", job.site)
- site.reset_previous_status()
+ site.reset_previous_status(fix_broken=True)
def update_records_for_rename(job):
@@ -1580,7 +4398,7 @@ def update_records_for_rename(job):
if new_name == job.site: # idempotency
return
- site = frappe.get_doc("Site", job.site)
+ site = frappe.get_doc("Site", job.site, for_update=True)
if site.host_name == job.site:
# Host name already updated in f server, no need to create another job
site._update_configuration({"host_name": f"https://{new_name}"})
@@ -1601,24 +4419,32 @@ def process_restore_tables_job_update(job):
site_status = frappe.get_value("Site", job.site, "status")
if updated_status != site_status:
if updated_status == "Active":
- frappe.get_doc("Site", job.site).reset_previous_status()
+ frappe.get_doc("Site", job.site).reset_previous_status(fix_broken=True)
else:
frappe.db.set_value("Site", job.site, "status", updated_status)
+ frappe.db.set_value("Site", job.site, "database_name", None)
+ create_site_status_update_webhook_event(job.site)
+
+
+def process_create_user_job_update(job):
+ if job.status == "Success":
+ frappe.db.set_value("Site", job.site, "additional_system_user_created", True)
+ update_product_trial_request_status_based_on_site_status(job.site, True)
+ elif job.status in ("Failure", "Delivery Failure"):
+ update_product_trial_request_status_based_on_site_status(job.site, False, job.data)
get_permission_query_conditions = get_permission_query_conditions_for_doctype("Site")
-def prepare_site(site: str, subdomain: str = None) -> Dict:
+def prepare_site(site: str, subdomain: str | None = None) -> dict:
# prepare site details
doc = frappe.get_doc("Site", site)
- sitename = subdomain if subdomain else "brt-" + doc.subdomain
+ site_name = subdomain if subdomain else "brt-" + doc.subdomain
app_plans = [app.app for app in doc.apps]
backups = frappe.get_all(
"Site Backup",
- dict(
- status="Success", with_files=1, site=site, files_availability="Available", offsite=1
- ),
+ dict(status="Success", site=site, files_availability="Available", offsite=1),
pluck="name",
)
if not backups:
@@ -1626,19 +4452,434 @@ def prepare_site(site: str, subdomain: str = None) -> Dict:
backup = frappe.get_doc("Site Backup", backups[0])
files = {
- "config": "", # not necessary for test sites
+ "config": backup.remote_config_file,
"database": backup.remote_database_file,
"public": backup.remote_public_file,
"private": backup.remote_private_file,
}
- site_dict = {
+ return {
"domain": frappe.db.get_single_value("Press Settings", "domain"),
"plan": doc.plan,
- "name": sitename,
+ "name": site_name,
"group": doc.group,
"selected_app_plans": {},
"apps": app_plans,
"files": files,
}
- return site_dict
+
+@frappe.whitelist()
+def options_for_new(group: str | None = None, selected_values=None) -> dict:
+ domain = frappe.db.get_single_value("Press Settings", "domain")
+ selected_values = frappe.parse_json(selected_values) if selected_values else frappe._dict()
+
+ versions = []
+ bench = None
+ apps: list[dict] = []
+ clusters = []
+
+ versions_filters: dict[str, Any] = {"public": True}
+ if not group:
+ versions_filters.update({"status": ("!=", "End of Life")})
+
+ versions = frappe.db.get_all(
+ "Frappe Version",
+ ["name", "default", "status", "number"],
+ versions_filters,
+ order_by="number desc",
+ )
+ for v in versions:
+ v.label = v.name
+ v.value = v.name
+
+ if selected_values.version:
+ bench = _get_bench_for_new(selected_values.version)
+ apps = _get_apps_of_bench(selected_values.version, bench) if bench else []
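+ # offer only clusters that have a bench built from the same deploy candidate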
+ cluster_names = unique(
+ frappe.db.get_all(
+ "Bench",
+ filters={"candidate": frappe.db.get_value("Bench", bench, "candidate")},
+ pluck="cluster",
+ )
+ )
+ clusters = frappe.db.get_all(
+ "Cluster",
+ filters={"name": ("in", cluster_names), "public": True},
+ fields=["name", "title", "image", "beta"],
+ )
+ for cluster in clusters:
+ cluster.label = cluster.title
+ cluster.value = cluster.name
+
+ return {
+ "domain": domain,
+ "bench": bench,
+ "versions": versions,
+ "apps": apps,
+ "clusters": clusters,
+ }
+
+
+def _get_bench_for_new(version):
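+ # exclude release groups that are restricted to particular Site Plans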
+ restricted_release_group_names = frappe.db.get_all(
+ "Site Plan Release Group",
+ pluck="release_group",
+ filters={"parenttype": "Site Plan", "parentfield": "release_groups"},
+ )
+ release_group = frappe.db.get_value(
+ "Release Group",
+ fieldname=["name", "`default`", "title"],
+ filters={
+ "enabled": 1,
+ "public": 1,
+ "version": version,
+ "name": ("not in", restricted_release_group_names),
+ },
+ order_by="creation desc",
+ as_dict=1,
+ )
+ if not release_group:
+ return None
+
+ return frappe.db.get_value(
+ "Bench",
+ filters={"status": "Active", "group": release_group.name},
+ order_by="creation desc",
+ )
+
+
+def _get_apps_of_bench(version, bench):
+ team = frappe.local.team().name
+ bench_apps = frappe.db.get_all("Bench App", {"parent": bench}, pluck="source")
+ app_sources = frappe.get_all(
+ "App Source",
+ [
+ "name",
+ "app",
+ "repository_url",
+ "repository",
+ "repository_owner",
+ "branch",
+ "team",
+ "public",
+ "app_title",
+ "frappe",
+ ],
+ filters={"name": ("in", bench_apps), "frappe": 0},
+ or_filters={"public": True, "team": team},
+ )
+ for app in app_sources:
+ app.label = app.app_title
+ app.value = app.app
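+ # keep apps in the same order as they appear on the bench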
+ apps = sorted(app_sources, key=lambda x: bench_apps.index(x.name))
+ marketplace_apps = frappe.db.get_all(
+ "Marketplace App",
+ fields=["title", "image", "description", "app", "route"],
+ filters={"app": ("in", [app.app for app in apps])},
+ )
+ for app in apps:
+ marketplace_details = find(marketplace_apps, lambda x: x.app == app.app)
+ if marketplace_details:
+ app.update(marketplace_details)
+ app.plans = get_plans_for_app(app.app, version)
+ return apps
+
+
+def sync_sites_setup_wizard_complete_status():
+ team_name = frappe.get_value("Team", {"user": "Administrator"}, "name")
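+ # retry each site at most 18 times; sample 100 random sites per run to spread the load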
+ sites = frappe.get_all(
+ "Site",
+ filters={
+ "status": "Active",
+ "setup_wizard_complete": 0,
+ "setup_wizard_status_check_retries": ("<", 18),
+ "setup_wizard_status_check_next_retry_on": ("<=", frappe.utils.now()),
+ "team": ("!=", team_name),
+ },
+ pluck="name",
+ order_by="RAND()",
+ limit=100,
+ )
+ for site in sites:
+ frappe.enqueue(
+ "press.press.doctype.site.site.fetch_setup_wizard_complete_status_if_site_exists",
+ site=site,
+ queue="sync",
+ job_id=f"fetch_setup_wizard_complete_status:{site}",
+ deduplicate=True,
+ )
+
+
+def fetch_setup_wizard_complete_status_if_site_exists(site):
+ if not frappe.db.exists("Site", site):
+ return
+ with suppress(frappe.DoesNotExistError):
+ frappe.get_doc("Site", site).fetch_setup_wizard_complete_status()
+
+
+def create_site_status_update_webhook_event(site: str):
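+ # no webhook events for sites owned by the Administrator team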
+ record = frappe.get_doc("Site", site)
+ if record.team == "Administrator":
+ return
+ create_webhook_event("Site Status Update", record, record.team)
+
+
+class SiteToArchive(frappe._dict):
+ name: str
+ plan: str
+ team: str
+ bench: str
+
+
+def get_suspended_time(site: str):
+ return frappe.get_all(
+ "Site Activity",
+ filters={"site": site, "action": "Suspend Site"},
+ fields=["creation"],
+ limit=1,
+ order_by="creation desc",
+ )[0].creation
+
+
+def archive_suspended_site(site_dict: SiteToArchive):
+ archive_after_days = ARCHIVE_AFTER_SUSPEND_DAYS
+
+ suspended_days = frappe.utils.date_diff(frappe.utils.today(), get_suspended_time(site_dict.name))
+
+ if suspended_days <= archive_after_days:
+ return
+
+ if frappe.db.get_value("Bench", site_dict.bench, "managed_database_service"):
+ return
+
+ site = Site("Site", site_dict.name)
+ # take an offsite backup before archive
+ if site.plan == "USD 10" and not site.recent_offsite_backup_exists:
+ if not site.recent_offsite_backup_pending:
+ site.backup(with_files=True, offsite=True)
+ return # last backup ongoing
+ site.archive(reason="Archive suspended site")
+
+
+def archive_suspended_sites():
+ archive_at_once = 4
+
+ filters = [
+ ["status", "=", "Suspended"],
+ ["trial_end_date", "is", "not set"],
+ ["plan", "!=", "ERPNext Trial"],
+ ]
+
+ sites = frappe.db.get_all(
+ "Site", filters=filters, fields=["name", "team", "plan", "bench"], order_by="creation asc"
+ )
+
+ archived_now = 0
+ for site_dict in sites:
+ try:
+ if archived_now > archive_at_once:
+ break
+ archive_suspended_site(site_dict)
+ frappe.db.commit()
+ archived_now += 1
+ except (frappe.QueryDeadlockError, frappe.QueryTimeoutError):
+ frappe.db.rollback()
+ except Exception:
+ frappe.log_error(title="Suspended Site Archive Error")
+ frappe.db.rollback()
+
+ signup_cluster = frappe.db.get_value("Saas Settings", "erpnext", "cluster")
+ agent = frappe.get_doc("Proxy Server", {"cluster": signup_cluster}).agent
+ if archived_now:
+ agent.reload_nginx()
+
+
+def send_warning_mail_regarding_sites_exceeding_disk_usage():
+ if not frappe.db.get_single_value("Press Settings", "enforce_storage_limits"):
+ return
+
+ free_teams = frappe.get_all("Team", filters={"free_account": True, "enabled": True}, pluck="name")
+ sites_with_no_mail_sent_previously = frappe.get_all(
+ "Site",
+ filters={
+ "status": "Active",
+ "free": False,
+ "team": ("not in", free_teams),
+ "site_usage_exceeded": 1,
+ "last_site_usage_warning_mail_sent_on": ("is", "not set"),
+ },
+ pluck="name",
+ )
+
+ sites_with_recurring_alerts = frappe.get_all(
+ "Site",
+ filters={
+ "status": "Active",
+ "free": False,
+ "team": ("not in", free_teams),
+ "site_usage_exceeded": 1,
+ "last_site_usage_warning_mail_sent_on": ("<", frappe.utils.nowdate()),
+ },
+ pluck="name",
+ )
+
+ sites = list(set(sites_with_no_mail_sent_previously + sites_with_recurring_alerts))
+
+ for site in sites:
+ if has_job_timeout_exceeded():
+ break
+ try:
+ site_info = frappe.get_value(
+ "Site",
+ site,
+ ["current_disk_usage", "current_database_usage", "site_usage_exceeded_on"],
+ as_dict=True,
+ )
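+ # usage values are percentages of the plan limit; 120 leaves a 20% grace margin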
+ if site_info.current_disk_usage < 120 and site_info.current_database_usage < 120:
+ # Final check if site is still exceeding limits
+ continue
+ frappe.sendmail(
+ recipients=get_communication_info("Email", "Site Activity", "Site", site),
+ subject=f"Action Required: Site {site} exceeded plan limits",
+ template="site_exceeded_disk_usage_warning",
+ args={
+ "site": site,
+ "current_disk_usage": site_info.current_disk_usage,
+ "current_database_usage": site_info.current_database_usage,
+ "no_of_days_left_to_suspend": 14
+ - (frappe.utils.date_diff(frappe.utils.nowdate(), site_info.site_usage_exceeded_on) or 0),
+ },
+ )
+ frappe.db.set_value("Site", site, "last_site_usage_warning_mail_sent_on", frappe.utils.now())
+ frappe.db.commit()
+ except rq.timeouts.JobTimeoutException:
+ frappe.db.rollback()
+ return
+ except Exception as e:
+ print(e)
+ frappe.db.rollback()
+
+
+def suspend_sites_exceeding_disk_usage_for_last_14_days():
+ """Suspend sites if they have exceeded database or disk usage limits for the last 14 days."""
+
+ if not frappe.db.get_single_value("Press Settings", "enforce_storage_limits"):
+ return
+
+ free_teams = frappe.get_all("Team", filters={"free_account": True, "enabled": True}, pluck="name")
+ active_sites = frappe.get_all(
+ "Site",
+ filters={
+ "status": "Active",
+ "free": False,
+ "team": ("not in", free_teams),
+ "site_usage_exceeded": 1,
+ "site_usage_exceeded_on": ("<", frappe.utils.add_to_date(frappe.utils.now(), days=-14)),
+ },
+ fields=["name", "team", "current_database_usage", "current_disk_usage"],
+ )
+
+ for site in active_sites:
+ if site.current_database_usage > 120 or site.current_disk_usage > 120:
+ # Check once again and suspend if still exceeds limits
+ site: Site = frappe.get_doc("Site", site.name)
+ site.suspend(reason="Site Usage Exceeds Plan limits", skip_reload=True)
+
+
+def create_subscription_for_trial_sites():
+ # Get sites that are in "Site Created" status and have no entry in "Site Plan Change".
+ # For those sites, invoke "Create Subscription", which creates entries in "Site Plan Change" and "Subscription".
+ active_sites = frappe.db.sql(
+ """
+ SELECT trial.site, producttrial.trial_plan
+ FROM `tabProduct Trial Request` trial
+ LEFT JOIN `tabSite Plan Change` siteplanchange
+ ON trial.site = siteplanchange.name
+ LEFT JOIN `tabProduct Trial` producttrial
+ ON trial.product_trial = producttrial.name
+ WHERE trial.is_subscription_created = 0
+ AND siteplanchange.name IS NULL
+ AND trial.status = "Site Created"
+ LIMIT 25;
+ """,
+ as_dict=True,
+ )
+ for trial_site in active_sites:
+ if has_job_timeout_exceeded():
+ return
+ try:
+ site: Site = frappe.get_doc("Site", trial_site.site)
+ site.create_subscription(trial_site.trial_plan)
+ frappe.db.set_value(
+ "Product Trial Request",
+ {"site": trial_site.site},
+ {"is_subscription_created": 1},
+ update_modified=False,
+ )
+ frappe.db.commit()
+ except Exception:
+ frappe.db.rollback()
+ log_error(title="Creating subscription for trial sites", site=trial_site)
+
+
+def get_updates_between_current_and_next_apps(
+ current_apps: DF.Table[BenchApp],
+ next_apps: DF.Table[DeployCandidateApp],
+):
+ from press.utils import get_app_tag
+
+ apps = []
+ for app in next_apps:
+ bench_app = find(current_apps, lambda x: x.app == app.app)
+ current_hash = bench_app.hash if bench_app else None
+ source = frappe.get_doc("App Source", app.source)
+
+ will_branch_change = False
+ current_branch = source.branch
+ if bench_app:
+ current_source = frappe.get_doc("App Source", bench_app.source)
+ will_branch_change = current_source.branch != source.branch
+ current_branch = current_source.branch
+
+ current_tag = (
+ get_app_tag(source.repository, source.repository_owner, current_hash) if current_hash else None
+ )
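+ # prefer the pullable hash when set, else fall back to the app's full hash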
+ next_hash = app.pullable_hash or app.hash
+ apps.append(
+ {
+ "title": app.title,
+ "app": app.app,
+ "repository": source.repository,
+ "repository_owner": source.repository_owner,
+ "repository_url": source.repository_url,
+ "branch": source.branch,
+ "current_hash": current_hash,
+ "current_tag": current_tag,
+ "next_hash": next_hash,
+ "next_tag": get_app_tag(source.repository, source.repository_owner, next_hash),
+ "will_branch_change": will_branch_change,
+ "current_branch": current_branch,
+ "update_available": not current_hash or current_hash != next_hash,
+ }
+ )
+ return apps
+
+
+def archive_creation_failed_sites():
+ creation_failure_retention_date = frappe.utils.add_days(
+ frappe.utils.now(), -CREATION_FAILURE_RETENTION_DAYS
+ )
+
+ filters = [
+ ["creation_failed", "!=", None],
+ ["creation_failed", "<", creation_failure_retention_date],
+ ["status", "=", "Broken"],
+ ]
+
+ failed_sites = frappe.db.get_all("Site", filters=filters, fields=["name"], pluck="name")
+
+ for site in failed_sites:
+ try:
+ site = Site("Site", site)
+ site.archive(
+ reason=f"Site creation failed and was not restored within {CREATION_FAILURE_RETENTION_DAYS} days"
+ )
+ frappe.db.commit()
+ except Exception:
+ frappe.log_error(title="Creation Failed Site Archive Error")
+ frappe.db.rollback()
diff --git a/press/press/doctype/site/site_usages.py b/press/press/doctype/site/site_usages.py
index 55a0fa1ffdd..af871b8b3f3 100644
--- a/press/press/doctype/site/site_usages.py
+++ b/press/press/doctype/site/site_usages.py
@@ -1,13 +1,20 @@
-import frappe
import functools
-from press.press.doctype.plan.plan import get_plan_config
-from press.api.analytics import get_current_cpu_usage
+from typing import TYPE_CHECKING
+
+import frappe
+import rq
+
+from press.api.analytics import get_current_cpu_usage_for_sites_on_server
+from press.press.doctype.site_plan.site_plan import get_plan_config
from press.utils import log_error
+if TYPE_CHECKING:
+ from press.press.doctype.site.site import Site
+
@functools.lru_cache(maxsize=128)
def get_cpu_limit(plan):
- return frappe.db.get_value("Plan", plan, "cpu_time_per_day") * 3600 * 1000_000
+ return frappe.db.get_value("Site Plan", plan, "cpu_time_per_day") * 3600 * 1000_000
@functools.lru_cache(maxsize=128)
@@ -17,7 +24,7 @@ def get_cpu_limits(plan):
@functools.lru_cache(maxsize=128)
def get_disk_limits(plan):
- return frappe.db.get_value("Plan", plan, ["max_database_usage", "max_storage_usage"])
+ return frappe.db.get_value("Site Plan", plan, ["max_database_usage", "max_storage_usage"])
@functools.lru_cache(maxsize=128)
@@ -25,38 +32,53 @@ def get_config(plan):
return get_plan_config(plan)
-def get_cpu_counter(site):
- cpu_usage = get_current_cpu_usage(site)
- return cpu_usage
-
-
def update_cpu_usages():
"""Update CPU Usages field Site.current_cpu_usage across all Active sites from Site Request Log"""
+ servers = frappe.get_all("Server", filters={"status": "Active", "is_primary": True}, pluck="name")
+ for server in servers:
+ frappe.enqueue(
+ "press.press.doctype.site.site_usages.update_cpu_usage_server",
+ server=server,
+ queue="long",
+ deduplicate=True,
+ job_id=f"update_cpu_usages:{server}",
+ )
+
+
+def update_cpu_usage_server(server):
+ usage = get_current_cpu_usage_for_sites_on_server(server)
sites = frappe.get_all(
- "Site", filters={"status": "Active"}, fields=["name", "plan", "current_cpu_usage"]
+ "Site",
+ filters={"status": "Active", "server": server},
+ fields=["name", "plan", "current_cpu_usage"],
)
for site in sites:
- cpu_usage = get_cpu_counter(site.name)
- cpu_limit = get_cpu_limits(site.plan)
- latest_cpu_usage = int((cpu_usage / cpu_limit) * 100)
+ if site.name not in usage:
+ continue
+ try:
+ cpu_usage = usage[site.name]
+ cpu_limit = get_cpu_limits(site.plan)
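+ # express usage as a percentage of the plan's daily CPU allowance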
+ latest_cpu_usage = int((cpu_usage / cpu_limit) * 100)
- if site.current_cpu_usage != latest_cpu_usage:
- try:
+ if site.current_cpu_usage != latest_cpu_usage:
site_doc = frappe.get_doc("Site", site.name)
site_doc.current_cpu_usage = latest_cpu_usage
site_doc.save()
frappe.db.commit()
- except Exception():
- log_error("Site CPU Usage Update Error", cpu_usage=cpu_usage, cpu_limit=cpu_limit)
- frappe.db.rollback()
+ except rq.timeouts.JobTimeoutException:
+ frappe.db.rollback()
+ return
+ except Exception:
+ log_error("Site CPU Usage Update Error", site=site, cpu_usage=cpu_usage, cpu_limit=cpu_limit)
+ frappe.db.rollback()
def update_disk_usages():
"""Update Storage and Database Usages fields Site.current_database_usage and Site.current_disk_usage for sites that have Site Usage documents"""
latest_disk_usages = frappe.db.sql(
- r"""WITH disk_usage AS (
+ """WITH disk_usage AS (
SELECT
`site`,
`database`,
@@ -65,7 +87,7 @@ def update_disk_usages():
FROM
`tabSite Usage`
WHERE
- `site` NOT LIKE '%cloud.archived%'
+ `creation` > %s
),
joined AS (
SELECT
@@ -85,13 +107,13 @@ def update_disk_usages():
ON
u.site = site.name
LEFT JOIN
- `tabPlan` plan
+ `tabSite Plan` plan
ON
s.plan = plan.name
WHERE
`rank` = 1 AND
s.`document_type` = 'Site' AND
- s.`enabled`
+ site.`status` != "Archived"
)
SELECT
j.site,
@@ -103,16 +125,20 @@ def update_disk_usages():
ABS(j.latest_database_usage - j.current_database_usage ) > 1 OR
ABS(j.latest_disk_usage - j.current_disk_usage) > 1
""",
+ values=(frappe.utils.add_to_date(frappe.utils.now(), hours=-12),),
as_dict=True,
)
for usage in latest_disk_usages:
try:
- site = frappe.get_doc("Site", usage.site)
+ site: Site = frappe.get_doc("Site", usage.site, for_update=True)
site.current_database_usage = usage.latest_database_usage
site.current_disk_usage = usage.latest_disk_usage
+ site.check_if_disk_usage_exceeded(save=False)
site.save()
frappe.db.commit()
+ except frappe.DoesNotExistError:
+ frappe.db.rollback()
except Exception:
log_error("Site Disk Usage Update Error", usage=usage)
frappe.db.rollback()
diff --git a/press/press/doctype/site/sync.py b/press/press/doctype/site/sync.py
new file mode 100644
index 00000000000..ff7e4e5c34b
--- /dev/null
+++ b/press/press/doctype/site/sync.py
@@ -0,0 +1,27 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+import frappe
+
+
+def sync_setup_wizard_status():
+ sites = frappe.get_all(
+ "Site",
+ {
+ "status": "Active",
+ "setup_wizard_complete": False,
+ "is_standby": False,
+ "domain": ("in", ("erpnext.com", "frappe.cloud", "frappehr.com", "frappedesk.com")),
+ },
+ pluck="name",
+ order_by="RAND()",
+ limit=20,
+ )
+
+ for site_name in sites:
+ site = frappe.get_doc("Site", site_name)
+ try:
+ site.is_setup_wizard_complete()
+ frappe.db.commit()
+ except Exception:
+ frappe.db.rollback()
diff --git a/press/press/doctype/site/test_backups.py b/press/press/doctype/site/test_backups.py
index f7ace4227e1..3ed537e3fae 100644
--- a/press/press/doctype/site/test_backups.py
+++ b/press/press/doctype/site/test_backups.py
@@ -1,11 +1,15 @@
-import unittest
-from datetime import datetime, timedelta
+from datetime import timedelta
from unittest.mock import MagicMock, Mock, patch
import frappe
+from frappe.tests.utils import FrappeTestCase
from press.press.doctype.agent_job.agent_job import AgentJob
-from press.press.doctype.site.backups import ScheduledBackupJob
+from press.press.doctype.site.backups import (
+ ScheduledBackupJob,
+ schedule_logical_backups_for_sites_with_backup_time,
+ schedule_physical_backups_for_sites_with_backup_time,
+)
from press.press.doctype.site.site import Site
from press.press.doctype.site.test_site import create_test_site
from press.press.doctype.site_backup.test_site_backup import create_test_site_backup
@@ -14,7 +18,7 @@
@patch("press.press.doctype.site.backups.frappe.db.commit", new=MagicMock)
@patch("press.press.doctype.site.backups.frappe.db.rollback", new=MagicMock)
@patch.object(AgentJob, "after_insert", new=Mock())
-class TestScheduledBackupJob(unittest.TestCase):
+class TestScheduledBackupJob(FrappeTestCase):
def tearDown(self):
frappe.db.rollback()
@@ -25,19 +29,21 @@ def _with_files_count(self, site: str):
return frappe.db.count("Site Backup", {"site": site, "with_files": True})
def setUp(self):
+ super().setUp()
+
self.interval = 6
frappe.db.set_single_value("Press Settings", "backup_interval", 6)
def _interval_hrs_ago(self):
- return datetime.now() - timedelta(hours=self.interval)
+ return frappe.utils.now_datetime() - timedelta(hours=self.interval)
def _create_site_requiring_backup(self, **kwargs):
- return create_test_site(
- creation=self._interval_hrs_ago() - timedelta(hours=1), **kwargs
- )
+ return create_test_site(creation=self._interval_hrs_ago() - timedelta(hours=1), **kwargs)
@patch.object(
- ScheduledBackupJob, "is_backup_hour", new=lambda self, x: True # always backup hour
+ ScheduledBackupJob,
+ "is_backup_hour",
+ new=lambda self, x: True, # always backup hour
)
@patch.object(
ScheduledBackupJob,
@@ -46,7 +52,7 @@ def _create_site_requiring_backup(self, **kwargs):
)
def test_offsite_taken_once_per_day(self):
site = self._create_site_requiring_backup()
- job = ScheduledBackupJob()
+ job = ScheduledBackupJob(backup_type="Logical")
offsite_count_before = self._offsite_count(site.name)
job.start()
@@ -55,17 +61,19 @@ def test_offsite_taken_once_per_day(self):
self.assertEqual(offsite_count_after, offsite_count_before + 1)
offsite_count_before = self._offsite_count(site.name)
- job = ScheduledBackupJob()
+ job = ScheduledBackupJob(backup_type="Logical")
job.start()
offsite_count_after = self._offsite_count(site.name)
self.assertEqual(offsite_count_after, offsite_count_before)
@patch.object(
- ScheduledBackupJob, "is_backup_hour", new=lambda self, x: True # always backup hour
+ ScheduledBackupJob,
+ "is_backup_hour",
+ new=lambda self, x: True, # always backup hour
)
def test_with_files_taken_once_per_day(self):
site = self._create_site_requiring_backup()
- job = ScheduledBackupJob()
+ job = ScheduledBackupJob(backup_type="Logical")
offsite_count_before = self._with_files_count(site.name)
job.start()
@@ -74,7 +82,7 @@ def test_with_files_taken_once_per_day(self):
self.assertEqual(offsite_count_after, offsite_count_before + 1)
offsite_count_before = self._with_files_count(site.name)
- job = ScheduledBackupJob()
+ job = ScheduledBackupJob(backup_type="Logical")
job.start()
offsite_count_after = self._with_files_count(site.name)
self.assertEqual(offsite_count_after, offsite_count_before)
@@ -82,7 +90,7 @@ def test_with_files_taken_once_per_day(self):
def _create_x_sites_on_1_bench(self, x):
site = self._create_site_requiring_backup()
bench = site.bench
- for i in range(x - 1):
+ for _i in range(x - 1):
self._create_site_requiring_backup(bench=bench)
def test_limit_number_of_sites_backed_up(self):
@@ -90,7 +98,7 @@ def test_limit_number_of_sites_backed_up(self):
self._create_x_sites_on_1_bench(2)
limit = 3
- job = ScheduledBackupJob()
+ job = ScheduledBackupJob(backup_type="Logical")
sites_num_old = len(job.sites)
job.limit = limit
@@ -103,7 +111,7 @@ def test_limit_number_of_sites_backed_up(self):
"Success", # fake succesful backup
)
- job = ScheduledBackupJob()
+ job = ScheduledBackupJob(backup_type="Logical")
sites_num_new = len(job.sites)
self.assertLess(sites_num_new, sites_num_old)
@@ -128,3 +136,103 @@ def test_sites_considered_for_backup(self):
sites_for_backup = [site.name for site in sites]
self.assertIn(site_2.name, sites_for_backup)
+
+ @patch.object(Site, "backup")
+ def test_site_with_logical_backup_time_taken_at_right_time(self, mock_backup):
+ site: Site = self._create_site_requiring_backup()
+ site.schedule_logical_backup_at_custom_time = True
+ site.append(
+ "logical_backup_times",
+ {
+ "backup_time": "00:00",
+ },
+ )
+ site.save()
+ with self.freeze_time("2021-01-01 01:00"):
+ schedule_logical_backups_for_sites_with_backup_time()
+ mock_backup.assert_not_called()
+ with self.freeze_time("2021-01-01 00:00"):
+ schedule_logical_backups_for_sites_with_backup_time()
+ mock_backup.assert_called_once()
+ job = ScheduledBackupJob(backup_type="Logical")
+ self.assertEqual(len(job.sites), 0) # site with backup time should be skipped
+
+ @patch.object(Site, "backup")
+ def test_site_with_physical_backup_time_taken_at_right_time(self, mock_backup):
+ site: Site = self._create_site_requiring_backup()
+ site.skip_scheduled_physical_backups = False
+ site.schedule_physical_backup_at_custom_time = True
+ site.append(
+ "physical_backup_times",
+ {
+ "backup_time": "00:00:00",
+ },
+ )
+ site.save()
+ with self.freeze_time("2021-01-01 01:00"):
+ schedule_physical_backups_for_sites_with_backup_time()
+ mock_backup.assert_not_called()
+ with self.freeze_time("2021-01-01 00:00"):
+ schedule_physical_backups_for_sites_with_backup_time()
+ mock_backup.assert_called_once()
+ job = ScheduledBackupJob(backup_type="Physical")
+ self.assertEqual(len(job.sites), 0) # site with backup time should be skipped
+
+ @patch.object(Site, "backup")
+ def test_site_with_multiple_logical_backup_times(self, mock_backup):
+ site: Site = self._create_site_requiring_backup()
+ site.schedule_logical_backup_at_custom_time = True
+ site.append(
+ "logical_backup_times",
+ {
+ "backup_time": "01:00:00",
+ },
+ )
+ site.append(
+ "logical_backup_times",
+ {
+ "backup_time": "05:00:00",
+ },
+ )
+ site.append(
+ "logical_backup_times",
+ {
+ "backup_time": "12:00:00",
+ },
+ )
+ site.save()
+ with self.freeze_time("2021-01-01 00:00"):
+ schedule_logical_backups_for_sites_with_backup_time()
+ mock_backup.assert_not_called()
+
+ with self.freeze_time("2021-01-01 01:00"):
+ schedule_logical_backups_for_sites_with_backup_time()
+ mock_backup.assert_called_once()
+ mock_backup.reset_mock()
+
+ with self.freeze_time("2021-01-01 02:00"):
+ schedule_logical_backups_for_sites_with_backup_time()
+ mock_backup.assert_not_called()
+
+ with self.freeze_time("2021-01-01 03:00"):
+ schedule_logical_backups_for_sites_with_backup_time()
+ mock_backup.assert_not_called()
+
+ with self.freeze_time("2021-01-01 04:00"):
+ schedule_logical_backups_for_sites_with_backup_time()
+ mock_backup.assert_not_called()
+
+ with self.freeze_time("2021-01-01 05:00"):
+ schedule_logical_backups_for_sites_with_backup_time()
+ mock_backup.assert_called_once()
+ mock_backup.reset_mock()
+
+ with self.freeze_time("2021-01-01 06:00"):
+ schedule_logical_backups_for_sites_with_backup_time()
+ mock_backup.assert_not_called()
+
+ with self.freeze_time("2021-01-01 12:00"):
+ schedule_logical_backups_for_sites_with_backup_time()
+ mock_backup.assert_called_once()
+ mock_backup.reset_mock()
diff --git a/press/press/doctype/site/test_site.py b/press/press/doctype/site/test_site.py
index 6a8d2fdc810..d2125ab1937 100644
--- a/press/press/doctype/site/test_site.py
+++ b/press/press/doctype/site/test_site.py
@@ -1,46 +1,77 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe and Contributors
# See license.txt
+from __future__ import annotations
-
-import unittest
-from datetime import datetime
-from typing import Optional
+import json
+import typing
from unittest.mock import Mock, patch
import frappe
+import frappe.utils
+import responses
from frappe.model.naming import make_autoname
+from frappe.tests.utils import FrappeTestCase
-from press.press.doctype.agent_job.agent_job import AgentJob
+from press.exceptions import InsufficientSpaceOnServer
+from press.press.doctype.agent_job.agent_job import AgentJob, poll_pending_jobs
from press.press.doctype.app.test_app import create_test_app
+from press.press.doctype.app_source.app_source import AppSource
from press.press.doctype.database_server.test_database_server import (
create_test_database_server,
)
-from press.press.doctype.proxy_server.test_proxy_server import create_test_proxy_server
+from press.press.doctype.deploy_candidate_build.deploy_candidate_build import DeployCandidateBuild
from press.press.doctype.release_group.test_release_group import (
create_test_release_group,
)
-from press.press.doctype.server.test_server import create_test_server
-from press.press.doctype.site.site import Site, process_rename_site_job_update
+from press.press.doctype.remote_file.remote_file import RemoteFile
+from press.press.doctype.remote_file.test_remote_file import (
+ create_test_remote_file,
+)
+from press.press.doctype.server.server import BaseServer, Server
+from press.press.doctype.site.site import (
+ ARCHIVE_AFTER_SUSPEND_DAYS,
+ Site,
+ archive_suspended_sites,
+ process_rename_site_job_update,
+ suspend_sites_exceeding_disk_usage_for_last_14_days,
+)
+from press.press.doctype.site_activity.test_site_activity import create_test_site_activity
+from press.press.doctype.site_plan.test_site_plan import create_test_plan
+from press.press.doctype.team.test_team import create_test_team
+from press.press.doctype.telegram_message.telegram_message import TelegramMessage
+from press.saas.doctype.saas_settings.test_saas_settings import create_test_saas_settings
+from press.utils import get_current_team
+
+if typing.TYPE_CHECKING:
+ from datetime import datetime
-from press.press.doctype.release_group.release_group import ReleaseGroup
+ from press.press.doctype.bench.bench import Bench
+ from press.press.doctype.release_group.release_group import ReleaseGroup
+@patch.object(DeployCandidateBuild, "pre_build", new=Mock())
def create_test_bench(
- user: str = "Administrator",
- group: ReleaseGroup = None,
- server: str = None,
- apps: Optional[list[dict]] = None,
-):
+ user: str | None = None,
+ group: ReleaseGroup | None = None,
+ server: str | None = None,
+ apps: list[dict] | None = None,
+ creation: datetime | None = None,
+ public_server: bool = False,
+) -> "Bench":
"""
Create test Bench doc.
API call to agent will be faked when creating the doc.
"""
+ from press.press.doctype.proxy_server.test_proxy_server import create_test_proxy_server
+ from press.press.doctype.server.test_server import create_test_server
+
+ creation = creation or frappe.utils.now_datetime()
+ user = user or frappe.session.user
if not server:
proxy_server = create_test_proxy_server()
database_server = create_test_database_server()
- server = create_test_server(proxy_server.name, database_server.name).name
+ server = create_test_server(proxy_server.name, database_server.name, public=public_server).name
if not group:
app = create_test_app()
@@ -48,7 +79,7 @@ def create_test_bench(
name = frappe.mock("name")
candidate = group.create_deploy_candidate()
- candidate.db_set("docker_image", frappe.mock("url"))
+ deploy_candidate_build = candidate.build()
bench = frappe.get_doc(
{
"name": f"Test Bench{name}",
@@ -59,9 +90,12 @@ def create_test_bench(
"group": group.name,
"apps": apps,
"candidate": candidate.name,
+ "build": deploy_candidate_build["message"],
"server": server,
+ "docker_image": frappe.mock("url"),
}
).insert(ignore_if_duplicate=True)
+ bench.db_set("creation", creation)
bench.reload()
return bench
@@ -69,48 +103,79 @@ def create_test_bench(
def create_test_site(
subdomain: str = "",
new: bool = False,
- creation: datetime = None,
- bench: str = None,
- team: str = None,
- standby_for: Optional[str] = None,
+ creation: datetime | None = None,
+ bench: str | None = None,
+ server: str | None = None,
+ team: str | None = None,
+ standby_for_product: str | None = None,
+ apps: list[str] | None = None,
+ remote_database_file=None,
+ remote_public_file=None,
+ remote_private_file=None,
+ remote_config_file=None,
+ fake_agent_jobs: bool = False,
+ **kwargs,
) -> Site:
"""Create test Site doc.
Installs all apps present in bench.
"""
- if not creation:
- creation = datetime.now()
- if not subdomain:
- subdomain = make_autoname("test-site-.#####")
- if not bench:
- bench = create_test_bench()
- else:
- bench = frappe.get_doc("Bench", bench)
- group = frappe.get_doc("Release Group", bench.group)
-
- status = "Pending" if new else "Active"
- # on_update checks won't be triggered if not Active
+ from press.press.doctype.agent_job.test_agent_job import fake_agent_job
- site = frappe.get_doc(
- {
- "doctype": "Site",
- "status": status,
- "subdomain": subdomain,
- "server": bench.server,
- "bench": bench.name,
- "team": team or frappe.get_value("Team", {"user": "Administrator"}, "name"),
- "apps": [{"app": app.app} for app in group.apps],
- "admin_password": "admin",
- "standby_for": standby_for,
- }
- ).insert()
- site.db_set("creation", creation)
- site.reload()
- return site
+ if fake_agent_jobs:
+ context = fake_agent_job(
+ {
+ "New Site": {"status": "Success"},
+ "Update Site Configuration": {"status": "Success"},
+ "Add Site to Upstream": {"status": "Success"},
+ }
+ )
+ else:
+ context = patch.object(AgentJob, "enqueue_http_request", new=Mock())
+
+ with context:
+ creation = creation or frappe.utils.now_datetime()
+ subdomain = subdomain or make_autoname("test-site-.#####")
+ apps = [{"app": app} for app in apps] if apps else None
+ if not bench:
+ bench = create_test_bench(server=server, public_server=kwargs.get("public_server", False))
+ else:
+ bench = frappe.get_doc("Bench", bench)
+ group = frappe.get_doc("Release Group", bench.group)
+
+ status = "Pending" if new else "Active"
+ # on_update checks won't be triggered if not Active
+
+ site = frappe.get_doc(
+ {
+ "doctype": "Site",
+ "status": status,
+ "subdomain": subdomain,
+ "server": bench.server,
+ "bench": bench.name,
+ "team": team or get_current_team(),
+ "apps": apps or [{"app": app.app} for app in group.apps],
+ "admin_password": "admin",
+ "standby_for_product": standby_for_product,
+ "remote_database_file": remote_database_file,
+ "remote_public_file": remote_public_file,
+ "remote_private_file": remote_private_file,
+ "remote_config_file": remote_config_file,
+ }
+ )
+ site.update(kwargs)
+ site.insert()
+ site.db_set("creation", creation)
+ site.reload()
+ if fake_agent_jobs:
+ poll_pending_jobs()
+ site.reload()
+ return site
@patch.object(AgentJob, "enqueue_http_request", new=Mock())
-class TestSite(unittest.TestCase):
+@patch("press.press.doctype.site.site._change_dns_record", new=Mock())
+class TestSite(FrappeTestCase):
"""Tests for Site Document methods."""
def tearDown(self):
@@ -132,9 +197,7 @@ def test_new_sites_set_host_name_in_site_config(self):
"""Ensure new sites set host_name in site config in f server."""
with patch.object(Site, "_update_configuration") as mock_update_config:
site = create_test_site("testsubdomain", new=True)
- mock_update_config.assert_called_with(
- {"host_name": f"https://{site.name}"}, save=False
- )
+ mock_update_config.assert_called_with({"host_name": f"https://{site.name}"}, save=False)
def test_rename_updates_name(self):
"""Ensure rename changes name of site."""
@@ -168,9 +231,7 @@ def test_rename_creates_2_agent_jobs(self):
)
self.assertEqual(rename_jobs_count_after - rename_jobs_count_before, 1)
- self.assertEqual(
- rename_upstream_jobs_count_after - rename_upstream_jobs_count_before, 1
- )
+ self.assertEqual(rename_upstream_jobs_count_after - rename_upstream_jobs_count_before, 1)
def test_subdomain_update_renames_site(self):
"""Ensure updating subdomain renames site."""
@@ -191,9 +252,7 @@ def test_subdomain_update_renames_site(self):
)
self.assertEqual(rename_jobs_count_after - rename_jobs_count_before, 1)
- self.assertEqual(
- rename_upstream_jobs_count_after - rename_upstream_jobs_count_before, 1
- )
+ self.assertEqual(rename_upstream_jobs_count_after - rename_upstream_jobs_count_before, 1)
def _fake_succeed_rename_jobs(self):
rename_step_name_map = {
@@ -201,9 +260,7 @@ def _fake_succeed_rename_jobs(self):
"Rename Site on Upstream": "Rename Site File in Upstream Directory",
}
rename_job = frappe.get_last_doc("Agent Job", {"job_type": "Rename Site"})
- rename_upstream_job = frappe.get_last_doc(
- "Agent Job", {"job_type": "Rename Site on Upstream"}
- )
+ rename_upstream_job = frappe.get_last_doc("Agent Job", {"job_type": "Rename Site on Upstream"})
frappe.db.set_value(
"Agent Job Step",
{
@@ -334,3 +391,333 @@ def test_site_rename_doesnt_update_host_name_for_custom_domain(self):
if site.configuration[0].key == "host_name":
config_host = site.configuration[0].value
self.assertEqual(config_host, f"https://{site_domain1.name}")
+
+ @patch.object(RemoteFile, "download_link", new="http://test.com")
+ @patch.object(RemoteFile, "get_content", new=lambda x: {"a": "test"}) # type: ignore
+ def test_new_site_with_backup_files(self):
+ # no asserts here, just checking that it doesn't fail
+ database = create_test_remote_file().name
+ public = create_test_remote_file().name
+ private = create_test_remote_file().name
+ config = create_test_remote_file().name
+ plan = frappe.get_doc(
+ doctype="Site Plan",
+ name="Plan-10",
+ document_type="Site",
+ interval="Daily",
+ price_usd=30,
+ price_inr=30,
+ period=30,
+ ).insert()
+ create_test_site(
+ "test-site-restore",
+ remote_database_file=database,
+ remote_public_file=public,
+ remote_private_file=private,
+ remote_config_file=config,
+ subscription_plan=plan.name,
+ )
+
+ @patch.object(TelegramMessage, "enqueue", new=Mock())
+ @patch.object(BaseServer, "disk_capacity", new=Mock(return_value=100))
+ @patch.object(RemoteFile, "download_link", new="http://test.com")
+ @patch.object(RemoteFile, "get_content", new=lambda _: {"a": "test"})
+ @patch.object(RemoteFile, "exists", lambda _: True)
+ @patch.object(BaseServer, "calculated_increase_disk_size")
+ def test_restore_site_adds_storage_if_no_sufficient_storage_available_on_public_server(
+ self, mock_increase_disk_size: Mock
+ ):
+ """Ensure restore site adds storage if no sufficient storage available."""
+ site = create_test_site()
+ site.remote_database_file = create_test_remote_file(file_size=1024).name
+ site.remote_public_file = create_test_remote_file(file_size=1024).name
+ site.remote_private_file = create_test_remote_file(file_size=1024).name
+ db_server = frappe.get_value("Server", site.server, "database_server")
+
+ frappe.db.set_value("Server", site.server, "public", True)
+ frappe.db.set_value(
+ "Database Server",
+ db_server,
+ "public",
+ True,
+ )
+ with patch.object(BaseServer, "free_space", new=Mock(return_value=500 * 1024 * 1024 * 1024)):
+ site.restore_site()
+ mock_increase_disk_size.assert_not_called()
+
+ with patch.object(BaseServer, "free_space", new=Mock(return_value=0)):
+ site.restore_site()
+ mock_increase_disk_size.assert_called()
+
+ frappe.db.set_value("Server", site.server, "public", False)
+ frappe.db.set_value(
+ "Database Server",
+ db_server,
+ "public",
+ False,
+ )
+ with patch.object(Server, "free_space", new=Mock(return_value=0)):
+ self.assertRaises(InsufficientSpaceOnServer, site.restore_site)
+
+ def test_user_cannot_disable_auto_update_if_site_in_public_release_group(self):
+ rg = create_test_release_group([create_test_app()], public=True)
+ bench = create_test_bench(group=rg)
+ site = create_test_site("testsite", bench=bench.name)
+ site.skip_auto_updates = True
+ with self.assertRaises(frappe.exceptions.ValidationError) as context:
+ site.save(ignore_permissions=True)
+ self.assertTrue(
+ "Auto updates can't be disabled for sites on public benches" in str(context.exception)
+ )
+
+ def test_user_can_disable_auto_update_if_site_in_private_bench(self):
+ rg = create_test_release_group([create_test_app()], public=False)
+ bench = create_test_bench(group=rg)
+ site = create_test_site("testsite", bench=bench.name)
+ site.skip_auto_updates = True
+ site.save(ignore_permissions=True)
+
+ @responses.activate
+ @patch.object(AppSource, "validate_dependent_apps", new=Mock())
+ def test_sync_apps_updates_apps_child_table(self):
+ app1 = create_test_app()
+ app2 = create_test_app("erpnext", "ERPNext")
+ group = create_test_release_group([app1, app2])
+ bench = create_test_bench(group=group)
+ site = create_test_site(bench=bench.name)
+ responses.get(
+ f"https://{site.server}:443/agent/benches/{site.bench}/sites/{site.name}/apps",
+ json.dumps({"data": "frappe\nerpnext"}),
+ )
+ site.sync_apps()
+ self.assertEqual(site.apps[0].app, "frappe")
+ self.assertEqual(site.apps[1].app, "erpnext")
+ self.assertEqual(len(site.apps), 2)
+
+ def test_delete_multiple_config_creates_job_to_remove_multiple_site_config_keys(self):
+ site = create_test_site()
+ site._set_configuration(
+ [
+ {"key": "key1", "value": "value1", "type": "String"},
+ {"key": "key2", "value": "value2", "type": "String"},
+ ]
+ )
+ site.delete_multiple_config(["key1", "key2"])
+ update_job = frappe.get_last_doc(
+ "Agent Job", {"job_type": "Update Site Configuration", "site": site.name}
+ )
+ self.assertEqual(
+ json.loads(update_job.request_data).get("remove"),
+ ["key1", "key2"],
+ )
+
+ def test_apps_are_reordered_to_follow_bench_order(self):
+ app1 = create_test_app()
+ app2 = create_test_app("erpnext", "ERPNext")
+ app3 = create_test_app("crm", "Frappe CRM")
+ group = create_test_release_group([app1, app2, app3])
+ bench = create_test_bench(group=group)
+ site = create_test_site(bench=bench.name, apps=["frappe", "crm", "erpnext"])
+ site.reload()
+ self.assertEqual(site.apps[0].app, "frappe")
+ self.assertEqual(site.apps[1].app, "erpnext")
+ self.assertEqual(site.apps[2].app, "crm")
+
+ @patch("press.press.doctype.site.site.frappe.db.commit", new=Mock())
+ @patch("press.press.doctype.site.site.frappe.db.rollback", new=Mock())
+ def test_archive_suspended_sites_archives_only_sites_suspended_longer_than_days(self):
+ site = create_test_site()
+ site.db_set("status", "Suspended")
+ site_activity = create_test_site_activity(site.name, "Suspend Site")
+ site_activity.db_set(
+ "creation", frappe.utils.add_days(frappe.utils.now_datetime(), -ARCHIVE_AFTER_SUSPEND_DAYS - 1)
+ )
+ site2 = create_test_site()
+ site2.db_set("status", "Suspended")
+ site2_activity = create_test_site_activity(site2.name, "Suspend Site")
+ site2_activity.db_set(
+ "creation", frappe.utils.add_days(frappe.utils.now_datetime(), -ARCHIVE_AFTER_SUSPEND_DAYS + 1)
+ ) # site2 suspended recently
+ site3 = create_test_site() # active site should not be archived
+
+ create_test_saas_settings(None, [create_test_app(), create_test_app("erpnext", "ERPNext")])
+
+ archive_suspended_sites()
+ site.reload()
+ site2.reload()
+ site3.reload()
+ self.assertEqual(site.status, "Pending") # to be archived
+ self.assertEqual(site2.status, "Suspended")
+ self.assertEqual(site3.status, "Active")
+
+ @patch("press.press.doctype.site.site.frappe.db.commit", new=Mock())
+ @patch("press.press.doctype.site.site.frappe.db.rollback", new=Mock())
+ def test_suspension_of_10_usd_site_triggers_backup_if_it_does_not_exist(self):
+ plan_10 = create_test_plan("Site", price_usd=10.0, price_inr=750.0, plan_name="USD 10")
+
+ site = create_test_site()
+ site.db_set("status", "Suspended")
+ site.db_set("plan", plan_10.name)
+ site_activity = create_test_site_activity(site.name, "Suspend Site")
+ site_activity.db_set(
+ "creation", frappe.utils.add_days(frappe.utils.now_datetime(), -ARCHIVE_AFTER_SUSPEND_DAYS - 1)
+ )
+
+ site2 = create_test_site()
+ site2.db_set("status", "Suspended")
+ site2.db_set("plan", plan_10.name)
+ site2_activity = create_test_site_activity(site2.name, "Suspend Site")
+ site2_activity.db_set(
+ "creation", frappe.utils.add_days(frappe.utils.now_datetime(), -ARCHIVE_AFTER_SUSPEND_DAYS - 1)
+ )
+ from press.press.doctype.site_backup.test_site_backup import create_test_site_backup
+
+ create_test_site_backup(site2.name)
+
+ create_test_saas_settings(None, [create_test_app(), create_test_app("erpnext", "ERPNext")])
+
+ self.assertEqual(frappe.db.count("Site Backup", {"site": site.name, "status": "Pending"}), 0)
+ self.assertEqual(frappe.db.count("Site Backup", {"site": site2.name, "status": "Pending"}), 0)
+ archive_suspended_sites()
+ self.assertEqual(frappe.db.count("Site Backup", {"site": site.name, "status": "Pending"}), 1)
+ self.assertEqual(frappe.db.count("Site Backup", {"site": site2.name, "status": "Pending"}), 0)
+
+ site.reload()
+ site2.reload()
+
+ self.assertNotEqual(site.status, "Pending") # should not be archived
+ self.assertEqual(site2.status, "Pending")
+
+ def test_site_usage_exceed_tracking(self):
+ team = create_test_team()
+ plan_10 = create_test_plan("Site", price_usd=10.0, price_inr=750.0, plan_name="USD 10")
+ site = create_test_site(plan=plan_10.name, public_server=True, team=team.name)
+
+ self.assertEqual(site.status, "Active")
+ self.assertFalse(site.site_usage_exceeded)
+
+ site.current_disk_usage = 150
+ site.check_if_disk_usage_exceeded()
+ site.reload()
+
+ self.assertTrue(site.site_usage_exceeded)
+ self.assertIsNotNone(site.site_usage_exceeded_on)
+ self.assertEqual(site.status, "Active")
+
+ def test_free_sites_ignore_usage_exceed_tracking(self):
+ team = create_test_team(free_account=False)
+ plan_10 = create_test_plan("Site", price_usd=10.0, price_inr=750.0, plan_name="USD 10")
+ site = create_test_site(plan=plan_10.name, public_server=True, team=team.name, free=True)
+
+ self.assertEqual(site.status, "Active")
+ self.assertFalse(site.site_usage_exceeded)
+
+ site.current_disk_usage = 150
+ site.check_if_disk_usage_exceeded()
+ site.reload()
+
+ self.assertFalse(site.site_usage_exceeded)
+ self.assertIsNone(site.site_usage_exceeded_on)
+ self.assertEqual(site.status, "Active")
+
+ def test_free_team_sites_ignore_usage_exceed_tracking(self):
+ team = create_test_team(free_account=True)
+ plan_10 = create_test_plan("Site", price_usd=10.0, price_inr=750.0, plan_name="USD 10")
+ site = create_test_site(plan=plan_10.name, public_server=True, team=team.name, free=False)
+
+ self.assertEqual(site.status, "Active")
+ self.assertFalse(site.site_usage_exceeded)
+
+ site.current_disk_usage = 150
+ site.check_if_disk_usage_exceeded()
+ site.reload()
+
+ self.assertFalse(site.site_usage_exceeded)
+ self.assertIsNone(site.site_usage_exceeded_on)
+
+ def test_sites_on_dedicated_server_ignore_usage_exceed_tracking(self):
+ team = create_test_team()
+ plan_10 = create_test_plan("Site", price_usd=10.0, price_inr=750.0, plan_name="USD 10")
+ site = create_test_site(plan=plan_10.name, public_server=False, team=team.name)
+
+ self.assertEqual(site.status, "Active")
+ self.assertFalse(site.site_usage_exceeded)
+
+ site.current_disk_usage = 150
+ site.check_if_disk_usage_exceeded()
+ site.reload()
+
+ self.assertFalse(site.site_usage_exceeded)
+ self.assertIsNone(site.site_usage_exceeded_on)
+ self.assertEqual(site.status, "Active")
+
+ def test_reset_disk_usage_exceed_alert_on_changing_plan(self):
+ team = create_test_team()
+ plan_10 = create_test_plan("Site", price_usd=10.0, price_inr=750.0, plan_name="USD 10")
+ plan_20 = create_test_plan("Site", price_usd=20.0, price_inr=1500.0, plan_name="USD 20")
+
+ site: Site = create_test_site(plan=plan_10.name, public_server=True, team=team.name)
+ site.create_subscription(plan=plan_10.name)
+ site.current_disk_usage = 150
+ site.check_if_disk_usage_exceeded(save=True)
+ site.reload()
+
+ self.assertTrue(site.site_usage_exceeded)
+
+ site.change_plan(plan_20.name, ignore_card_setup=True)
+ site.reload()
+
+ self.assertFalse(site.site_usage_exceeded)
+ self.assertIsNone(site.site_usage_exceeded_on)
+
+ def test_reset_disk_usage_exceed_alert_on_reducing_disk_size(self):
+ team = create_test_team()
+ plan_10 = create_test_plan("Site", price_usd=10.0, price_inr=750.0, plan_name="USD 10")
+ site: Site = create_test_site(plan=plan_10.name, public_server=True, team=team.name)
+ site.create_subscription(plan=plan_10.name)
+ site.current_disk_usage = 150
+ site.check_if_disk_usage_exceeded(save=True)
+ site.reload()
+
+ self.assertTrue(site.site_usage_exceeded)
+
+ site.current_disk_usage = 50
+ site.check_if_disk_usage_exceeded(save=True)
+ site.reload()
+
+ self.assertFalse(site.site_usage_exceeded)
+ self.assertIsNone(site.site_usage_exceeded_on)
+
+ @patch("frappe.sendmail", new=Mock())
+ def test_suspend_site_on_exceeding_site_usage_for_consecutive_14_days(self):
+ frappe.db.set_single_value("Press Settings", "enforce_storage_limits", 1)
+ team = create_test_team()
+ site: Site = create_test_site(public_server=True, free=False, team=team.name)
+
+ site.current_database_usage = 150
+ site.site_usage_exceeded = True
+ site.site_usage_exceeded_on = frappe.utils.add_to_date(None, days=-2)
+ site.save()
+ self.assertEqual(site.status, "Active")
+
+ suspend_sites_exceeding_disk_usage_for_last_14_days()
+ site.reload()
+ self.assertEqual(site.status, "Active")
+
+ site.site_usage_exceeded_on = frappe.utils.add_to_date(None, days=-6)
+ site.save()
+ suspend_sites_exceeding_disk_usage_for_last_14_days()
+ site.reload()
+ self.assertEqual(site.status, "Active")
+
+ site.site_usage_exceeded_on = frappe.utils.add_to_date(None, days=-7)
+ site.save()
+ suspend_sites_exceeding_disk_usage_for_last_14_days()
+ site.reload()
+ self.assertEqual(site.status, "Active")
+
+ site.site_usage_exceeded_on = frappe.utils.add_to_date(None, days=-15)
+ site.save()
+ suspend_sites_exceeding_disk_usage_for_last_14_days()
+ site.reload()
+ self.assertEqual(site.status, "Suspended")
diff --git a/press/press/doctype/site_activity/site_activity.json b/press/press/doctype/site_activity/site_activity.json
index d6be104680f..8a1fdfcb4ee 100644
--- a/press/press/doctype/site_activity/site_activity.json
+++ b/press/press/doctype/site_activity/site_activity.json
@@ -6,8 +6,10 @@
"engine": "InnoDB",
"field_order": [
"site",
+ "team",
"action",
- "reason"
+ "reason",
+ "job"
],
"fields": [
{
@@ -27,7 +29,7 @@
"in_list_view": 1,
"in_standard_filter": 1,
"label": "Action",
- "options": "Activate Site\nAdd Domain\nArchive\nBackup\nCreate\nClear Cache\nDeactivate Site\nInstall App\nLogin as Administrator\nMigrate\nReinstall\nRestore\nSuspend Site\nUninstall App\nUnsuspend Site\nUpdate\nUpdate Configuration\nDrop Offsite Backups\nEnable Database Access\nDisable Database Access",
+ "options": "Activate Site\nAdd Domain\nArchive\nBackup\nCreate\nClear Cache\nDeactivate Site\nInstall App\nLogin as Administrator\nMigrate\nReinstall\nRestore\nSuspend Site\nUninstall App\nUnsuspend Site\nUpdate\nUpdate Configuration\nDrop Offsite Backups\nDrop Physical Backups\nEnable Database Access\nDisable Database Access\nCreate Database User\nRemove Database User\nModify Database User Permissions\nDisable Monitoring And Alerts\nEnable Monitoring And Alerts",
"read_only": 1,
"reqd": 1,
"search_index": 1
@@ -36,11 +38,26 @@
"fieldname": "reason",
"fieldtype": "Small Text",
"label": "Reason"
+ },
+ {
+ "fetch_from": "site.team",
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "label": "Team",
+ "options": "Team"
+ },
+ {
+ "depends_on": "eval: doc.job",
+ "fieldname": "job",
+ "fieldtype": "Link",
+ "label": "Job",
+ "options": "Agent Job"
}
],
+ "grid_page_length": 50,
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2022-02-28 18:55:41.364129",
+ "modified": "2025-10-07 19:31:49.273641",
"modified_by": "Administrator",
"module": "Press",
"name": "Site Activity",
@@ -67,8 +84,9 @@
"role": "Press Member"
}
],
+ "row_format": "Dynamic",
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"track_changes": 1
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/site_activity/site_activity.py b/press/press/doctype/site_activity/site_activity.py
index e9b76ca0455..237de6f9d57 100644
--- a/press/press/doctype/site_activity/site_activity.py
+++ b/press/press/doctype/site_activity/site_activity.py
@@ -1,31 +1,88 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
-
+from __future__ import annotations
import frappe
from frappe.model.document import Document
+from press.press.doctype.communication_info.communication_info import get_communication_info
+
class SiteActivity(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ action: DF.Literal[
+ "Activate Site",
+ "Add Domain",
+ "Archive",
+ "Backup",
+ "Create",
+ "Clear Cache",
+ "Deactivate Site",
+ "Install App",
+ "Login as Administrator",
+ "Migrate",
+ "Reinstall",
+ "Restore",
+ "Suspend Site",
+ "Uninstall App",
+ "Unsuspend Site",
+ "Update",
+ "Update Configuration",
+ "Drop Offsite Backups",
+ "Drop Physical Backups",
+ "Enable Database Access",
+ "Disable Database Access",
+ "Create Database User",
+ "Remove Database User",
+ "Modify Database User Permissions",
+ "Disable Monitoring And Alerts",
+ "Enable Monitoring And Alerts",
+ ]
+ job: DF.Link | None
+ reason: DF.SmallText | None
+ site: DF.Link
+ team: DF.Link | None
+ # end: auto-generated types
+
+ dashboard_fields = ("action", "reason", "site", "job")
+
def after_insert(self):
+ if self.team == "Administrator":
+ return
+
if self.action == "Login as Administrator" and self.reason:
- d = frappe.get_all("Site", {"name": self.site}, ["notify_email", "team"])[0]
- recipient = d.notify_email or frappe.get_doc("Team", d.team).user
- if recipient:
- team = frappe.get_doc("Team", d.team)
- team.notify_with_email(
- [recipient],
+ recipients = get_communication_info("Email", "Site Activity", "Site", self.site)
+ if recipients:
+ frappe.sendmail(
+ recipients=recipients,
subject="Administrator login to your site",
template="admin_login",
args={"site": self.site, "user": self.owner, "reason": self.reason},
reference_doctype=self.doctype,
reference_name=self.name,
- now=True,
+ )
+
+ if self.action == "Disable Monitoring And Alerts" and self.reason:
+ recipients = get_communication_info("Email", "Site Activity", "Site", self.site)
+ if recipients:
+ frappe.sendmail(
+ recipients=recipients,
+ subject="Site Monitoring Disabled",
+ template="disabled_site_monitoring",
+ args={"site": self.site, "reason": self.reason},
+ reference_doctype=self.doctype,
+ reference_name=self.name,
)
-def log_site_activity(site, action, reason=None):
+def log_site_activity(site, action, reason=None, job=None):
return frappe.get_doc(
- {"doctype": "Site Activity", "site": site, "action": action, "reason": reason}
+ {"doctype": "Site Activity", "site": site, "action": action, "reason": reason, "job": job}
).insert()
diff --git a/press/press/doctype/site_activity/test_site_activity.py b/press/press/doctype/site_activity/test_site_activity.py
index 6e79e0cd02f..d5a8fd16b5d 100644
--- a/press/press/doctype/site_activity/test_site_activity.py
+++ b/press/press/doctype/site_activity/test_site_activity.py
@@ -1,11 +1,28 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
+from __future__ import annotations
+from typing import TYPE_CHECKING
-# import frappe
-import unittest
+import frappe
+from frappe.tests.utils import FrappeTestCase
+from press.utils import get_current_team
-class TestSiteActivity(unittest.TestCase):
+if TYPE_CHECKING:
+ from press.press.doctype.site_activity.site_activity import SiteActivity
+
+
+def create_test_site_activity(site: str, action: str) -> SiteActivity:
+ return frappe.get_doc(
+ { # type: ignore
+ "doctype": "Site Activity",
+ "site": site,
+ "action": action,
+ "team": get_current_team(),
+ }
+ ).insert()
+
+
+class TestSiteActivity(FrappeTestCase):
pass
diff --git a/press/press/doctype/site_analytics/site_analytics.py b/press/press/doctype/site_analytics/site_analytics.py
index 74ce45970a2..4b76a6b7dbb 100644
--- a/press/press/doctype/site_analytics/site_analytics.py
+++ b/press/press/doctype/site_analytics/site_analytics.py
@@ -3,10 +3,69 @@
import frappe
from frappe.model.document import Document
+from frappe.query_builder import Interval
+from frappe.query_builder.functions import Now
class SiteAnalytics(Document):
- pass
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.site_analytics_active.site_analytics_active import (
+ SiteAnalyticsActive,
+ )
+ from press.press.doctype.site_analytics_app.site_analytics_app import SiteAnalyticsApp
+ from press.press.doctype.site_analytics_doctype.site_analytics_doctype import (
+ SiteAnalyticsDocType,
+ )
+ from press.press.doctype.site_analytics_login.site_analytics_login import (
+ SiteAnalyticsLogin,
+ )
+ from press.press.doctype.site_analytics_user.site_analytics_user import (
+ SiteAnalyticsUser,
+ )
+
+ activation_level: DF.Int
+ backup_size: DF.Int
+ company: DF.Data | None
+ country: DF.Data | None
+ database_size: DF.Int
+ domain: DF.Data | None
+ emails_sent: DF.Int
+ files_size: DF.Int
+ installed_apps: DF.Table[SiteAnalyticsApp]
+ language: DF.Data | None
+ last_active: DF.Table[SiteAnalyticsActive]
+ last_logins: DF.Table[SiteAnalyticsLogin]
+ sales_data: DF.Table[SiteAnalyticsDocType]
+ scheduler_enabled: DF.Check
+ setup_complete: DF.Check
+ site: DF.Link
+ space_used: DF.Int
+ time_zone: DF.Data | None
+ timestamp: DF.Datetime
+ users: DF.Table[SiteAnalyticsUser]
+ # end: auto-generated types
+
+ @staticmethod
+ def clear_old_logs(days=30):
+ tables = [
+ "Site Analytics",
+ "Site Analytics User",
+ "Site Analytics Login",
+ "Site Analytics App",
+ "Site Analytics DocType",
+ "Site Analytics Active",
+ ]
+ for table in tables:
+ table = frappe.qb.DocType(table)
+ frappe.db.delete(table, filters=(table.modified < (Now() - Interval(days=days))))
+ frappe.db.commit()
def create_site_analytics(site, data):
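Review note: `clear_old_logs` hands a query-builder table object and condition straight to `frappe.db.delete`, the same retention pattern Frappe core uses for its own log tables. A standalone sketch of the pattern, assuming a Frappe site context (`Error Log` is only an illustrative doctype):

```python
import frappe
from frappe.query_builder import Interval
from frappe.query_builder.functions import Now


def delete_older_than(doctype: str, days: int = 30) -> None:
    # Build a qb condition on `modified` and pass it to frappe.db.delete
    # as the filter, deleting everything older than the cutoff.
    table = frappe.qb.DocType(doctype)
    frappe.db.delete(table, filters=(table.modified < (Now() - Interval(days=days))))


# delete_older_than("Error Log", days=30)  # illustrative usage
```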
diff --git a/press/press/doctype/site_analytics_active/site_analytics_active.py b/press/press/doctype/site_analytics_active/site_analytics_active.py
index b25ee803461..fa41a4d68eb 100644
--- a/press/press/doctype/site_analytics_active/site_analytics_active.py
+++ b/press/press/doctype/site_analytics_active/site_analytics_active.py
@@ -6,4 +6,26 @@
class SiteAnalyticsActive(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ email: DF.Data | None
+ enabled: DF.Check
+ full_name: DF.Data | None
+ is_system_manager: DF.Check
+ language: DF.Data | None
+ last_active: DF.Datetime | None
+ last_login: DF.Datetime | None
+ name: DF.Int | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ time_zone: DF.Data | None
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/site_analytics_app/site_analytics_app.py b/press/press/doctype/site_analytics_app/site_analytics_app.py
index d10dc8da719..6ad12ebb92b 100644
--- a/press/press/doctype/site_analytics_app/site_analytics_app.py
+++ b/press/press/doctype/site_analytics_app/site_analytics_app.py
@@ -6,4 +6,21 @@
class SiteAnalyticsApp(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ app_name: DF.Data | None
+ branch: DF.Data | None
+ name: DF.Int | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ version: DF.Data | None
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/site_analytics_doctype/site_analytics_doctype.py b/press/press/doctype/site_analytics_doctype/site_analytics_doctype.py
index de139a82870..47911ada7ad 100644
--- a/press/press/doctype/site_analytics_doctype/site_analytics_doctype.py
+++ b/press/press/doctype/site_analytics_doctype/site_analytics_doctype.py
@@ -6,4 +6,20 @@
class SiteAnalyticsDocType(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ count: DF.Int
+ document_type: DF.Data | None
+ name: DF.Int | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/site_analytics_login/site_analytics_login.py b/press/press/doctype/site_analytics_login/site_analytics_login.py
index 28e601376ae..0ffcc701ab2 100644
--- a/press/press/doctype/site_analytics_login/site_analytics_login.py
+++ b/press/press/doctype/site_analytics_login/site_analytics_login.py
@@ -6,4 +6,21 @@
class SiteAnalyticsLogin(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ full_name: DF.Data | None
+ name: DF.Int | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ timestamp: DF.Data | None
+ user: DF.Data | None
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/site_analytics_user/site_analytics_user.py b/press/press/doctype/site_analytics_user/site_analytics_user.py
index 56fe0327d3f..e18cae875ad 100644
--- a/press/press/doctype/site_analytics_user/site_analytics_user.py
+++ b/press/press/doctype/site_analytics_user/site_analytics_user.py
@@ -6,4 +6,26 @@
class SiteAnalyticsUser(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ email: DF.Data | None
+ enabled: DF.Check
+ full_name: DF.Data | None
+ is_system_manager: DF.Check
+ language: DF.Data | None
+ last_active: DF.Datetime | None
+ last_login: DF.Datetime | None
+ name: DF.Int | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ time_zone: DF.Data | None
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/site_app/site_app.py b/press/press/doctype/site_app/site_app.py
index 88e074b90f5..fcc067251ba 100644
--- a/press/press/doctype/site_app/site_app.py
+++ b/press/press/doctype/site_app/site_app.py
@@ -1,11 +1,51 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
-# import frappe
+import frappe
from frappe.model.document import Document
+from frappe.utils import cstr
+
+from press.api.site import get_installed_apps
class SiteApp(Document):
- pass
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ app: DF.Link
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ # end: auto-generated types
+
+ @staticmethod
+ def get_list_query(query, filters=None, **list_args):
+ site = cstr(filters.get("parent", "")) if filters else None
+ if not site:
+ return None
+
+ site_doc = frappe.get_doc("Site", site)
+ installed_apps = get_installed_apps(site_doc, filters)
+
+ # Apply is_app_patched flag to installed_apps
+ app_names = [a.app for a in site_doc.apps]
+ patched_apps = frappe.get_all(
+ "App Patch",
+ fields=["app"],
+ filters={
+ "bench": site_doc.bench,
+ "app": ["in", app_names],
+ },
+ pluck="app",
+ )
+ for app in installed_apps:
+ if app.app in patched_apps:
+ app.is_app_patched = True
+
+ return installed_apps
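Review note: the patched-app flag above does a list membership check per installed app. A sketch of an equivalent helper that uses a set for O(1) lookups and assigns the flag explicitly on every app (a micro-optimization, not a required change; names mirror the code above):

```python
def flag_patched_apps(installed_apps: list, patched_apps: list) -> None:
    # Variant of the loop above: set membership instead of list scans,
    # and is_app_patched is assigned on every app, not only patched ones.
    patched = set(patched_apps)
    for app in installed_apps:
        app.is_app_patched = app.app in patched
```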
diff --git a/press/press/doctype/site_backup/site_backup.json b/press/press/doctype/site_backup/site_backup.json
index 6cba84d991e..b529edda51f 100644
--- a/press/press/doctype/site_backup/site_backup.json
+++ b/press/press/doctype/site_backup/site_backup.json
@@ -6,14 +6,17 @@
"engine": "InnoDB",
"field_order": [
"status",
- "files_availability",
"site",
+ "database_name",
+ "team",
+ "column_break_obsx",
"job",
+ "files_availability",
+ "physical",
+ "for_site_update",
"with_files",
"offsite",
- "database",
- "size",
- "url",
+ "deactivate_site_during_backup",
"data_7",
"database_file",
"database_url",
@@ -35,7 +38,11 @@
"remote_private_file",
"private_size",
"section_break_21",
- "offsite_backup"
+ "offsite_backup",
+ "section_break_hiaw",
+ "database_snapshot",
+ "column_break_hksx",
+ "snapshot_request_key"
],
"fields": [
{
@@ -63,28 +70,12 @@
"fieldtype": "Link",
"label": "Job",
"options": "Agent Job",
- "read_only": 1
- },
- {
- "fieldname": "database",
- "fieldtype": "Data",
- "label": "database",
- "read_only": 1
- },
- {
- "fieldname": "size",
- "fieldtype": "Data",
- "label": "size",
- "read_only": 1
- },
- {
- "fieldname": "url",
- "fieldtype": "Data",
- "label": "url",
- "read_only": 1
+ "read_only": 1,
+ "search_index": 1
},
{
"default": "0",
+ "depends_on": "eval: doc.physical != 1",
"fieldname": "with_files",
"fieldtype": "Check",
"in_list_view": 1,
@@ -147,10 +138,12 @@
"read_only": 1
},
{
+ "depends_on": "eval: doc.physical != 1",
"fieldname": "data_7",
"fieldtype": "Section Break"
},
{
+ "depends_on": "eval: doc.physical != 1",
"fieldname": "data_12",
"fieldtype": "Section Break"
},
@@ -160,6 +153,7 @@
},
{
"default": "0",
+ "depends_on": "eval: doc.physical != 1",
"fieldname": "offsite",
"fieldtype": "Check",
"in_list_view": 1,
@@ -168,6 +162,7 @@
"set_only_once": 1
},
{
+ "depends_on": "eval: doc.physical != 1 && doc.offsite == 1",
"fieldname": "section_break_21",
"fieldtype": "Section Break"
},
@@ -236,10 +231,83 @@
"label": "Remote File",
"options": "Remote File",
"read_only": 1
+ },
+ {
+ "fetch_from": "site.team",
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "label": "Team",
+ "options": "Team",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "physical",
+ "fieldtype": "Check",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Physical",
+ "set_only_once": 1
+ },
+ {
+ "collapsible_depends_on": "1",
+ "depends_on": "eval: doc.physical == 1",
+ "fieldname": "section_break_hiaw",
+ "fieldtype": "Section Break",
+ "label": "Physical Backup"
+ },
+ {
+ "fieldname": "column_break_hksx",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "snapshot_request_key",
+ "fieldtype": "Data",
+ "label": "Snapshot Request Key",
+ "read_only": 1
+ },
+ {
+ "depends_on": "eval: doc.physical",
+ "fieldname": "database_snapshot",
+ "fieldtype": "Link",
+ "label": "Database Snapshot",
+ "options": "Virtual Disk Snapshot",
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "depends_on": "eval: doc.database_name",
+ "fieldname": "database_name",
+ "fieldtype": "Data",
+ "label": "Database Name",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_obsx",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "depends_on": "eval: doc.physical == 1",
+ "fieldname": "for_site_update",
+ "fieldtype": "Check",
+ "label": "For Site Update"
+ },
+ {
+ "default": "0",
+ "fieldname": "deactivate_site_during_backup",
+ "fieldtype": "Check",
+ "label": "Deactivate Site During Backup",
+ "set_only_once": 1
+ }
+ ],
+ "links": [
+ {
+ "link_doctype": "Agent Job",
+ "link_fieldname": "reference_name"
}
],
- "links": [],
- "modified": "2023-06-08 23:47:51.315930",
+ "modified": "2025-04-17 10:57:09.359685",
"modified_by": "Administrator",
"module": "Press",
"name": "Site Backup",
diff --git a/press/press/doctype/site_backup/site_backup.py b/press/press/doctype/site_backup/site_backup.py
index 0fe98432ba8..82a0cfc1025 100644
--- a/press/press/doctype/site_backup/site_backup.py
+++ b/press/press/doctype/site_backup/site_backup.py
@@ -1,49 +1,423 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
+import contextlib
import json
-from datetime import datetime, timedelta
-from typing import Dict
+import time
+from typing import TYPE_CHECKING
import frappe
+import frappe.utils
from frappe.desk.doctype.tag.tag import add_tag
from frappe.model.document import Document
+
from press.agent import Agent
+from press.exceptions import SiteTooManyPendingBackups
+from press.overrides import get_permission_query_conditions_for_doctype
+from press.press.doctype.ansible_console.ansible_console import AnsibleAdHoc
+
+if TYPE_CHECKING:
+ from datetime import datetime
+
+ from press.press.doctype.agent_job.agent_job import AgentJob
+ from press.press.doctype.site_update.site_update import SiteUpdate
+ from press.press.doctype.virtual_machine.virtual_machine import VirtualMachine
class SiteBackup(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ config_file: DF.Data | None
+ config_file_size: DF.Data | None
+ config_file_url: DF.Text | None
+ database_file: DF.Data | None
+ database_name: DF.Data | None
+ database_size: DF.Data | None
+ database_snapshot: DF.Link | None
+ database_url: DF.Text | None
+ deactivate_site_during_backup: DF.Check
+ files_availability: DF.Literal["", "Available", "Unavailable"]
+ for_site_update: DF.Check
+ job: DF.Link | None
+ offsite: DF.Check
+ offsite_backup: DF.Code | None
+ physical: DF.Check
+ private_file: DF.Data | None
+ private_size: DF.Data | None
+ private_url: DF.Text | None
+ public_file: DF.Data | None
+ public_size: DF.Data | None
+ public_url: DF.Text | None
+ remote_config_file: DF.Link | None
+ remote_database_file: DF.Link | None
+ remote_private_file: DF.Link | None
+ remote_public_file: DF.Link | None
+ site: DF.Link
+ snapshot_request_key: DF.Data | None
+ status: DF.Literal["Pending", "Running", "Success", "Failure"]
+ team: DF.Link | None
+ with_files: DF.Check
+ # end: auto-generated types
+
+ dashboard_fields = (
+ "job",
+ "status",
+ "database_url",
+ "public_url",
+ "private_url",
+ "config_file_url",
+ "site",
+ "database_size",
+ "public_size",
+ "private_size",
+ "with_files",
+ "offsite",
+ "files_availability",
+ "remote_database_file",
+ "remote_public_file",
+ "remote_private_file",
+ "remote_config_file",
+ "physical",
+ "database_snapshot",
+ )
+
+ @property
+ def database_server(self):
+ return frappe.get_value("Server", self.server, "database_server")
+
+ @property
+ def server(self):
+ return frappe.get_cached_value("Site", self.site, "server")
+
+ @staticmethod
+ def get_list_query(query, filters=None, **list_args):
+ """
+ Exclude records whose status is `Success` but whose files_availability is `Unavailable`
+ """
+ sb = frappe.qb.DocType("Site Backup")
+ query = query.where(~((sb.files_availability == "Unavailable") & (sb.status == "Success")))
+ if filters and filters.get("backup_date"):
+ with contextlib.suppress(Exception):
+ date = frappe.utils.getdate(filters["backup_date"])
+ query = query.where(
+ sb.creation.between(
+ frappe.utils.add_to_date(date, hours=0, minutes=0, seconds=0),
+ frappe.utils.add_to_date(date, hours=23, minutes=59, seconds=59),
+ )
+ )
+
+ if not (filters or {}).get("status"):
+ query = query.where(sb.status == "Success")
+
+ results = [
+ result
+ for result in query.run(as_dict=True)
+ if not (result.get("physical") and result.get("for_site_update"))
+ ]
+
+ return [
+ {
+ **result,
+ "type": "Physical" if result.get("physical") else "Logical",
+ "ready_to_restore": True
+ if result.get("physical") == 0
+ else frappe.get_cached_value(
+ "Virtual Disk Snapshot", result.get("database_snapshot"), "status"
+ )
+ == "Completed",
+ }
+ for result in results
+ ]
+
+ def validate(self):
+ if self.physical and self.with_files:
+ frappe.throw("Physical backups cannot be taken with files")
+ if self.physical and self.offsite:
+ frappe.throw("Physical and offsite logical backups cannot be taken together")
+
+ if self.deactivate_site_during_backup and not self.physical:
+ frappe.throw("Site deactivation should be used for physical backups only")
+
def before_insert(self):
- last_two_hours = datetime.now() - timedelta(hours=2)
- if self.site == "kyosk.erpnext.com": # as per customer request
- raise Exception("No auto backup for Kyosk")
+ if getattr(self, "force", False):
+ if self.physical:
+ frappe.throw("Physical backups cannot be forcefully triggered")
+ return
+ # Bail out if too many backups are already pending for this site
+ two_hours_ago = frappe.utils.add_to_date(None, hours=-2)
if frappe.db.count(
"Site Backup",
{
"site": self.site,
"status": ("in", ["Running", "Pending"]),
- "creation": (">", last_two_hours),
+ "creation": (">", two_hours_ago),
},
):
- raise Exception("Too many pending backups")
+ frappe.throw("Too many pending backups", SiteTooManyPendingBackups)
+
+ if self.physical:
+ # validate physical backup enabled on database server
+ if not bool(
+ frappe.utils.cint(
+ frappe.get_value("Database Server", self.database_server, "enable_physical_backup")
+ )
+ ):
+ frappe.throw(
+ "Physical backup is not enabled for this database server. Please reach out to support."
+ )
+ # Set some default values
+ site = frappe.get_doc("Site", self.site)
+ if not site.database_name:
+ site.sync_info()
+ site.reload()
+ if not site.database_name:
+ frappe.throw("Database name is missing in the site")
+ self.database_name = site.database_name
+ self.snapshot_request_key = frappe.generate_hash(length=32)
def after_insert(self):
- site = frappe.get_doc("Site", self.site)
- agent = Agent(site.server)
- job = agent.backup_site(site, self.with_files, self.offsite)
- frappe.db.set_value("Site Backup", self.name, "job", job.name)
+ if self.deactivate_site_during_backup:
+ agent = Agent(self.server)
+ agent.deactivate_site(
+ frappe.get_doc("Site", self.site), reference_doctype=self.doctype, reference_name=self.name
+ )
+ else:
+ self.start_backup()
+
+ def start_backup(self):
+ if self.physical:
+ frappe.enqueue_doc(
+ doctype=self.doctype,
+ name=self.name,
+ method="_create_physical_backup",
+ enqueue_after_commit=True,
+ )
+ else:
+ site = frappe.get_doc("Site", self.site)
+ agent = Agent(site.server)
+ job = agent.backup_site(site, self)
+ frappe.db.set_value("Site Backup", self.name, "job", job.name)
def after_delete(self):
if self.job:
frappe.delete_doc_if_exists("Agent Job", self.job)
+ def on_update(self): # noqa: C901
+ if self.physical and self.has_value_changed("status") and self.status in ["Success", "Failure"]:
+ site_update_doc_name = frappe.db.exists("Site Update", {"site_backup": self.name})
+ if site_update_doc_name:
+ """
+ If the site backup was triggered for a Site Update,
+ then trigger the Site Update to proceed with the next steps
+ """
+ site_update: SiteUpdate = frappe.get_doc("Site Update", site_update_doc_name)
+ if self.status == "Success":
+ site_update.create_update_site_agent_request()
+ elif self.status == "Failure":
+ site_update.activate_site(backup_failed=True)
+
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ method="_rollback_db_directory_permissions",
+ enqueue_after_commit=True,
+ )
+
+ if (
+ self.has_value_changed("status")
+ and self.status in ["Success", "Failure"]
+ and self.deactivate_site_during_backup
+ ):
+ agent = Agent(self.server)
+ agent.activate_site(
+ frappe.get_doc("Site", self.site), reference_doctype=self.doctype, reference_name=self.name
+ )
+
+ try:
+ if (
+ not self.physical
+ and self.has_value_changed("status")
+ and frappe.db.get_value("Agent Job", self.job, "status") == "Failure"
+ ):
+ self.autocorrect_bench_permissions()
+ except Exception:
+ frappe.log_error(
+ "Failed to correct bench permissions",
+ reference_doctype=self.doctype,
+ reference_name=self.name,
+ )
+
+ if (
+ not self.physical
+ and self.has_value_changed("status")
+ and frappe.db.get_value("Agent Job", self.job, "status") == "Failure"
+ ):
+ self.fix_global_search_indexes()
+
+ def _rollback_db_directory_permissions(self):
+ if not self.physical:
+ return
+ """
+ Rollback the permission changes made to the database directory
+ Change it back to 700 from 770
+
+ Check `_create_physical_backup` method for more information
+ """
+ success = self.run_ansible_command_in_database_server(
+ f"chmod 700 /var/lib/mysql/{self.database_name}"
+ )
+ if not success:
+ """
+ Don't throw an error here, because the backup has already been created
+ and keeping the permission at 770 will not cause issues in database operations
+ """
+ frappe.log_error(
+ "Failed to rollback the permission changes of the database directory",
+ reference_doctype=self.doctype,
+ reference_name=self.name,
+ )
+
+ def _create_physical_backup(self):
+ site = frappe.get_doc("Site", self.site)
+ """
+ Change the /var/lib/mysql/ directory's permission to 770 from 700
+ The files inside that directory already have 660 permissions, so there is no need to change them
+
+ The `frappe` user on the server is already part of the `mysql` group,
+ so it can read and write the files inside that directory
+ """
+ success = self.run_ansible_command_in_database_server(
+ f"chmod 770 /var/lib/mysql/{self.database_name}"
+ )
+ if not success:
+ self.reload()
+ self.status = "Failure"
+ self.save(ignore_permissions=True)
+ return
+ agent = Agent(self.database_server, "Database Server")
+ job = agent.physical_backup_database(site, self)
+ frappe.db.set_value("Site Backup", self.name, "job", job.name)
+
+ def run_ansible_command_in_database_server(self, command: str) -> bool:
+ virtual_machine_ip = frappe.db.get_value(
+ "Virtual Machine",
+ frappe.get_value("Database Server", self.database_server, "virtual_machine"),
+ "public_ip_address",
+ )
+ result = AnsibleAdHoc(sources=f"{virtual_machine_ip},").run(command, self.name)[0]
+ success = result.get("status") == "Success"
+ if not success:
+ pretty_result = json.dumps(result, indent=2, sort_keys=True, default=str)
+ frappe.log_error(
+ "During physical backup creation, failed to execute command in database server",
+ message=pretty_result,
+ reference_doctype=self.doctype,
+ reference_name=self.name,
+ )
+ comment = f"{command}\n{pretty_result}"
+ self.add_comment(text=comment)
+ return success
+
+ def create_database_snapshot(self):
+ if self.database_snapshot:
+ # Snapshot already exists, So no need to create a new one
+ return
+
+ server = frappe.get_value("Site", self.site, "server")
+ database_server = frappe.get_value("Server", server, "database_server")
+ virtual_machine: VirtualMachine = frappe.get_doc(
+ "Virtual Machine", frappe.get_value("Database Server", database_server, "virtual_machine")
+ )
+
+ cache_key = f"volume_active_snapshot:{self.database_server}"
+
+ max_retries = 3
+ while max_retries > 0:
+ is_ongoing_snapshot = frappe.utils.cint(frappe.cache.get_value(cache_key, expires=True))
+ if not is_ongoing_snapshot:
+ break
+ time.sleep(2)
+ max_retries -= 1
+
+ if frappe.cache.get_value(cache_key, expires=True):
+ raise OngoingSnapshotError("Snapshot creation per volume rate exceeded")
+
+ frappe.cache.set_value(
+ cache_key,
+ 1,
+ expires_in_sec=15,
+ )
+
+ virtual_machine.create_snapshots(exclude_boot_volume=True, physical_backup=True)
+ if len(virtual_machine.flags.created_snapshots) == 0:
+ frappe.throw("Failed to create a snapshot for the database server")
+ frappe.db.set_value(
+ "Site Backup", self.name, "database_snapshot", virtual_machine.flags.created_snapshots[0]
+ )
+
+ def autocorrect_bench_permissions(self):
+ """
+ Run this whenever a Site Backup fails with the error
+ "[Errno 13]: Permission denied".
+ """
+ import re
+
+ job = frappe.db.get_value("Agent Job", self.job, ["bench", "server", "output"], as_dict=True)
+ play_exists = frappe.db.get_value(
+ "Ansible Play",
+ filters={
+ "play": "Correct Bench Permissions",
+ "status": ["in", ["Pending", "Running"]],
+ "server": job.server,
+ "variables": ["like", "f%{job.bench}%"],
+ },
+ )
+
+ if job.output and not play_exists and re.search(r"\[Errno 13\] Permission denied", job.output):
+ try:
+ bench = frappe.get_doc("Bench", job.bench)
+ bench.correct_bench_permissions()
+ return True
+ except Exception:
+ frappe.log_error(
+ "Failed to correct bench permissions.",
+ reference_doctype=self.doctype,
+ reference_name=self.name,
+ )
+ return False
+ return False
+
+ def fix_global_search_indexes(self):
+ """
+ Run this whenever Backup Job fails because of broken global search indexes and regenerate them.
+ """
+ job = frappe.db.get_value("Agent Job", self.job, ["bench", "server", "output"], as_dict=True)
+
+ if job.output and "Couldn't execute 'show create table `__global_search`'" in job.output:
+ try:
+ agent = Agent(self.server)
+ agent.create_agent_job("Fix global search", "fix_global_search")
+ except Exception:
+ frappe.log_error(
+ "Failed to fix global search indexes",
+ reference_doctype=self.doctype,
+ reference_name=self.name,
+ )
+
@classmethod
def offsite_backup_exists(cls, site: str, day: datetime.date) -> bool:
return cls.backup_exists(site, day, {"offsite": True})
@classmethod
- def backup_exists(cls, site: str, day: datetime.date, filters: Dict):
+ def backup_exists(cls, site: str, day: datetime.date, filters: dict):
base_filters = {
"creation": ("between", [day, day]),
"site": site,
@@ -56,9 +430,16 @@ def file_backup_exists(cls, site: str, day: datetime.date) -> bool:
return cls.backup_exists(site, day, {"with_files": True})
-def track_offsite_backups(
- site: str, backup_data: dict, offsite_backup_data: dict
-) -> tuple:
+get_permission_query_conditions = get_permission_query_conditions_for_doctype("Site Backup")
+
+
+class OngoingSnapshotError(Exception):
+ """Exception raised when other snapshot creation is ongoing"""
+
+ pass
+
+
+def track_offsite_backups(site: str, backup_data: dict, offsite_backup_data: dict) -> tuple:
remote_files = {"database": None, "site_config": None, "public": None, "private": None}
if offsite_backup_data:
@@ -96,68 +477,103 @@ def track_offsite_backups(
def process_backup_site_job_update(job):
- backups = frappe.get_all(
- "Site Backup", fields=["name", "status"], filters={"job": job.name}, limit=1
- )
+ backups = frappe.get_all("Site Backup", fields=["name", "status"], filters={"job": job.name}, limit=1)
if not backups:
return
backup = backups[0]
if job.status != backup.status:
- frappe.db.set_value(
- "Site Backup", backup.name, "status", job.status, for_update=False
- )
+ status = job.status
+ if job.status == "Delivery Failure":
+ status = "Failure"
+
if job.status == "Success":
- job_data = json.loads(job.data)
- backup_data, offsite_backup_data = job_data["backups"], job_data["offsite"]
- (
- remote_database,
- remote_config_file,
- remote_public,
- remote_private,
- ) = track_offsite_backups(job.site, backup_data, offsite_backup_data)
-
- site_backup_dict = {
- "files_availability": "Available",
- "database_size": backup_data["database"]["size"],
- "database_url": backup_data["database"]["url"],
- "database_file": backup_data["database"]["file"],
- "remote_database_file": remote_database,
- }
+ if frappe.get_value("Site Backup", backup.name, "physical"):
+ doc: SiteBackup = frappe.get_doc("Site Backup", backup.name)
+ doc.files_availability = "Available"
+ doc.status = "Success"
+ doc.save()
+ else:
+ frappe.db.set_value("Site Backup", backup.name, "status", status)
+ job_data = json.loads(job.data)
+ backup_data, offsite_backup_data = job_data["backups"], job_data["offsite"]
+ (
+ remote_database,
+ remote_config_file,
+ remote_public,
+ remote_private,
+ ) = track_offsite_backups(job.site, backup_data, offsite_backup_data)
- if "site_config" in backup_data:
- site_backup_dict.update(
- {
- "config_file_size": backup_data["site_config"]["size"],
- "config_file_url": backup_data["site_config"]["url"],
- "config_file": backup_data["site_config"]["file"],
- "remote_config_file": remote_config_file,
- }
- )
+ site_backup_dict = {
+ "files_availability": "Available",
+ "database_size": backup_data["database"]["size"],
+ "database_url": backup_data["database"]["url"],
+ "database_file": backup_data["database"]["file"],
+ "remote_database_file": remote_database,
+ }
- if "private" in backup_data and "public" in backup_data:
- site_backup_dict.update(
- {
- "private_size": backup_data["private"]["size"],
- "private_url": backup_data["private"]["url"],
- "private_file": backup_data["private"]["file"],
- "remote_public_file": remote_public,
- "public_size": backup_data["public"]["size"],
- "public_url": backup_data["public"]["url"],
- "public_file": backup_data["public"]["file"],
- "remote_private_file": remote_private,
- }
- )
+ if "site_config" in backup_data:
+ site_backup_dict.update(
+ {
+ "config_file_size": backup_data["site_config"]["size"],
+ "config_file_url": backup_data["site_config"]["url"],
+ "config_file": backup_data["site_config"]["file"],
+ "remote_config_file": remote_config_file,
+ }
+ )
+
+ if "private" in backup_data and "public" in backup_data:
+ site_backup_dict.update(
+ {
+ "private_size": backup_data["private"]["size"],
+ "private_url": backup_data["private"]["url"],
+ "private_file": backup_data["private"]["file"],
+ "remote_public_file": remote_public,
+ "public_size": backup_data["public"]["size"],
+ "public_url": backup_data["public"]["url"],
+ "public_file": backup_data["public"]["file"],
+ "remote_private_file": remote_private,
+ }
+ )
- frappe.db.set_value("Site Backup", backup.name, site_backup_dict, for_update=False)
+ frappe.db.set_value("Site Backup", backup.name, site_backup_dict)
+ else:
+ site_backup: SiteBackup = frappe.get_doc("Site Backup", backup.name)
+ site_backup.status = status
+ site_backup.save()
def get_backup_bucket(cluster, region=False):
- bucket_for_cluster = frappe.get_all(
- "Backup Bucket", {"cluster": cluster}, ["name", "region"], limit=1
- )
+ bucket_for_cluster = frappe.get_all("Backup Bucket", {"cluster": cluster}, ["name", "region"], limit=1)
default_bucket = frappe.db.get_single_value("Press Settings", "aws_s3_bucket")
if region:
return bucket_for_cluster[0] if bucket_for_cluster else default_bucket
- else:
- return bucket_for_cluster[0]["name"] if bucket_for_cluster else default_bucket
+ return bucket_for_cluster[0]["name"] if bucket_for_cluster else default_bucket
+
+
+def process_deactivate_site_job_update(job: AgentJob):
+ if job.reference_doctype != "Site Backup":
+ return
+
+ if job.status not in ["Success", "Failure", "Delivery Failure"]:
+ return
+
+ status = {
+ "Success": "Success",
+ "Failure": "Failure",
+ "Delivery Failure": "Failure",
+ }[job.status]
+
+ if frappe.get_value("Site Backup", job.reference_name, "status") == status:
+ return
+
+ backup: SiteBackup = frappe.get_doc("Site Backup", job.reference_name)
+ if status == "Failure":
+ backup.status = "Failure"
+ backup.save()
+ elif status == "Success":
+ backup.start_backup()
+
+
+def on_doctype_update():
+ frappe.db.add_index("Site Backup", ["files_availability", "job"])
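Review note: `create_database_snapshot` rate-limits snapshots per database server with a short-lived cache flag plus a bounded retry loop. A self-contained sketch of that pattern under the same cache API used above (names are illustrative); note the check-then-set is not atomic, so it bounds concurrency rather than guaranteeing mutual exclusion:

```python
import time

import frappe
import frappe.utils


def acquire_snapshot_slot(cache_key: str, retries: int = 3, ttl: int = 15) -> bool:
    # Wait briefly while another snapshot holds the flag.
    while retries > 0:
        if not frappe.utils.cint(frappe.cache.get_value(cache_key, expires=True)):
            break
        time.sleep(2)
        retries -= 1

    # Still held: the caller should raise OngoingSnapshotError.
    if frappe.cache.get_value(cache_key, expires=True):
        return False

    # Claim the slot for `ttl` seconds.
    frappe.cache.set_value(cache_key, 1, expires_in_sec=ttl)
    return True
```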
diff --git a/press/press/doctype/site_backup/test_site_backup.py b/press/press/doctype/site_backup/test_site_backup.py
index 97786e5a237..829071aa062 100644
--- a/press/press/doctype/site_backup/test_site_backup.py
+++ b/press/press/doctype/site_backup/test_site_backup.py
@@ -1,10 +1,9 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
-
+from __future__ import annotations
import json
-from datetime import datetime
+from typing import TYPE_CHECKING
from unittest.mock import Mock, patch
import frappe
@@ -14,11 +13,14 @@
from press.press.doctype.remote_file.test_remote_file import create_test_remote_file
from press.press.doctype.site.test_site import create_test_site
+if TYPE_CHECKING:
+ from datetime import datetime
+
@patch.object(AgentJob, "enqueue_http_request", new=Mock())
def create_test_site_backup(
site: str,
- creation: datetime = None,
+ creation: datetime | None = None,
files_availability: str = "Available",
offsite: bool = True,
status: str = "Success",
@@ -28,14 +30,14 @@ def create_test_site_backup(
Makes offsite backups by default along with remote files.
"""
- if not creation:
- creation = datetime.now()
+ creation = creation or frappe.utils.now_datetime()
params_dict = {
"doctype": "Site Backup",
"status": status,
"site": site,
"files_availability": files_availability,
"offsite": offsite,
+ "with_files": offsite,
}
if offsite:
params_dict["remote_public_file"] = create_test_remote_file(site, creation).name
diff --git a/press/press/doctype/site_backup_time/__init__.py b/press/press/doctype/site_backup_time/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/site_backup_time/site_backup_time.json b/press/press/doctype/site_backup_time/site_backup_time.json
new file mode 100644
index 00000000000..e1f59f60720
--- /dev/null
+++ b/press/press/doctype/site_backup_time/site_backup_time.json
@@ -0,0 +1,32 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-04-11 13:34:22.125288",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "backup_time"
+ ],
+ "fields": [
+ {
+ "fieldname": "backup_time",
+ "fieldtype": "Time",
+ "in_list_view": 1,
+ "label": "Backup Time",
+ "reqd": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2025-04-11 13:35:26.873865",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Site Backup Time",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/site_backup_time/site_backup_time.py b/press/press/doctype/site_backup_time/site_backup_time.py
new file mode 100644
index 00000000000..d16be229549
--- /dev/null
+++ b/press/press/doctype/site_backup_time/site_backup_time.py
@@ -0,0 +1,23 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class SiteBackupTime(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ backup_time: DF.Time
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/site_config/site_config.json b/press/press/doctype/site_config/site_config.json
index c01e2759582..8e1798cae0a 100644
--- a/press/press/doctype/site_config/site_config.json
+++ b/press/press/doctype/site_config/site_config.json
@@ -38,13 +38,13 @@
"fieldname": "type",
"fieldtype": "Select",
"label": "Type",
- "options": "\nString\nNumber\nBoolean\nJSON"
+ "options": "\nString\nPassword\nNumber\nBoolean\nJSON"
}
],
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
- "modified": "2020-09-21 17:03:07.326556",
+ "modified": "2024-02-23 09:28:25.746695",
"modified_by": "Administrator",
"module": "Press",
"name": "Site Config",
@@ -52,5 +52,6 @@
"permissions": [],
"quick_entry": 1,
"sort_field": "modified",
- "sort_order": "DESC"
+ "sort_order": "DESC",
+ "states": []
}
\ No newline at end of file
diff --git a/press/press/doctype/site_config/site_config.py b/press/press/doctype/site_config/site_config.py
index 3aa9a797192..bf9256fe825 100644
--- a/press/press/doctype/site_config/site_config.py
+++ b/press/press/doctype/site_config/site_config.py
@@ -7,6 +7,49 @@
from frappe.model.document import Document
-class SiteConfig(Document):
+class Config(Document):
+ dashboard_fields = ["key", "type", "value"]
+
def get_type(self):
return frappe.db.get_value("Site Config Key", self.key, "type")
+
+ @staticmethod
+ def format_config_for_list(configs):
+ config_key_titles = frappe.db.get_all(
+ "Site Config Key",
+ fields=["key", "title"],
+ filters={"key": ["in", [c.key for c in configs]]},
+ )
+ secret_keys = frappe.get_all(
+ "Site Config Key", filters={"type": "Password"}, pluck="key"
+ )
+ for config in configs:
+ if config.key in secret_keys:
+ config.value = "*******"
+ config.title = next((c.title for c in config_key_titles if c.key == config.key), "")
+ return configs
+
+
+class SiteConfig(Config):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ internal: DF.Check
+ key: DF.Data
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ type: DF.Literal["", "String", "Password", "Number", "Boolean", "JSON"]
+ value: DF.Code
+ # end: auto-generated types
+
+ @staticmethod
+ def get_list_query(query, filters=None, **list_args):
+ Config = frappe.qb.DocType("Site Config")
+ query = query.where(Config.internal == 0)
+ configs = query.run(as_dict=True)
+ return SiteConfig.format_config_for_list(configs)
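Review note: `format_config_for_list` masks `Password`-type values before they reach the dashboard. The masking step in isolation, as a sketch (dict-based stand-ins for the query results; a Frappe site context is assumed):

```python
import frappe


def mask_secret_configs(configs: list) -> list:
    # Any key whose Site Config Key type is "Password" is replaced with
    # a constant placeholder, mirroring format_config_for_list above.
    secret_keys = set(frappe.get_all("Site Config Key", filters={"type": "Password"}, pluck="key"))
    for config in configs:
        if config["key"] in secret_keys:
            config["value"] = "*******"
    return configs
```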
diff --git a/press/press/doctype/site_config_key/site_config_key.py b/press/press/doctype/site_config_key/site_config_key.py
index 2b343e13091..c4bb5dddcb9 100644
--- a/press/press/doctype/site_config_key/site_config_key.py
+++ b/press/press/doctype/site_config_key/site_config_key.py
@@ -7,6 +7,23 @@
class SiteConfigKey(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ description: DF.SmallText | None
+ internal: DF.Check
+ key: DF.Data
+ title: DF.Data | None
+ type: DF.Literal["Password", "String", "Number", "Boolean", "JSON"]
+ # end: auto-generated types
+
+ dashboard_fields = ["key", "title", "description", "type", "internal"]
+
def validate(self):
import frappe
diff --git a/press/press/doctype/site_config_key/test_site_config_key.py b/press/press/doctype/site_config_key/test_site_config_key.py
index 1cd06f3a673..8717f8e3960 100644
--- a/press/press/doctype/site_config_key/test_site_config_key.py
+++ b/press/press/doctype/site_config_key/test_site_config_key.py
@@ -1,11 +1,10 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestSiteConfigKey(unittest.TestCase):
+class TestSiteConfigKey(FrappeTestCase):
pass
diff --git a/press/press/doctype/site_config_key_blacklist/site_config_key_blacklist.py b/press/press/doctype/site_config_key_blacklist/site_config_key_blacklist.py
index 9afc4468f09..614bebd6d7d 100644
--- a/press/press/doctype/site_config_key_blacklist/site_config_key_blacklist.py
+++ b/press/press/doctype/site_config_key_blacklist/site_config_key_blacklist.py
@@ -7,6 +7,18 @@
class SiteConfigKeyBlacklist(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ key: DF.Data
+ reason: DF.SmallText | None
+ # end: auto-generated types
+
def validate(self):
import frappe
diff --git a/press/press/doctype/site_config_key_blacklist/test_site_config_key_blacklist.py b/press/press/doctype/site_config_key_blacklist/test_site_config_key_blacklist.py
index 46c80d3f4be..e2d123294ed 100644
--- a/press/press/doctype/site_config_key_blacklist/test_site_config_key_blacklist.py
+++ b/press/press/doctype/site_config_key_blacklist/test_site_config_key_blacklist.py
@@ -1,11 +1,10 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestSiteConfigKeyBlacklist(unittest.TestCase):
+class TestSiteConfigKeyBlacklist(FrappeTestCase):
pass
diff --git a/press/press/doctype/site_database_table_permission/__init__.py b/press/press/doctype/site_database_table_permission/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/site_database_table_permission/site_database_table_permission.json b/press/press/doctype/site_database_table_permission/site_database_table_permission.json
new file mode 100644
index 00000000000..d2872364afc
--- /dev/null
+++ b/press/press/doctype/site_database_table_permission/site_database_table_permission.json
@@ -0,0 +1,72 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2024-10-31 17:08:37.280675",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "table",
+ "column_break_fbqg",
+ "mode",
+ "section_break_rswb",
+ "allow_all_columns",
+ "selected_columns"
+ ],
+ "fields": [
+ {
+ "fieldname": "table",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Table",
+ "reqd": 1
+ },
+ {
+ "fieldname": "mode",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Mode",
+ "options": "read_only\nread_write",
+ "reqd": 1
+ },
+ {
+ "fieldname": "column_break_fbqg",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "section_break_rswb",
+ "fieldtype": "Section Break"
+ },
+ {
+ "default": "1",
+ "fieldname": "allow_all_columns",
+ "fieldtype": "Check",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Allow All Columns"
+ },
+ {
+ "depends_on": "eval: !doc.allow_all_columns",
+ "description": "Comma seperated column names",
+ "fieldname": "selected_columns",
+ "fieldtype": "Small Text",
+ "label": "Selected Columns",
+ "mandatory_depends_on": "eval: !doc.allow_all_columns",
+ "not_nullable": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2024-10-31 17:17:51.606102",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Site Database Table Permission",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
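Review note: rows of this child table are what `SiteDatabaseUser.modify_permissions` (further down in this diff) folds into the `table_permissions` dict sent to the agent. A sketch of that shape with illustrative values:

```python
# Illustrative payload derived from Site Database Table Permission rows:
# "columns" is "*" when allow_all_columns is set, else a list of names.
table_permissions = {
    "tabNote": {"mode": "read_only", "columns": "*"},
    "tabToDo": {"mode": "read_write", "columns": ["name", "status"]},
}
```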
diff --git a/press/press/doctype/site_database_table_permission/site_database_table_permission.py b/press/press/doctype/site_database_table_permission/site_database_table_permission.py
new file mode 100644
index 00000000000..be51665d149
--- /dev/null
+++ b/press/press/doctype/site_database_table_permission/site_database_table_permission.py
@@ -0,0 +1,26 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class SiteDatabaseTablePermission(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ allow_all_columns: DF.Check
+ mode: DF.Literal["read_only", "read_write"]
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ selected_columns: DF.SmallText
+ table: DF.Data
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/site_database_user/__init__.py b/press/press/doctype/site_database_user/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/site_database_user/site_database_user.js b/press/press/doctype/site_database_user/site_database_user.js
new file mode 100644
index 00000000000..50b260ed0ed
--- /dev/null
+++ b/press/press/doctype/site_database_user/site_database_user.js
@@ -0,0 +1,65 @@
+// Copyright (c) 2024, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('Site Database User', {
+ refresh(frm) {
+ [
+ [__('Apply Changes'), 'apply_changes', true],
+ [
+ __('Create User in Database'),
+ 'create_user',
+ !frm.doc.user_created_in_database,
+ ],
+ [
+ __('Remove User from Database'),
+ 'remove_user',
+ frm.doc.user_created_in_database,
+ ],
+ [
+ __('Add User to ProxySQL'),
+ 'add_user_to_proxysql',
+ !frm.doc.user_added_in_proxysql,
+ ],
+ [
+ __('Remove User from ProxySQL'),
+ 'remove_user_from_proxysql',
+ frm.doc.user_added_in_proxysql,
+ ],
+ [__('Archive User'), 'archive', frm.doc.status !== 'Archived'],
+ ].forEach(([label, method, condition]) => {
+ if (typeof condition === 'undefined' || condition) {
+ frm.add_custom_button(
+ label,
+ () => {
+ frappe.confirm(
+ `Are you sure you want to ${label.toLowerCase()}?`,
+ () => frm.call(method).then((r) => frm.refresh()),
+ );
+ },
+ __('Actions'),
+ );
+ }
+ });
+
+ frm.add_custom_button(
+ __('Show Credential'),
+ () =>
+ frm.call('get_credential').then((r) => {
+ let message = `Host: ${r.message.host}
+
+Port: ${r.message.port}
+
+Database: ${r.message.database}
+
+Username: ${r.message.username}
+
+Password: ${r.message.password}
+
+\`\`\`\nmysql -u ${r.message.username} -p${r.message.password} -h ${r.message.host} -P ${r.message.port} --ssl --ssl-verify-server-cert\n\`\`\``;
+
+ frappe.msgprint(frappe.markdown(message), 'Database Credentials');
+ }),
+ __('Actions'),
+ );
+ },
+});
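Review note: the credential payload returned by `get_credential` can be turned into the same ready-to-paste `mysql` invocation outside the desk UI. A sketch mirroring the command string the form script assembles (the dict shape follows `get_credential`; values are placeholders, and as in the UI the password ends up on the command line):

```python
def mysql_command(cred: dict) -> str:
    # Mirrors the command assembled in site_database_user.js above.
    return (
        f"mysql -u {cred['username']} -p{cred['password']} "
        f"-h {cred['host']} -P {cred['port']} --ssl --ssl-verify-server-cert"
    )


# mysql_command({"username": "u", "password": "p", "host": "n1.example", "port": 3306})
```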
diff --git a/press/press/doctype/site_database_user/site_database_user.json b/press/press/doctype/site_database_user/site_database_user.json
new file mode 100644
index 00000000000..33ecbd424be
--- /dev/null
+++ b/press/press/doctype/site_database_user/site_database_user.json
@@ -0,0 +1,186 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2024-10-31 16:54:56.752608",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "status",
+ "label",
+ "mode",
+ "site",
+ "team",
+ "column_break_udtx",
+ "max_connections",
+ "username",
+ "password",
+ "user_created_in_database",
+ "user_added_in_proxysql",
+ "section_break_cpbg",
+ "permissions",
+ "section_break_ubkn",
+ "column_break_rczb",
+ "failed_agent_job",
+ "failure_reason"
+ ],
+ "fields": [
+ {
+ "fieldname": "site",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Site",
+ "options": "Site",
+ "reqd": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "mode",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Mode",
+ "options": "read_only\nread_write\ngranular",
+ "reqd": 1
+ },
+ {
+ "fieldname": "column_break_udtx",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "username",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Username",
+ "read_only": 1,
+ "set_only_once": 1,
+ "unique": 1
+ },
+ {
+ "fieldname": "password",
+ "fieldtype": "Password",
+ "label": "Password",
+ "read_only": 1,
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "section_break_cpbg",
+ "fieldtype": "Section Break"
+ },
+ {
+ "depends_on": "eval: doc.mode == \"granular\"",
+ "fieldname": "permissions",
+ "fieldtype": "Table",
+ "label": "Permissions",
+ "options": "Site Database Table Permission"
+ },
+ {
+ "default": "Pending",
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Status",
+ "options": "Pending\nActive\nFailed\nArchived",
+ "read_only": 1,
+ "reqd": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "user_added_in_proxysql",
+ "fieldtype": "Check",
+ "label": "User Added in ProxySQL",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "user_created_in_database",
+ "fieldtype": "Check",
+ "label": "User Created in Database",
+ "read_only": 1
+ },
+ {
+ "depends_on": "eval: doc.status === \"Failed\"",
+ "fieldname": "section_break_ubkn",
+ "fieldtype": "Section Break"
+ },
+ {
+ "depends_on": "eval: doc.status === \"Failed\"",
+ "fieldname": "column_break_rczb",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "failed_agent_job",
+ "fieldtype": "Link",
+ "label": "Failed Agent Job",
+ "options": "Agent Job"
+ },
+ {
+ "fieldname": "failure_reason",
+ "fieldtype": "Small Text",
+ "label": "Failure Reason"
+ },
+ {
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "label": "Team",
+ "options": "Team",
+ "reqd": 1,
+ "search_index": 1
+ },
+ {
+ "default": "16",
+ "fieldname": "max_connections",
+ "fieldtype": "Int",
+ "label": "Max Connections",
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "label",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Label",
+ "reqd": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [
+ {
+ "group": "Related Documents",
+ "link_doctype": "Agent Job",
+ "link_fieldname": "reference_name"
+ }
+ ],
+ "modified": "2025-09-01 13:44:51.350883",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Site Database User",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/site_database_user/site_database_user.py b/press/press/doctype/site_database_user/site_database_user.py
new file mode 100644
index 00000000000..812f5c75fc8
--- /dev/null
+++ b/press/press/doctype/site_database_user/site_database_user.py
@@ -0,0 +1,449 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+from __future__ import annotations
+
+import re
+from collections import Counter
+from typing import TYPE_CHECKING
+
+import frappe
+import frappe.utils
+from elasticsearch import Elasticsearch
+from frappe.model.document import Document
+from frappe.utils.password import get_decrypted_password
+
+from press.agent import Agent
+from press.api.client import dashboard_whitelist
+from press.overrides import get_permission_query_conditions_for_doctype
+from press.press.doctype.site_activity.site_activity import log_site_activity
+
+if TYPE_CHECKING:
+ from press.press.doctype.site.site import Site
+
+
+class SiteDatabaseUser(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.site_database_table_permission.site_database_table_permission import (
+ SiteDatabaseTablePermission,
+ )
+
+ failed_agent_job: DF.Link | None
+ failure_reason: DF.SmallText | None
+ label: DF.Data
+ max_connections: DF.Int
+ mode: DF.Literal["read_only", "read_write", "granular"]
+ password: DF.Password | None
+ permissions: DF.Table[SiteDatabaseTablePermission]
+ site: DF.Link
+ status: DF.Literal["Pending", "Active", "Failed", "Archived"]
+ team: DF.Link
+ user_added_in_proxysql: DF.Check
+ user_created_in_database: DF.Check
+ username: DF.Data | None
+ # end: auto-generated types
+
+ dashboard_fields = (
+ "label",
+ "status",
+ "site",
+ "username",
+ "team",
+ "mode",
+ "failed_agent_job",
+ "failure_reason",
+ "permissions",
+ "max_connections",
+ )
+
+ def validate(self):
+ if not self.has_value_changed("status"):
+ self._raise_error_if_archived()
+ # remove permissions if not granular mode
+ if self.mode != "granular":
+ self.permissions.clear()
+
+ if not self.is_new() and self.has_value_changed("max_connections"):
+ frappe.throw("You can't update the max database connections. Archive it and create a new one.")
+
+ if not self.max_connections:
+ frappe.throw(
+ "Max database connections can't be zero. You need to opt for at least one connection."
+ )
+
+ def before_insert(self):
+ site: Site = frappe.get_doc("Site", self.site)
+ if not site.has_permission():
+ frappe.throw("You don't have permission to create database user")
+ if not frappe.db.get_value("Site Plan", site.plan, "database_access"):
+ frappe.throw(f"Database Access is not available on {site.plan} plan")
+
+ # validate connection limit
+ exists_db_users_connection_limit = frappe.db.get_all(
+ "Site Database User",
+ {"site": self.site, "status": ("!=", "Archived")},
+ pluck="max_connections",
+ )
+ total_used_connections = sum(exists_db_users_connection_limit)
+ allowed_max_connections_for_site = site.database_access_connection_limit - total_used_connections
+ if self.max_connections > allowed_max_connections_for_site:
+ frappe.throw(
+ f"Your site has quota of {site.database_access_connection_limit} database connections.\nYou can't allocate more than {allowed_max_connections_for_site} connections for new user. You can drop other database users to allocate more connections."
+ )
+
+ self.status = "Pending"
+ if not self.username:
+ self.username = frappe.generate_hash(length=15)
+ if not self.password:
+ self.password = frappe.generate_hash(length=20)
+
+ def after_insert(self):
+ log_site_activity(
+ self.site,
+ "Create Database User",
+ reason=f"Created user {self.username} with {self.mode} permission",
+ )
+ if hasattr(self.flags, "ignore_after_insert_hooks") and self.flags.ignore_after_insert_hooks:
+ """
+ Added to make it easy to migrate database access user records from the Site doctype to Site Database User
+ """
+ return
+ self.apply_changes()
+
+ def on_update(self):
+ if self.has_value_changed("status") and self.status == "Archived":
+ log_site_activity(
+ self.site,
+ "Remove Database User",
+ reason=f"Removed user {self.username} with {self.mode} permission",
+ )
+
+ def _raise_error_if_archived(self):
+ if self.status == "Archived":
+ frappe.throw("user has been deleted and no further changes can be made")
+
+ def _get_database_name(self):
+ site = frappe.get_doc("Site", self.site)
+ db_name = site.fetch_info().get("config", {}).get("db_name")
+ if not db_name:
+ frappe.throw("Failed to fetch database name of site")
+ return db_name
+
+ @dashboard_whitelist()
+ def save_and_apply_changes(self, label: str, mode: str, permissions: list): # noqa: C901
+ if self.status == "Pending" or self.status == "Archived":
+ frappe.throw(f"You can't modify information in {self.status} state. Please try again later")
+
+ self.label = label
+ is_db_user_configuration_changed = self.mode != mode or self._is_permissions_changed(permissions)
+ if is_db_user_configuration_changed:
+ self.mode = mode
+ new_permissions = permissions
+ new_permission_tables = [p["table"] for p in new_permissions]
+ current_permission_tables = [p.table for p in self.permissions]
+ # add new permissions
+ for permission in new_permissions:
+ if permission["table"] not in current_permission_tables:
+ self.append("permissions", permission)
+ # modify permissions
+ for permission in self.permissions:
+ for new_permission in new_permissions:
+ if permission.table == new_permission["table"]:
+ permission.update(new_permission)
+ break
+ # delete permissions which are not in the modified list
+ self.permissions = [p for p in self.permissions if p.table in new_permission_tables]
+
+ self.save()
+ if is_db_user_configuration_changed:
+ self.apply_changes()
+
+ def _is_permissions_changed(self, new_permissions):
+ if len(new_permissions) != len(self.permissions):
+ return True
+
+ for permission in new_permissions:
+ for p in self.permissions:
+ if permission["table"] == p.table and (
+ permission["mode"] != p.mode
+ or permission["allow_all_columns"] != p.allow_all_columns
+ or Counter(permission["selected_columns"]) != Counter(p.selected_columns)
+ ):
+ return True
+
+ return False
+
+ @frappe.whitelist()
+ def apply_changes(self):
+ if not self.user_created_in_database:
+ self.create_user()
+ elif not self.user_added_in_proxysql:
+ self.add_user_to_proxysql()
+ else:
+ self.modify_permissions()
+
+ self.status = "Pending"
+ self.save(ignore_permissions=True)
+
+ @frappe.whitelist()
+ def create_user(self):
+ self._raise_error_if_archived()
+ agent = Agent(frappe.db.get_value("Site", self.site, "server"))
+ agent.create_database_user(
+ frappe.get_doc("Site", self.site), self.username, self.get_password("password"), self.name
+ )
+
+ @frappe.whitelist()
+ def remove_user(self):
+ self._raise_error_if_archived()
+ agent = Agent(frappe.db.get_value("Site", self.site, "server"))
+ agent.remove_database_user(
+ frappe.get_doc("Site", self.site),
+ self.username,
+ self.name,
+ )
+
+ @frappe.whitelist()
+ def add_user_to_proxysql(self):
+ self._raise_error_if_archived()
+ database = self._get_database_name()
+ server = frappe.db.get_value("Site", self.site, "server")
+ proxy_server = frappe.db.get_value("Server", server, "proxy_server")
+ database_server_name = frappe.db.get_value(
+ "Bench", frappe.db.get_value("Site", self.site, "bench"), "database_server"
+ )
+ database_server = frappe.get_doc("Database Server", database_server_name)
+ agent = Agent(proxy_server, server_type="Proxy Server")
+ agent.add_proxysql_user(
+ frappe.get_doc("Site", self.site),
+ database,
+ self.username,
+ self.get_password("password"),
+ self.max_connections,
+ database_server,
+ reference_doctype="Site Database User",
+ reference_name=self.name,
+ )
+
+ @frappe.whitelist()
+ def remove_user_from_proxysql(self):
+ self._raise_error_if_archived()
+ server = frappe.db.get_value("Site", self.site, "server")
+ proxy_server = frappe.db.get_value("Server", server, "proxy_server")
+ agent = Agent(proxy_server, server_type="Proxy Server")
+ agent.remove_proxysql_user(
+ frappe.get_doc("Site", self.site),
+ self.username,
+ reference_doctype="Site Database User",
+ reference_name=self.name,
+ )
+
+ @frappe.whitelist()
+ def modify_permissions(self):
+ self._raise_error_if_archived()
+ log_site_activity(
+ self.site,
+ "Modify Database User Permissions",
+ reason=f"Modified user {self.username} with {self.mode} permission",
+ )
+ server = frappe.db.get_value("Site", self.site, "server")
+ agent = Agent(server)
+ table_permissions = {}
+
+ if self.mode == "granular":
+ for x in self.permissions:
+ table_permissions[x.table] = {
+ "mode": x.mode,
+ "columns": "*"
+ if x.allow_all_columns
+ else [c.strip() for c in x.selected_columns.splitlines() if c.strip()],
+ }
+
+ agent.modify_database_user_permissions(
+ frappe.get_doc("Site", self.site),
+ self.username,
+ self.mode,
+ table_permissions,
+ self.name,
+ )
+
+ @dashboard_whitelist()
+ def get_credential(self):
+ server = frappe.db.get_value("Site", self.site, "server")
+ proxy_server = frappe.db.get_value("Server", server, "proxy_server")
+ database = self._get_database_name()
+ return {
+ "host": proxy_server,
+ "port": 3306,
+ "database": database,
+ "username": self.username,
+ "password": self.get_password("password"),
+ "mode": self.mode,
+ "max_connections": self.max_connections,
+ }
+
+ @dashboard_whitelist()
+ def archive(self, raise_error: bool = True, skip_remove_db_user_step: bool = False):
+ if not raise_error and self.status == "Archived":
+ return
+ self._raise_error_if_archived()
+ self.status = "Pending"
+ self.save()
+
+ if self.user_created_in_database and not skip_remove_db_user_step:
+ # If we are dropping the whole database, there is no need to drop
+ # db users separately; in that case pass `skip_remove_db_user_step`
+ # to skip this step.
+ self.remove_user()
+ else:
+ self.user_created_in_database = False
+ self.save()
+
+ if self.user_added_in_proxysql:
+ self.remove_user_from_proxysql()
+
+ if not self.user_created_in_database and not self.user_added_in_proxysql:
+ self.status = "Archived"
+ self.save()
+
+ @dashboard_whitelist()
+ def fetch_logs(
+ self, start_timestamp: int, end_timestamp: int, search_string: str = "", client_ip: str = ""
+ ):
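+ # 2592000 seconds = 30 days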
+ if abs(start_timestamp - end_timestamp) > 2592000:
+ frappe.throw(
+ "You can only search through at max 30 days of logs. Please try again with a smaller range."
+ )
+ try:
+ log_server = frappe.db.get_single_value("Press Settings", "log_server")
+
+ if not log_server:
+ return []
+
+ query = {
+ "bool": {
+ "filter": [
+ {"term": {"event.dataset": "proxysql.events"}},
+ {"term": {"username": self.username}},
+ {
+ "range": {
+ "@timestamp": {
+ "gte": int(start_timestamp) * 1000, # Convert to milliseconds
+ "lte": int(end_timestamp) * 1000, # Convert to milliseconds
+ }
+ }
+ },
+ ],
+ "must": [],
+ "must_not": [],
+ "should": [],
+ }
+ }
+
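+ # A bare "*" would match every query; only add a wildcard clause for real search terms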
+ if search_string and search_string.strip() != "*":
+ query["bool"]["must"].append(
+ {"wildcard": {"query": {"value": f"*{search_string}*", "case_insensitive": True}}}
+ )
+
+ if client_ip:
+ query["bool"]["filter"].append({"term": {"client_ip": client_ip}})
+
+ url = f"https://{log_server}/elasticsearch/"
+ password = get_decrypted_password("Log Server", log_server, "kibana_password")
+ client = Elasticsearch(url, basic_auth=("frappe", password))
+ result = client.search(
+ size=500,
+ index="filebeat-*",
+ query=query,
+ _source=["query", "client_ip", "start_timestamp", "duration_ms"],
+ )
+ # Only return the _source part of each hit
+ hits = [hit["_source"] for hit in result["hits"]["hits"]]
+ for hit in hits:
+ hit["start_timestamp"] = int(
+ frappe.utils.cint(hit.get("start_timestamp"), 0) / 1000
+ ) # Convert to seconds
+ return hits
+ except Exception:
+ frappe.throw("Failed to fetch logs from log server. Please try again later.")
+
+ @staticmethod
+ def process_job_update(job): # noqa: C901
+ if job.status not in ("Success", "Failure"):
+ return
+
+ if not job.reference_name or not frappe.db.exists("Site Database User", job.reference_name):
+ return
+
+ doc: SiteDatabaseUser = frappe.get_doc("Site Database User", job.reference_name)
+
+ if job.status == "Failure":
+ doc.status = "Failed"
+ doc.failed_agent_job = job.name
+ if job.job_type == "Modify Database User Permissions":
+ doc.failure_reason = SiteDatabaseUser.user_addressable_error_from_stacktrace(job.traceback)
+ doc.save(ignore_permissions=True)
+ return
+
+ if job.job_type == "Create Database User":
+ doc.user_created_in_database = True
+ if not doc.user_added_in_proxysql:
+ doc.add_user_to_proxysql()
+ if job.job_type == "Remove Database User":
+ doc.user_created_in_database = False
+ elif job.job_type == "Add User to ProxySQL":
+ doc.user_added_in_proxysql = True
+ doc.modify_permissions()
+ elif job.job_type == "Remove User from ProxySQL":
+ doc.user_added_in_proxysql = False
+ elif job.job_type == "Modify Database User Permissions":
+ doc.status = "Active"
+
+ doc.save(ignore_permissions=True)
+ doc.reload()
+
+ if (
+ job.job_type in ("Remove Database User", "Remove User from ProxySQL")
+ and not doc.user_added_in_proxysql
+ and not doc.user_created_in_database
+ ):
+ doc.archive()
+
+ @staticmethod
+ def user_addressable_error_from_stacktrace(stacktrace: str):
+ pattern = r"peewee\.\w+Error: (.*)?"
+ default_error_msg = "Unknown error. Please try again.\nIf the error persists, please contact support."
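+ # Example of a stacktrace tail this parses (assumed peewee error format):
+ #   peewee.ProgrammingError: (1054, "Unknown column 'foo' in 'field list'")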
+
+ matches = re.findall(pattern, stacktrace)
+ if len(matches) == 0:
+ return default_error_msg
+ data = matches[0].strip().replace("(", "").replace(")", "").split(",", 1)
+ if len(data) != 2:
+ return default_error_msg
+
+ if data[0] == "1054":
+ pattern = r"Unknown column '(.*)' in '(.*)'\"*?"
+ matches = re.findall(pattern, data[1])
+ if len(matches) == 1 and len(matches[0]) == 2:
+ return f"Column '{matches[0][0]}' doesn't exist in '{matches[0][1]}' table.\nPlease remove the column from permissions configuration and apply changes."
+
+ elif data[0] == "1146":
+ pattern = r"Table '(.*)' doesn't exist"
+ matches = re.findall(pattern, data[1])
+ if len(matches) == 1 and isinstance(matches[0], str):
+ table_name = matches[0]
+ table_name = table_name.split(".")[-1]
+ return f"Table '{table_name}' doesn't exist.\nPlease remove it from permissions table and apply changes."
+
+ return default_error_msg
+
+
+get_permission_query_conditions = get_permission_query_conditions_for_doctype("Site Database User")
diff --git a/press/press/doctype/site_database_user/test_site_database_user.py b/press/press/doctype/site_database_user/test_site_database_user.py
new file mode 100644
index 00000000000..247f80c3f9c
--- /dev/null
+++ b/press/press/doctype/site_database_user/test_site_database_user.py
@@ -0,0 +1,14 @@
+# Copyright (c) 2024, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests import UnitTestCase
+
+
+class TestSiteDatabaseUser(UnitTestCase):
+ """
+ Unit tests for SiteDatabaseUser.
+ Use this class for testing individual functions and methods.
+ """
+
+ pass
diff --git a/press/press/doctype/site_domain/site_domain.js b/press/press/doctype/site_domain/site_domain.js
index e5a18527c11..990e7d2980f 100644
--- a/press/press/doctype/site_domain/site_domain.js
+++ b/press/press/doctype/site_domain/site_domain.js
@@ -1,5 +1,10 @@
// Copyright (c) 2020, Frappe and contributors
// For license information, please see license.txt
-// frappe.ui.form.on('Site Domain', {
-// });
+frappe.ui.form.on('Site Domain', {
+ refresh: function (frm) {
+ frm.add_custom_button('Create DNS Record', () => {
+ frm.call('create_dns_record').then((r) => frm.refresh());
+ });
+ },
+});
diff --git a/press/press/doctype/site_domain/site_domain.json b/press/press/doctype/site_domain/site_domain.json
index bd16f437f32..7785b05ee3f 100644
--- a/press/press/doctype/site_domain/site_domain.json
+++ b/press/press/doctype/site_domain/site_domain.json
@@ -11,11 +11,13 @@
"site",
"team",
"domain",
- "ssl",
- "dns_type",
+ "column_break_faqy",
"tls_certificate",
"retry_count",
- "redirect_to_primary"
+ "redirect_to_primary",
+ "dns_section",
+ "dns_type",
+ "dns_response"
],
"fields": [
{
@@ -26,7 +28,8 @@
"label": "Site",
"options": "Site",
"read_only": 1,
- "reqd": 1
+ "reqd": 1,
+ "search_index": 1
},
{
"fieldname": "domain",
@@ -38,14 +41,6 @@
"reqd": 1,
"unique": 1
},
- {
- "default": "0",
- "fieldname": "ssl",
- "fieldtype": "Check",
- "in_list_view": 1,
- "label": "SSL",
- "read_only": 1
- },
{
"default": "CNAME",
"fieldname": "dns_type",
@@ -95,13 +90,29 @@
"options": "Team",
"read_only": 1,
"reqd": 1
+ },
+ {
+ "fieldname": "column_break_faqy",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "dns_section",
+ "fieldtype": "Section Break",
+ "label": "DNS"
+ },
+ {
+ "fieldname": "dns_response",
+ "fieldtype": "Code",
+ "label": "DNS Response",
+ "read_only": 1
}
],
"links": [],
- "modified": "2021-02-15 20:09:39.409924",
+ "modified": "2025-03-18 10:09:23.846340",
"modified_by": "Administrator",
"module": "Press",
"name": "Site Domain",
+ "naming_rule": "By fieldname",
"owner": "Administrator",
"permissions": [
{
@@ -118,12 +129,14 @@
},
{
"create": 1,
+ "delete": 1,
"read": 1,
"role": "Press Admin",
"write": 1
},
{
"create": 1,
+ "delete": 1,
"read": 1,
"role": "Press Member",
"write": 1
@@ -131,6 +144,7 @@
],
"sort_field": "modified",
"sort_order": "DESC",
+ "states": [],
"title_field": "domain",
"track_changes": 1
-}
+}
\ No newline at end of file
diff --git a/press/press/doctype/site_domain/site_domain.py b/press/press/doctype/site_domain/site_domain.py
index 9e5073ae543..37eea54e72c 100644
--- a/press/press/doctype/site_domain/site_domain.py
+++ b/press/press/doctype/site_domain/site_domain.py
@@ -1,20 +1,96 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
+import json
+from typing import TYPE_CHECKING, ClassVar
import frappe
+import rq
from frappe.model.document import Document
from press.agent import Agent
-
+from press.exceptions import (
+ DNSValidationError,
+)
from press.overrides import get_permission_query_conditions_for_doctype
+from press.utils import log_error
+from press.utils.dns import check_dns_cname_a, create_dns_record
+from press.utils.jobs import has_job_timeout_exceeded
+
+if TYPE_CHECKING:
+ from press.press.doctype.site.site import Site
class SiteDomain(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ dns_response: DF.Code | None
+ dns_type: DF.Literal["A", "NS", "CNAME"]
+ domain: DF.Data
+ redirect_to_primary: DF.Check
+ retry_count: DF.Int
+ site: DF.Link
+ status: DF.Literal["Pending", "In Progress", "Active", "Broken"]
+ team: DF.Link
+ tls_certificate: DF.Link | None
+ # end: auto-generated types
+
+ dashboard_fields: ClassVar = ["domain", "status", "dns_type", "site", "redirect_to_primary"]
+
+ @staticmethod
+ def get_list_query(query, filters=None, **list_args):
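+ # Flag the site's current host_name as the primary domain and float it
+ # to the top of the dashboard list.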
+ domains = query.run(as_dict=1)
+ if filters.site:
+ host_name = frappe.db.get_value("Site", filters.site, "host_name")
+ for domain in domains:
+ if domain.domain == host_name:
+ domain.primary = True
+ break
+ domains.sort(key=lambda domain: not domain.primary)
+ return domains
+ return None
+
+ def before_insert(self):
+ Site = frappe.qb.DocType("Site")
+
+ site = (
+ frappe.qb.from_(Site)
+ .select(Site.name)
+ .where(Site.name == self.domain)
+ .where(Site.name != self.site)
+ .run(as_dict=True)
+ )
+ if site:
+ frappe.throw(f"Domain {self.domain} is already taken. Please choose a different domain.")
+
+ @property
+ def agent(self):
+ server = frappe.db.get_value("Site", self.site, "server")
+ is_standalone = frappe.db.get_value("Server", server, "is_standalone")
+ if is_standalone:
+ agent = Agent(server, server_type="Server")
+ else:
+ proxy_server = frappe.db.get_value("Server", server, "proxy_server")
+ agent = Agent(proxy_server, server_type="Proxy Server")
+ return agent
+
def after_insert(self):
- if not self.default:
- self.create_tls_certificate()
+ if self.default:
+ return
+
+ if self.has_root_tls_certificate:
+ server = frappe.db.get_value("Site", self.site, "server")
+ self.agent.add_domain_to_upstream(server=server, site=self.site, domain=self.domain)
+ return
+
+ self.create_tls_certificate()
def validate(self):
if self.has_value_changed("redirect_to_primary"):
@@ -23,17 +99,26 @@ def validate(self):
elif not self.is_new():
self.remove_redirect_in_proxy()
+ @frappe.whitelist()
+ def create_dns_record(self):
+ site = frappe.get_doc("Site", self.site)
+ if not self.domain.endswith(site.domain):
+ return
+ create_dns_record(site, self.domain)
+
@property
def default(self):
return self.domain == self.site
+ @property
+ def has_root_tls_certificate(self):
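+ # e.g. "mysite.frappe.cloud" -> parent "frappe.cloud"; if the parent is a
+ # managed Root Domain, the domain is presumably already covered by its
+ # wildcard certificate, so no per-domain certificate is needed.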
+ return bool(frappe.db.exists("Root Domain", self.domain.split(".", 1)[1], "name"))
+
def setup_redirect_in_proxy(self):
- site = frappe.get_doc("Site", self.site)
+ site: Site = frappe.get_doc("Site", self.site)
target = site.host_name
if target == self.name:
- frappe.throw(
- "Primary domain can't be redirected.", exc=frappe.exceptions.ValidationError
- )
+ frappe.throw("Primary domain can't be redirected.", exc=frappe.exceptions.ValidationError)
site.set_redirects_in_proxy([self.name])
def remove_redirect_in_proxy(self):
@@ -61,34 +146,20 @@ def create_tls_certificate(self):
self.save()
def process_tls_certificate_update(self):
- certificate_status = frappe.db.get_value(
- "TLS Certificate", self.tls_certificate, "status"
+ certificate = frappe.db.get_value(
+ "TLS Certificate", self.tls_certificate, ["status", "creation"], as_dict=True
)
- if certificate_status == "Active":
+ if certificate.status == "Active":
self.create_agent_request()
- elif certificate_status == "Failure":
+ elif certificate.status == "Failure":
self.status = "Broken"
self.save()
def create_agent_request(self):
- server = frappe.db.get_value("Site", self.site, "server")
- is_standalone = frappe.db.get_value("Server", server, "is_standalone")
- if is_standalone:
- agent = Agent(server, server_type="Server")
- else:
- proxy_server = frappe.db.get_value("Server", server, "proxy_server")
- agent = Agent(proxy_server, server_type="Proxy Server")
- agent.new_host(self)
+ self.agent.new_host(self)
def create_remove_host_agent_request(self):
- server = frappe.db.get_value("Site", self.site, "server")
- is_standalone = frappe.db.get_value("Server", server, "is_standalone")
- if is_standalone:
- agent = Agent(server, server_type="Server")
- else:
- proxy_server = frappe.db.get_value("Server", server, "proxy_server")
- agent = Agent(proxy_server, server_type="Proxy Server")
- agent.remove_host(self)
+ self.agent.remove_host(self)
def retry(self):
self.status = "Pending"
@@ -102,14 +173,10 @@ def retry(self):
def on_trash(self):
if self.domain == frappe.db.get_value("Site", self.site, "host_name"):
- frappe.throw(
- msg="Primary domain cannot be deleted", exc=frappe.exceptions.LinkExistsError
- )
+ frappe.throw(msg="Primary domain cannot be deleted", exc=frappe.exceptions.LinkExistsError)
self.disavow_agent_jobs()
- if not self.default:
- self.create_remove_host_agent_request()
- elif self.redirect_to_primary:
+ if not (self.default and self.has_root_tls_certificate) or self.redirect_to_primary:
self.create_remove_host_agent_request()
if self.status == "Active":
self.remove_domain_from_site_config()
@@ -126,7 +193,7 @@ def disavow_agent_jobs(self):
frappe.db.set_value("Agent Job", job.name, "host", None)
def remove_domain_from_site_config(self):
- site_doc = frappe.get_doc("Site", self.site)
+ site_doc: Site = frappe.get_doc("Site", self.site)
if site_doc.status == "Archived":
return
site_doc.remove_domain_from_config(self.domain)
@@ -140,6 +207,7 @@ def process_new_host_job_update(job):
"Running": "In Progress",
"Success": "Active",
"Failure": "Broken",
+ "Delivery Failure": "Broken",
}[job.status]
if updated_status != domain_status:
@@ -148,6 +216,75 @@ def process_new_host_job_update(job):
frappe.get_doc("Site", job.site).add_domain_to_config(job.host)
-get_permission_query_conditions = get_permission_query_conditions_for_doctype(
- "Site Domain"
-)
+def process_add_domain_to_upstream_job_update(job):
+ request_data = json.loads(job.request_data)
+ domain = request_data.get("domain")
+ domain_status = frappe.get_value("Site Domain", domain, "status")
+
+ updated_status = {
+ "Pending": "Pending",
+ "Running": "In Progress",
+ "Success": "Active",
+ "Failure": "Broken",
+ "Delivery Failure": "Broken",
+ }[job.status]
+
+ if updated_status != domain_status:
+ frappe.db.set_value("Site Domain", domain, "status", updated_status)
+
+ if job.status in ["Failure", "Delivery Failure"]:
+ frappe.db.set_value(
+ "Product Trial Request", {"domain": request_data.get("domain")}, "status", "Error"
+ )
+
+
+def update_dns_type():
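+ # Presumably run from the scheduler: refresh each custom domain's DNS
+ # record type and raw response, and reset certificate retries once the
+ # DNS record resolves correctly.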
+ Domain = frappe.qb.DocType("Site Domain")
+ Certificate = frappe.qb.DocType("TLS Certificate")
+ query = (
+ frappe.qb.from_(Domain)
+ .left_join(Certificate)
+ .on(Domain.tls_certificate == Certificate.name)
+ .where(Domain.tls_certificate.isnotnull()) # Don't query wildcard subdomains
+ .select(
+ Domain.name,
+ Domain.domain,
+ Domain.dns_type,
+ Domain.site,
+ Domain.tls_certificate,
+ Certificate.retry_count,
+ )
+ )
+
+ domains = query.run(as_dict=1)
+ for domain in domains:
+ if has_job_timeout_exceeded():
+ return
+ try:
+ response = check_dns_cname_a(domain.site, domain.domain, ignore_proxying=True)
+ if response["matched"] and response["type"] != domain.dns_type:
+ frappe.db.set_value(
+ "Site Domain", domain.name, "dns_type", response["type"], update_modified=False
+ )
+ if domain.retry_count > 0 and response["matched"]:
+ # In the past we failed to obtain the certificate (likely because of DNS issues).
+ # Since the DNS is now correct, we can retry obtaining the certificate.
+ frappe.db.set_value(
+ "TLS Certificate", domain.tls_certificate, "retry_count", 0, update_modified=False
+ )
+
+ pretty_response = json.dumps(response, indent=4, default=str)
+ frappe.db.set_value(
+ "Site Domain", domain.name, "dns_response", pretty_response, update_modified=False
+ )
+ frappe.db.commit()
+ except DNSValidationError:
+ pass
+ except rq.timeouts.JobTimeoutException:
+ return
+ except Exception:
+ frappe.db.rollback()
+ log_error("DNS Check Failed", domain=domain)
+
+
+get_permission_query_conditions = get_permission_query_conditions_for_doctype("Site Domain")
diff --git a/press/press/doctype/site_domain/test_site_domain.py b/press/press/doctype/site_domain/test_site_domain.py
index 4772fed322e..5d06021276e 100644
--- a/press/press/doctype/site_domain/test_site_domain.py
+++ b/press/press/doctype/site_domain/test_site_domain.py
@@ -1,12 +1,11 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
-import unittest
from unittest.mock import Mock, call, patch
import frappe
+from frappe.tests.utils import FrappeTestCase
from press.agent import Agent
from press.press.doctype.agent_job.agent_job import AgentJob
@@ -16,9 +15,7 @@
from press.press.doctype.tls_certificate.tls_certificate import TLSCertificate
-def create_test_site_domain(
- site: str, domain: str, status: str = "Active"
-) -> SiteDomain:
+def create_test_site_domain(site: str, domain: str, status: str = "Active") -> SiteDomain:
"""Create test Site Domain doc."""
with patch.object(TLSCertificate, "obtain_certificate"):
site_domain = frappe.get_doc(
@@ -36,7 +33,8 @@ def create_test_site_domain(
@patch.object(AgentJob, "after_insert", new=Mock())
-class TestSiteDomain(unittest.TestCase):
+@patch("press.press.doctype.site.site._change_dns_record", new=Mock())
+class TestSiteDomain(FrappeTestCase):
"""Tests for Site Domain Document methods."""
def tearDown(self):
@@ -60,9 +58,7 @@ def test_only_active_site_domain_can_be_primary(self):
domain_name = frappe.mock("domain_name")
site_domain = create_test_site_domain(site.name, domain_name, "Pending")
- self.assertRaises(
- frappe.exceptions.LinkValidationError, site.set_host_name, site_domain.name
- )
+ self.assertRaises(frappe.exceptions.LinkValidationError, site.set_host_name, site_domain.name)
def test_default_host_name_is_site_subdomain(self):
"""Ensure subdomain+domain is default primary host_name."""
@@ -72,9 +68,7 @@ def test_default_host_name_is_site_subdomain(self):
def test_default_site_domain_cannot_be_deleted(self):
"""Ensure default site domain for a site cannot be deleted."""
site = create_test_site(self.site_subdomain)
- default_domain = frappe.get_doc(
- {"doctype": "Site Domain", "site": site.name, "name": site.name}
- )
+ default_domain = frappe.get_doc({"doctype": "Site Domain", "site": site.name, "name": site.name})
site_domain2 = create_test_site_domain(site.name, "hellohello.com")
site.set_host_name(site_domain2.name)
self.assertRaises(Exception, site.remove_domain, default_domain.name)
@@ -93,9 +87,7 @@ def test_site_domain_for_other_site_cant_be_primary(self):
site1 = create_test_site(self.site_subdomain)
site2 = create_test_site("testing-another")
site_domain = create_test_site_domain(site2.name, "hellohello.com")
- self.assertRaises(
- frappe.exceptions.LinkValidationError, site1.set_host_name, site_domain.name
- )
+ self.assertRaises(frappe.exceptions.LinkValidationError, site1.set_host_name, site_domain.name)
def test_set_host_name_removes_redirect_of_domain(self):
"""Ensure set_host_name removes redirect of domain."""
diff --git a/press/press/doctype/site_group_deploy/__init__.py b/press/press/doctype/site_group_deploy/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/site_group_deploy/site_group_deploy.js b/press/press/doctype/site_group_deploy/site_group_deploy.js
new file mode 100644
index 00000000000..db9dbb48a44
--- /dev/null
+++ b/press/press/doctype/site_group_deploy/site_group_deploy.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2024, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Site Group Deploy", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/site_group_deploy/site_group_deploy.json b/press/press/doctype/site_group_deploy/site_group_deploy.json
new file mode 100644
index 00000000000..b3b52b619ca
--- /dev/null
+++ b/press/press/doctype/site_group_deploy/site_group_deploy.json
@@ -0,0 +1,185 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "autoname": "hash",
+ "creation": "2024-08-28 14:35:33.621134",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "status",
+ "team",
+ "site_section",
+ "site",
+ "subdomain",
+ "site_plan",
+ "auto_provision_bench",
+ "provider",
+ "release_group_section",
+ "release_group",
+ "version",
+ "column_break_smfr",
+ "cluster",
+ "bench",
+ "apps_section",
+ "apps_column",
+ "apps"
+ ],
+ "fields": [
+ {
+ "fieldname": "site_section",
+ "fieldtype": "Section Break",
+ "label": "Site"
+ },
+ {
+ "fieldname": "site",
+ "fieldtype": "Link",
+ "label": "Site",
+ "options": "Site"
+ },
+ {
+ "fieldname": "release_group_section",
+ "fieldtype": "Section Break",
+ "label": "Release Group"
+ },
+ {
+ "fieldname": "subdomain",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Subdomain",
+ "reqd": 1
+ },
+ {
+ "fieldname": "release_group",
+ "fieldtype": "Link",
+ "label": "Release Group",
+ "options": "Release Group"
+ },
+ {
+ "fieldname": "apps_section",
+ "fieldtype": "Section Break",
+ "label": "Apps"
+ },
+ {
+ "fieldname": "version",
+ "fieldtype": "Link",
+ "label": "Version",
+ "options": "Frappe Version"
+ },
+ {
+ "fieldname": "apps_column",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "apps",
+ "fieldtype": "Table",
+ "label": "Apps",
+ "options": "Site Group Deploy App",
+ "reqd": 1
+ },
+ {
+ "fieldname": "cluster",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Cluster",
+ "options": "Cluster",
+ "reqd": 1
+ },
+ {
+ "default": "Pending",
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "label": "Status",
+ "options": "Pending\nDeploying Bench\nBench Deployed\nBench Deploy Failed\nCreating Site\nSite Created\nSite Creation Failed"
+ },
+ {
+ "fieldname": "column_break_smfr",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "bench",
+ "fieldtype": "Link",
+ "label": "Bench",
+ "options": "Bench"
+ },
+ {
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "label": "Team",
+ "options": "Team",
+ "reqd": 1
+ },
+ {
+ "fieldname": "site_plan",
+ "fieldtype": "Link",
+ "label": "Site Plan",
+ "link_filters": "[[\"Site Plan\",\"document_type\",\"=\",\"Site\"]]",
+ "options": "Site Plan",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "fetch_from": "site_plan.private_bench_support",
+ "fieldname": "auto_provision_bench",
+ "fieldtype": "Check",
+ "label": "Auto Provision Bench",
+ "read_only": 1
+ },
+ {
+ "fieldname": "provider",
+ "fieldtype": "Link",
+ "hidden": 1,
+ "label": "Provider",
+ "options": "Cloud Provider"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2026-01-12 00:55:22.675800",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Site Group Deploy",
+ "naming_rule": "Random",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/site_group_deploy/site_group_deploy.py b/press/press/doctype/site_group_deploy/site_group_deploy.py
new file mode 100644
index 00000000000..228d15f2bae
--- /dev/null
+++ b/press/press/doctype/site_group_deploy/site_group_deploy.py
@@ -0,0 +1,194 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+from __future__ import annotations
+
+import frappe
+from frappe.model.document import Document
+
+
+class SiteGroupDeploy(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.site_group_deploy_app.site_group_deploy_app import SiteGroupDeployApp
+
+ apps: DF.Table[SiteGroupDeployApp]
+ auto_provision_bench: DF.Check
+ bench: DF.Link | None
+ cluster: DF.Link
+ provider: DF.Link | None
+ release_group: DF.Link | None
+ site: DF.Link | None
+ site_plan: DF.Link | None
+ status: DF.Literal[
+ "Pending",
+ "Deploying Bench",
+ "Bench Deployed",
+ "Bench Deploy Failed",
+ "Creating Site",
+ "Site Created",
+ "Site Creation Failed",
+ ]
+ subdomain: DF.Data
+ team: DF.Link
+ version: DF.Link | None
+ # end: auto-generated types
+
+ dashboard_fields = ("status", "site", "release_group")
+
+ def before_insert(self):
+ self.set_latest_version()
+ self.check_if_rg_or_site_exists()
+
+ def after_insert(self):
+ if self.release_group:
+ return
+
+ group = self.create_release_group()
+
+ self.status = "Deploying Bench"
+ self.save()
+
+ group.initial_deploy()
+
+ def set_latest_version(self):
+ if self.version:
+ return
+
+ self.version = frappe.db.get_value("Frappe Version", {"status": "stable"}, order_by="number desc")
+
+ def check_if_rg_or_site_exists(self):
+ from press.press.doctype.site.site import Site
+
+ if frappe.db.exists("Release Group", {"title": self.subdomain}):
+ frappe.throw(f"Release Group with title {self.subdomain} already exists")
+
+ domain = frappe.db.get_single_value("Press Settings", "domain")
+ if Site.exists(self.subdomain, domain):
+ frappe.throw(f"Site with subdomain {self.subdomain} already exists")
+
+ def get_optimal_server_for_private_bench(self):
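+ # Pick the public server in the cluster with the most RAM headroom per
+ # active bench; the +1 below avoids division by zero and accounts for
+ # the bench about to be provisioned.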
+ servers = frappe.get_all(
+ "Server",
+ filters={
+ "status": "Active",
+ "cluster": self.cluster,
+ "provider": self.provider,
+ "public": True,
+ },
+ fields=["name", "ram"],
+ )
+
+ if not servers:
+ return None
+
+ server_stats = []
+ for server in servers:
+ bench_count = frappe.db.count("Bench", {"server": server.name, "status": "Active"})
+ resource_ratio = server.ram / (bench_count + 1)
+ server_stats.append(
+ {
+ "name": server.name,
+ "resource_ratio": resource_ratio,
+ }
+ )
+
+ server_stats.sort(key=lambda x: -x["resource_ratio"])
+ return server_stats[0]["name"] if server_stats else None
+
+ def create_release_group(self):
+ from press.press.doctype.release_group.release_group import (
+ new_release_group,
+ )
+
+ apps = [{"app": app.app, "source": app.source} for app in self.apps]
+
+ server = ""
+ if self.auto_provision_bench and self.provider:
+ server = self.get_optimal_server_for_private_bench()
+
+ group = new_release_group(
+ title=self.subdomain,
+ version=self.version,
+ apps=apps,
+ team=self.team,
+ cluster=self.cluster,
+ server=server if server else None,
+ )
+
+ self.release_group = group.name
+ self.save()
+
+ return group
+
+ def create_site(self):
+ site_plan = self.site_plan
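+ # No explicit plan (or no auto-provisioned bench): fall back to the
+ # cheapest paid plan that allows private benches.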
+ if not (site_plan and self.auto_provision_bench):
+ cheapest_private_bench_plan = frappe.db.get_value(
+ "Site Plan",
+ {
+ "private_benches": 1,
+ "document_type": "Site",
+ "price_inr": ["!=", 0],
+ "price_usd": ["!=", 0],
+ },
+ order_by="price_inr asc",
+ )
+ site_plan = cheapest_private_bench_plan
+
+ apps = [{"app": app.app} for app in self.apps]
+ app_plan_map = {app.app: {"name": app.plan} for app in self.apps if app.plan}
+
+ try:
+ site = frappe.get_doc(
+ {
+ "doctype": "Site",
+ "team": self.team,
+ "subdomain": self.subdomain,
+ "apps": apps,
+ "cluster": self.cluster,
+ "release_group": self.release_group,
+ "bench": self.bench,
+ "domain": frappe.db.get_single_value("Press Settings", "domain"),
+ "subscription_plan": site_plan,
+ "app_plans": app_plan_map,
+ }
+ ).insert()
+
+ self.site = site.name
+ self.status = "Creating Site"
+
+ except frappe.exceptions.ValidationError:
+ self.status = "Site Creation Failed"
+
+ self.save()
+
+ def update_site_group_deploy_on_deploy_failure(self, deploy):
+ if deploy and deploy.status == "Failure":
+ self.status = "Bench Deploy Failed"
+ self.save()
+
+ def update_site_group_deploy_on_process_job(self, job):
+ if job.job_type == "New Bench":
+ if job.status == "Success":
+ self.bench = job.bench
+ self.status = "Bench Deployed"
+ self.save()
+ self.create_site()
+
+ elif job.status == "Failure":
+ self.status = "Bench Deploy Failed"
+ self.save()
+
+ elif job.job_type == "New Site":
+ if job.status == "Success":
+ self.status = "Site Created"
+ self.save()
+ elif job.status == "Failure":
+ self.status = "Site Creation Failed"
+ self.save()
diff --git a/press/press/doctype/site_group_deploy/test_site_group_deploy.py b/press/press/doctype/site_group_deploy/test_site_group_deploy.py
new file mode 100644
index 00000000000..df5d753d74f
--- /dev/null
+++ b/press/press/doctype/site_group_deploy/test_site_group_deploy.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2024, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class TestSiteGroupDeploy(FrappeTestCase):
+ pass
diff --git a/press/press/doctype/site_group_deploy_app/__init__.py b/press/press/doctype/site_group_deploy_app/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/site_group_deploy_app/site_group_deploy_app.json b/press/press/doctype/site_group_deploy_app/site_group_deploy_app.json
new file mode 100644
index 00000000000..e078aeece25
--- /dev/null
+++ b/press/press/doctype/site_group_deploy_app/site_group_deploy_app.json
@@ -0,0 +1,52 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2024-08-28 16:22:16.613931",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "app",
+ "source",
+ "title",
+ "plan"
+ ],
+ "fields": [
+ {
+ "fieldname": "app",
+ "fieldtype": "Link",
+ "label": "App",
+ "options": "App"
+ },
+ {
+ "fieldname": "source",
+ "fieldtype": "Link",
+ "label": "Source",
+ "options": "App Source"
+ },
+ {
+ "fetch_from": "app.title",
+ "fieldname": "title",
+ "fieldtype": "Data",
+ "label": "Title"
+ },
+ {
+ "fieldname": "plan",
+ "fieldtype": "Link",
+ "label": "Plan",
+ "options": "Marketplace App Plan"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2024-08-28 20:11:22.334225",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Site Group Deploy App",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/site_group_deploy_app/site_group_deploy_app.py b/press/press/doctype/site_group_deploy_app/site_group_deploy_app.py
new file mode 100644
index 00000000000..c8e70c72f99
--- /dev/null
+++ b/press/press/doctype/site_group_deploy_app/site_group_deploy_app.py
@@ -0,0 +1,26 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class SiteGroupDeployApp(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ app: DF.Link | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ plan: DF.Link | None
+ source: DF.Link | None
+ title: DF.Data | None
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/site_migration/site_migration.js b/press/press/doctype/site_migration/site_migration.js
index 4324ee35999..1cb63c7afeb 100644
--- a/press/press/doctype/site_migration/site_migration.js
+++ b/press/press/doctype/site_migration/site_migration.js
@@ -3,6 +3,13 @@
frappe.ui.form.on('Site Migration', {
refresh: function (frm) {
+ frm.set_query('site', () => {
+ return {
+ filters: {
+ status: 'Active',
+ },
+ };
+ });
frm.set_query('source_bench', () => {
return {
filters: {
@@ -17,13 +24,67 @@ frappe.ui.form.on('Site Migration', {
},
};
});
- if (['Scheduled', 'Failure'].includes(frm.doc.status)) {
+ if (frm.doc.status === 'Failure') {
frm.add_custom_button(__('Continue'), () => {
frappe.confirm(
- 'Are you sure you want to continue from next Pending step?',
- () => frm.call('run_next_step'),
`Are you sure you want to continue from the next Pending step?
+
+ Note: This could cause data loss if you don't know what you're doing `,
+ () => frm.call('continue_from_next_pending'),
+ );
+ });
+ frm.add_custom_button(__('Set Skipped to Pending'), () => {
+ frappe.confirm(
+ `Are you sure you want to set all Skipped steps to Pending?
+
+ Note: This could cause data loss if you don't know what you're doing `,
+ () => {
+ frm.set_value(
+ 'steps',
+ frm.doc.steps.map((step) => {
+ if (step.status === 'Skipped') {
+ step.status = 'Pending';
+ }
+ return step;
+ }),
+ );
+ frm.save();
+ },
);
});
+ } else if (frm.doc.status === 'Scheduled') {
+ frm.add_custom_button(__('Start'), () => {
+ frappe.confirm(
+ `Are you sure you want to start the migration?
+
+ Note: This will start downtime `,
+ () => frm.call('start'),
+ );
+ });
+ } else if (frm.doc.status === 'Running') {
+ frm.add_custom_button(__('Cleanup and fail'), () => {
+ frappe.confirm(
+ `Are you sure you want to skip pending steps and fail the migration?
+
+ This will attempt to stop the migration and put everything back to the original state.
+
+ Note: This could cause data loss if you don't know what you're doing `,
+ () => frm.call('cleanup_and_fail'),
+ );
+ });
+ }
+ if (frm.is_new()) {
+ frm.dashboard.set_headline_alert(
+ __(
+ `Scheduled time not set. The migration will start immediately on save`,
+ ),
+ 'orange',
+ );
+ }
+ },
+ scheduled_time: function (frm) {
+ if (frm.doc.scheduled_time) {
+ frm.dashboard.clear_headline();
}
},
});
diff --git a/press/press/doctype/site_migration/site_migration.json b/press/press/doctype/site_migration/site_migration.json
index c7ea708e6c5..21bde8d6515 100644
--- a/press/press/doctype/site_migration/site_migration.json
+++ b/press/press/doctype/site_migration/site_migration.json
@@ -31,7 +31,8 @@
"in_standard_filter": 1,
"label": "Site",
"options": "Site",
- "reqd": 1
+ "reqd": 1,
+ "search_index": 1
},
{
"fetch_from": "site.bench",
@@ -43,7 +44,8 @@
"label": "Source Bench",
"options": "Bench",
"read_only": 1,
- "reqd": 1
+ "reqd": 1,
+ "set_only_once": 1
},
{
"fetch_from": "site.server",
@@ -152,9 +154,10 @@
"label": "Skip Failing Patches"
}
],
+ "grid_page_length": 50,
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2023-03-02 12:05:54.861434",
+ "modified": "2025-05-05 10:32:27.636592",
"modified_by": "Administrator",
"module": "Press",
"name": "Site Migration",
@@ -182,11 +185,32 @@
"role": "Site Manager",
"share": 1,
"write": 1
+ },
+ {
+ "create": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1,
+ "write": 1
}
],
+ "row_format": "Dynamic",
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"title_field": "site",
"track_changes": 1
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/site_migration/site_migration.py b/press/press/doctype/site_migration/site_migration.py
index 2060ae952a6..7f9c330ba3b 100644
--- a/press/press/doctype/site_migration/site_migration.py
+++ b/press/press/doctype/site_migration/site_migration.py
@@ -1,18 +1,47 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2021, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
+
+from datetime import timedelta
+from functools import cached_property
+from typing import TYPE_CHECKING
import frappe
from frappe.core.utils import find
from frappe.model.document import Document
from press.agent import Agent
-from press.press.doctype.site_backup.site_backup import process_backup_site_job_update
+from press.exceptions import (
+ ActiveDomainsForStandalone,
+ CannotChangePlan,
+ InactiveDomains,
+ InsufficientSpaceOnServer,
+ MissingAppsInBench,
+ OngoingAgentJob,
+ SiteAlreadyArchived,
+ SiteUnderMaintenance,
+)
+from press.press.doctype.press_notification.press_notification import (
+ create_new_notification,
+)
+from press.press.doctype.site.site import Site
+from press.press.doctype.site_backup.site_backup import (
+ SiteBackup,
+ process_backup_site_job_update,
+)
from press.utils import log_error
+if TYPE_CHECKING:
+ from frappe.types.DF import Link
+
+ from press.press.doctype.agent_job.agent_job import AgentJob
+ from press.press.doctype.cluster.cluster import Cluster
+ from press.press.doctype.server.server import Server
+ from press.press.doctype.site_domain.site_domain import SiteDomain
-def get_ongoing_migration(site: str, scheduled=False):
+
+def get_ongoing_migration(site: Link, scheduled=False):
"""
Return ongoing Site Migration for site.
@@ -21,28 +50,165 @@ def get_ongoing_migration(site: str, scheduled=False):
ongoing_statuses = ["Pending", "Running"]
if scheduled:
ongoing_statuses.append("Scheduled")
- return frappe.db.exists(
- "Site Migration", {"site": site, "status": ("in", ongoing_statuses)}
- )
+ return frappe.db.exists("Site Migration", {"site": site, "status": ("in", ongoing_statuses)})
class SiteMigration(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.site_migration_step.site_migration_step import SiteMigrationStep
+
+ backup: DF.Link | None
+ destination_bench: DF.Link
+ destination_cluster: DF.Link
+ destination_server: DF.Link
+ migration_type: DF.Literal["", "Bench", "Server", "Cluster"]
+ scheduled_time: DF.Datetime | None
+ site: DF.Link
+ skip_failing_patches: DF.Check
+ source_bench: DF.Link
+ source_cluster: DF.Link
+ source_server: DF.Link
+ status: DF.Literal["Scheduled", "Pending", "Running", "Success", "Failure"]
+ steps: DF.Table[SiteMigrationStep]
+ # end: auto-generated types
+
def before_insert(self):
if get_ongoing_migration(self.site, scheduled=True):
- frappe.throw("Ongoing/Scheduled Site Migration for that site exists.")
- self.check_for_existing_agent_jobs()
+ frappe.throw(f"Ongoing/Scheduled Site Migration for the site {frappe.bold(self.site)} exists.")
+ site: Site = frappe.get_doc("Site", self.site)
+ site.check_move_scheduled()
+
+ def check_for_existing_domains(self):
+ """
+ If destination server is standalone, custom domains need to be removed before moving the site and added afterwards.
+ """
+ is_standalone = frappe.db.get_value("Server", self.destination_server, "is_standalone")
+ if not is_standalone:
+ return
+ domains = frappe.get_all(
+ "Site Domain",
+ {"site": self.site, "domain": ("!=", self.site)},
+ pluck="domain",
+ )
+
+ if domains:
+ frappe.throw(
+ f"Destination server is standalone. Please remove custom domains: ({', '.join(domains)}) before moving the site. They can be added again after the migration is complete.",
+ ActiveDomainsForStandalone,
+ )
+
+ def validate_bench(self):
+ if frappe.db.get_value("Bench", self.destination_bench, "status", for_update=True) != "Active":
+ frappe.throw("Destination bench does not exist")
+
+ @cached_property
+ def last_backup(self) -> SiteBackup | None:
+ return Site("Site", self.site).last_backup
+
+ def check_enough_space_on_source_server(self):
+ # server needs to have enough space to create backup
+ try:
+ backup = self.last_backup
+ except frappe.DoesNotExistError:
+ pass
+ else:
+ site = Site("Site", self.site)
+ site.remote_database_file = backup.remote_database_file
+ site.remote_public_file = backup.remote_public_file
+ site.remote_private_file = backup.remote_private_file
+ site.check_space_on_server_for_backup()
+
+ def check_enough_space_on_destination_server(self):
+ backup = self.last_backup
+ if not backup:
+ return
+ site = Site("Site", self.site)
+ site.server = self.destination_server
+ site.remote_database_file = backup.remote_database_file
+ site.remote_public_file = backup.remote_public_file
+ site.remote_private_file = backup.remote_private_file
+ site.check_space_on_server_for_restore()
def after_insert(self):
self.set_migration_type()
self.add_steps()
self.save()
+ if not self.scheduled_time:
+ self.start()
+ else:
+ self.run_validations()
+
+ def validate_apps(self):
+ site_apps = [app.app for app in Site("Site", self.site).apps]
+ bench_apps = [app.app for app in frappe.get_doc("Bench", self.destination_bench).apps]
+
+ if diff := set(site_apps) - set(bench_apps):
+ frappe.throw(
+ f"Bench {self.destination_bench} doesn't have some of the apps installed on {self.site}: {', '.join(diff)}",
+ MissingAppsInBench,
+ )
+
+ def check_for_inactive_domains(self):
+ if domains := frappe.db.get_all(
+ "Site Domain", {"site": self.site, "status": ("!=", "Active")}, pluck="name"
+ ):
+ frappe.throw(
+ f"Inactive custom domains exist: {','.join(domains)}. Please remove or fix the same.",
+ InactiveDomains,
+ )
+
+ def run_validations(self):
+ self.validate_bench()
+ self.validate_apps()
+ self.check_for_inactive_domains()
+ self.check_for_existing_domains()
+ self.check_enough_space_on_destination_server()
+ @frappe.whitelist()
def start(self):
- self.db_set("status", "Pending")
- frappe.db.commit()
+ self.check_for_ongoing_agent_jobs() # has to be before setting state to pending so it gets retried
+ previous_status = self.status
+ self.status = "Pending"
+ self.save()
+ self.run_validations()
+ site = Site("Site", self.site)
+ try:
+ site.ready_for_move()
+ except SiteAlreadyArchived:
+ self.status = "Failure"
+ self.save()
+ return
+ except SiteUnderMaintenance:
+ # Just ignore the error for now
+ # It can be retried later once the site is out of maintenance
+ self.status = previous_status
+ self.save()
+ return
+
+ self.run_next_step()
+
+ @frappe.whitelist()
+ def continue_from_next_pending(self):
+ self.remove_archive_on_destination_step_if_exists()
self.run_next_step()
- def check_for_existing_agent_jobs(self):
+ def remove_archive_on_destination_step_if_exists(self):
+ """Remove Archive on Destination step if exists"""
+ archive_on_destination_step = find(
+ self.steps,
+ lambda x: x.method_name == self.archive_site_on_destination_server.__name__,
+ )
+ if archive_on_destination_step:
+ self.steps.remove(archive_on_destination_step)
+
+ def check_for_ongoing_agent_jobs(self):
if frappe.db.exists(
"Agent Job",
{
@@ -51,7 +217,7 @@ def check_for_existing_agent_jobs(self):
"creation": (">", frappe.utils.add_to_date(None, hours=-24)),
},
):
- frappe.throw("Ongoing Agent Job for site exists")
+ frappe.throw("Ongoing Agent Job for site exists", OngoingAgentJob)
def set_migration_type(self):
if self.source_cluster != self.destination_cluster:
@@ -69,13 +235,12 @@ def add_steps(self):
self.add_steps_for_domains()
elif self.migration_type == "Server":
source_db = frappe.db.get_value("Server", self.source_server, "database_server")
- destination_db = frappe.db.get_value(
- "Server", self.destination_server, "database_server"
- )
+ destination_db = frappe.db.get_value("Server", self.destination_server, "database_server")
if source_db == destination_db:
raise NotImplementedError
# TODO: switch order of steps here (archive before restore)
self.add_steps_for_server_migration()
+ self.add_steps_for_user_defined_domains()
else:
# TODO: Call site update for bench only migration with popup with link to site update job
raise NotImplementedError
@@ -90,6 +255,7 @@ def remove_domain_hosts_from_source(self):
for domain in domains:
site_domain = frappe.get_doc("Site Domain", domain)
agent.remove_host(site_domain)
+ agent.reload_nginx()
def _add_remove_host_from_source_proxy_step(self, domain: str):
step = {
@@ -100,6 +266,24 @@ def _add_remove_host_from_source_proxy_step(self, domain: str):
}
self.append("steps", step)
+ def _add_remove_user_defined_domain_from_source_proxy_step(self, domain: str):
+ step = {
+ "step_title": f"Remove user defined domain {domain} from source proxy",
+ "method_name": self.remove_user_defined_domain_from_source_proxy.__name__,
+ "status": "Pending",
+ "method_arg": domain,
+ }
+ self.append("steps", step)
+
+ def _add_restore_user_defined_domain_to_destination_proxy_step(self, domain: str):
+ step = {
+ "step_title": f"Restore user defined domain {domain} on destination proxy",
+ "method_name": self.restore_user_defined_domain_on_destination_proxy.__name__,
+ "status": "Pending",
+ "method_arg": domain,
+ }
+ self.append("steps", step)
+
def _add_add_host_to_destination_proxy_step(self, domain: str):
step = {
"step_title": f"Add host {domain} to destination proxy",
@@ -110,9 +294,17 @@ def _add_add_host_to_destination_proxy_step(self, domain: str):
self.append("steps", step)
def add_host_to_destination_proxy(self, domain):
- site_domain = frappe.get_doc("Site Domain", domain)
+ site_domain: SiteDomain = frappe.get_doc("Site Domain", domain)
proxy_server = frappe.db.get_value("Server", self.destination_server, "proxy_server")
agent = Agent(proxy_server, server_type="Proxy Server")
+
+ if site_domain.has_root_tls_certificate:
+ return agent.add_domain_to_upstream(
+ server=self.destination_server,
+ site=site_domain.site,
+ domain=site_domain.domain,
+ )
+
return agent.new_host(site_domain)
def remove_host_from_source_proxy(self, domain):
@@ -131,13 +323,13 @@ def _add_setup_redirects_step(self):
def setup_redirects(self):
"""Setup redirects of site in proxy"""
- site = frappe.get_doc("Site", self.site)
+ site = Site("Site", self.site)
ret = site._update_redirects_for_all_site_domains()
if ret:
# could be no jobs
return ret
self.update_next_step_status("Skipped")
- self.run_next_step()
+ return self.run_next_step()
def add_steps_for_domains(self):
domains = frappe.get_all("Site Domain", {"site": self.site}, pluck="name")
@@ -150,8 +342,17 @@ def add_steps_for_domains(self):
if len(domains) > 1:
self._add_setup_redirects_step()
+ def add_steps_for_user_defined_domains(self):
+ domains = frappe.get_all("Site Domain", {"site": self.site, "name": ["!=", self.site]}, pluck="name")
+ for domain in domains:
+ site_domain = frappe.get_doc("Site Domain", domain)
+ if site_domain.default or not site_domain.has_root_tls_certificate:
+ continue
+ self._add_remove_user_defined_domain_from_source_proxy_step(domain)
+ self._add_restore_user_defined_domain_to_destination_proxy_step(domain)
+
@property
- def next_step(self):
+ def next_step(self) -> SiteMigrationStep | None:
"""Get next step to execute or update."""
return find(self.steps, lambda step: step.status in ["Pending", "Running"])
@@ -175,16 +376,129 @@ def run_next_step(self):
self.save()
def update_next_step_status(self, status: str):
+ if not self.next_step:
+ raise ValueError("No next step to update status for")
self.next_step.status = status
self.save()
- def fail(self):
+ @property
+ def possibly_archived_site_on_source(self) -> bool:
+ return find(self.steps, lambda x: x.method_name == self.archive_site_on_source.__name__).status in [
+ "Success",
+ "Failure",
+ ]
+
+ def set_pending_steps_to_skipped(self):
+ for step in self.steps:
+ if step.status == "Pending":
+ step.status = "Skipped"
+ self.save()
+
+ @property
+ def restore_on_destination_happened(self) -> bool:
+ return find(
+ self.steps,
+ lambda x: x.method_name == self.restore_site_on_destination_server.__name__,
+ ).status in ["Success", "Failure"]
+
+ def cleanup_if_appropriate(self):
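+ # Archiving on the destination is only safe when the restore actually
+ # ran there and the source site may still be intact; otherwise there is
+ # nothing to clean up and we fall through to a plain failure.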
+ self.set_pending_steps_to_skipped()
+ if self.possibly_archived_site_on_source or not self.restore_on_destination_happened:
+ return False
+ self.append(
+ "steps",
+ {
+ "step_title": self.archive_site_on_destination_server.__doc__,
+ "method_name": self.archive_site_on_destination_server.__name__,
+ "status": "Pending",
+ },
+ )
+ self.run_next_step()
+ return True
+
+ @frappe.whitelist()
+ def cleanup_and_fail(self, *args, **kwargs):
+ if self.cleanup_if_appropriate():
+ return # callback will trigger fail
+ self.fail(*args, **kwargs)
+
+ def fail(self, reason: str | None = None, force_activate: bool = False):
self.status = "Failure"
self.save()
+ self.send_fail_notification(reason)
+ self.activate_site_if_appropriate(force=force_activate)
+
+ @property
+ def failed_step(self):
+ return find(self.steps, lambda x: x.status == "Failure")
+
+ def activate_site_if_appropriate(self, force=False):
+ site = Site("Site", self.site)
+ failed_step_method_name = (self.failed_step or {}).get("method_name", "__NOT_SET__")
+ if force or (
+ failed_step_method_name
+ in [
+ self.backup_source_site.__name__,
+ self.restore_site_on_destination_server.__name__,
+ self.restore_site_on_destination_proxy.__name__,
+ ]
+ and site.status_before_update != "Inactive"
+ ):
+ site.activate()
+ elif site.status_before_update == "Inactive":
+ site.db_set("status", "Inactive")
+ if self.is_standalone_migration:
+ site.create_dns_record()
+ if self.migration_type == "Cluster":
+ if site.cluster == frappe.db.get_value(
+ "Root Domain", site.domain, "default_cluster"
+ ): # reverse the DNS record creation
+ site.remove_dns_record(frappe.db.get_value("Server", self.destination_server, "proxy_server"))
+ else:
+ site.create_dns_record()
+
+ def send_fail_notification(self, reason: str | None = None):
+ from press.press.doctype.agent_job.agent_job_notifications import create_job_failed_notification
+
+ site = Site("Site", self.site)
+ message = f"Site Migration ({self.migration_type}) for site {site.host_name} failed"
+ if reason:
+ message += f" due to {reason}"
+ agent_job_id = None
+
+ create_new_notification(
+ site.team,
+ "Site Migrate",
+ "Agent Job",
+ agent_job_id,
+ message,
+ )
+ else:
+ agent_job_id = find(self.steps, lambda x: x.status == "Failure").get("step_job")
+
+ job = frappe.get_doc("Agent Job", agent_job_id)
+ create_job_failed_notification(job, site.team, "Site Migrate", "Site Migrate", message)
def succeed(self):
self.status = "Success"
self.save()
+ self.send_success_notification()
+
+ def send_success_notification(self):
+ site = Site("Site", self.site)
+
+ message = (
+ f"Site Migration ({self.migration_type}) for site {site.host_name} completed successfully"
+ )
+ agent_job_id = find(self.steps, lambda x: x.step_title == "Restore site on destination").step_job
+
+ create_new_notification(
+ site.team,
+ "Site Migrate",
+ "Agent Job",
+ agent_job_id,
+ message,
+ )
def add_steps_for_cluster_migration(self):
steps = [
@@ -228,6 +542,11 @@ def add_steps_for_cluster_migration(self):
"method_name": self.reset_site_status_on_destination.__name__,
"status": "Pending",
},
+ {
+ "step_title": self.adjust_plan_if_required.__doc__,
+ "method_name": self.adjust_plan_if_required.__name__,
+ "status": "Pending",
+ },
]
for step in steps:
self.append("steps", step)
@@ -274,15 +593,19 @@ def add_steps_for_server_migration(self):
"method_name": self.reset_site_status_on_destination.__name__,
"status": "Pending",
},
+ {
+ "step_title": self.adjust_plan_if_required.__doc__,
+ "method_name": self.adjust_plan_if_required.__name__,
+ "status": "Pending",
+ },
]
for step in steps:
self.append("steps", step)
def deactivate_site_on_source_server(self):
"""Deactivate site on source"""
- site = frappe.get_doc("Site", self.site)
- site.status_before_update = site.status
- site.status = "Inactive"
+ site: Site = frappe.get_doc("Site", self.site)
+ site.status = "Pending"
return site.update_site_config({"maintenance_mode": 1}) # saves doc
def deactivate_site_on_source_proxy(self):
@@ -294,18 +617,25 @@ def backup_source_site(self):
"""Backup site on source"""
site = frappe.get_doc("Site", self.site)
- backup = site.backup(with_files=True, offsite=True)
+ backup = site.backup(with_files=True, offsite=True, force=True)
backup.reload()
self.backup = backup.name
self.save()
return frappe.get_doc("Agent Job", backup.job)
+ def archive_site_on_destination_server(self):
+ """Archive site on destination (case of failure)"""
+ agent = Agent(self.destination_server)
+ site = frappe.get_doc("Site", self.site)
+ site.bench = self.destination_bench
+ return agent.archive_site(site, force=True)
+
def restore_site_on_destination_server(self):
"""Restore site on destination"""
agent = Agent(self.destination_server)
- site = frappe.get_doc("Site", self.site)
- backup = frappe.get_doc("Site Backup", self.backup)
+ site: Site = frappe.get_doc("Site", self.site)
+ backup: SiteBackup = frappe.get_doc("Site Backup", self.backup)
site.remote_database_file = backup.remote_database_file
site.remote_public_file = backup.remote_public_file
site.remote_private_file = backup.remote_private_file
@@ -314,31 +644,48 @@ def restore_site_on_destination_server(self):
site.cluster = self.destination_cluster
site.server = self.destination_server
if self.migration_type == "Cluster":
+ site.create_dns_record() # won't create for default cluster
+ if self.destination_cluster == frappe.db.get_value("Root Domain", site.domain, "default_cluster"):
+ source_proxy = str(frappe.db.get_value("Server", self.source_server, "proxy_server"))
+ site.remove_dns_record(source_proxy)
+ elif self.is_standalone_migration:
site.create_dns_record()
- domain = frappe.get_doc("Root Domain", site.domain)
- if self.destination_cluster == domain.default_cluster:
- source_proxy = frappe.db.get_value("Server", self.source_server, "proxy_server")
- site.remove_dns_record(domain, source_proxy, site.name)
- return agent.new_site_from_backup(
- site, skip_failing_patches=self.skip_failing_patches
- )
+ return agent.new_site_from_backup(site, skip_failing_patches=self.skip_failing_patches)
def restore_site_on_destination_proxy(self):
"""Restore site on destination proxy"""
proxy_server = frappe.db.get_value("Server", self.destination_server, "proxy_server")
agent = Agent(proxy_server, server_type="Proxy Server")
- return agent.new_upstream_site(self.destination_server, self.site)
+ return agent.new_upstream_file(server=self.destination_server, site=self.site)
+
+ def restore_user_defined_domain_on_destination_proxy(self, domain: str):
+ """Restore user defined domain on destination proxy for product trial sites"""
+
+ proxy_server = frappe.db.get_value("Server", self.destination_server, "proxy_server")
+ agent = Agent(proxy_server, server_type="Proxy Server")
+ site_domain: SiteDomain = frappe.get_doc("Site Domain", domain)
+
+ return agent.add_domain_to_upstream(
+ server=self.destination_server, site=site_domain.site, domain=domain
+ )
def remove_site_from_source_proxy(self):
"""Remove site from source proxy"""
proxy_server = frappe.db.get_value("Server", self.source_server, "proxy_server")
agent = Agent(proxy_server, server_type="Proxy Server")
- return agent.remove_upstream_site(self.source_server, self.site)
+ return agent.remove_upstream_file(server=self.source_server, site=self.site)
+
+ def remove_user_defined_domain_from_source_proxy(self, domain: str):
+ """Remove user defined domain from source proxy for product trial sites"""
+ proxy_server = frappe.db.get_value("Server", self.source_server, "proxy_server")
+ agent = Agent(proxy_server, server_type="Proxy Server")
+ return agent.remove_upstream_file(server=self.source_server, site=self.site, site_name=domain)
def archive_site_on_source(self):
"""Archive site on source"""
agent = Agent(self.source_server)
site = frappe.get_doc("Site", self.site)
+ site.bench = self.source_bench # for sanity
return agent.archive_site(site)
def update_site_record_fields(self):
@@ -350,46 +697,117 @@ def update_site_record_fields(self):
self.update_next_step_status("Success")
self.run_next_step()
+ @property
+ def is_standalone_migration(self) -> bool:
+ return bool(
+ frappe.db.get_value("Server", self.destination_server, "is_standalone")
+ or frappe.db.get_value("Server", self.source_server, "is_standalone")
+ )
+
def reset_site_status_on_destination(self):
"""Reset site status on destination"""
- site = frappe.get_doc("Site", self.site)
+ site: Site = frappe.get_doc("Site", self.site)
if site.status_before_update in ["Inactive", "Suspended"]:
self.update_next_step_status("Skipped")
- self.run_next_step()
job = None
else:
- job = site.update_site_config({"maintenance_mode": 0})
+ job = site.update_site_config({"maintenance_mode": 0}) # will do run_next_step in callback
site.reload()
- site.status = site.status_before_update
+ site.status = site.status_before_update or "Active"
site.status_before_update = None
site.save()
- return job
+ if job:
+ return job
+ return self.run_next_step()
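The shape of the return above encodes the step convention used throughout this doctype: a step that queues an Agent Job returns it, and the job-update callback advances the migration later; a step that needs no job calls run_next_step() synchronously. A minimal sketch of that convention (names illustrative, not part of the patch):

def finish_step(job, run_next_step):
    # If a job was queued, its callback will advance the migration later.
    if job:
        return job
    # Otherwise advance immediately.
    return run_next_step()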
def activate_site_on_destination_proxy(self):
"""Activate site on destination proxy"""
site = frappe.get_doc("Site", self.site)
return site.update_site_status_on_proxy("activated")
+ @property
+ def scheduled_by_consultant(self):
+ return self.owner.endswith("@erpnext.com") or self.owner.endswith("@frappe.io")
+
+ def upgrade_plan(self, site: "Site", dest_server: Server):
+ if not dest_server.public and site.team == dest_server.team and not site.is_on_dedicated_plan:
+ return site.change_plan(
+ "Unlimited",
+ ignore_card_setup=self.scheduled_by_consultant,
+ )
+ return None
+
+ def downgrade_plan(self, site: "Site", dest_server: Server):
+ if dest_server.public and site.team != dest_server.team and site.is_on_dedicated_plan:
+ return site.change_plan(
+ "USD 100",
+ ignore_card_setup=self.scheduled_by_consultant,
+ )
+ return None
+
+ def adjust_plan_if_required(self):
+ """Update site plan from/to Unlimited"""
+ site: "Site" = frappe.get_doc("Site", self.site)
+ dest_server: Server = frappe.get_doc("Server", self.destination_server)
+ plan_change = None
+ try:
+ plan_change = self.upgrade_plan(site, dest_server) or self.downgrade_plan(site, dest_server)
+ except CannotChangePlan:
+ self.update_next_step_status("Failure")
+
+ if plan_change:
+ self.update_next_step_status("Success")
+ else:
+ self.update_next_step_status("Skipped")
+ self.run_next_step()
+
+ def is_cleanup_done(self, job: "AgentJob") -> bool:
+ return (job.job_type == "Archive Site" and job.bench == self.destination_bench) and (
+ job.status == "Success"
+ or (
+ job.status == "Failure"
+ and (
+ f"KeyError: '{self.site}'" in str(job.traceback)
+ or "BenchNotExistsException" in str(job.traceback)
+ )
+ ) # the site may never even get created on the destination, leaving nothing to clean up
+ )
+
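To make the predicate above concrete, here is a self-contained evaluation against a faked failed archive job; the site name, bench name, and traceback are assumed values, not from the patch:

class FakeJob:
    job_type = "Archive Site"
    bench = "bench-0001"
    status = "Failure"
    traceback = "KeyError: 'demo.frappe.cloud'"

job, site, destination_bench = FakeJob(), "demo.frappe.cloud", "bench-0001"
cleanup_done = (job.job_type == "Archive Site" and job.bench == destination_bench) and (
    job.status == "Success"
    or (
        job.status == "Failure"
        and (f"KeyError: '{site}'" in job.traceback or "BenchNotExistsException" in job.traceback)
    )
)
assert cleanup_done  # a "site never existed" failure still closes the cleanup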
def process_required_job_callbacks(job):
if job.job_type == "Backup Site":
process_backup_site_job_update(job)
+def job_matches_site_migration(job, site_migration_name: str):
+ site_migration = SiteMigration("Site Migration", site_migration_name)
+ next_step = site_migration.next_step
+ return job.name == next_step.step_job if next_step else False
+
+
def process_site_migration_job_update(job, site_migration_name: str):
- site_migration = frappe.get_doc("Site Migration", site_migration_name)
- if job.name == site_migration.next_step.step_job:
- process_required_job_callbacks(job)
- site_migration.update_next_step_status(job.status)
- if job.status == "Success":
- try:
- site_migration.run_next_step()
- except Exception:
- log_error("Site Migration Step Error")
- elif job.status == "Failure":
- site_migration.fail()
- else:
+ site_migration = SiteMigration("Site Migration", site_migration_name)
+ if not site_migration.next_step:
+ log_error("No next step found during Site Migration", doc=site_migration)
+ return
+ if job.name != site_migration.next_step.step_job:
log_error("Extra Job found during Site Migration", job=job.as_dict())
+ return
+
+ process_required_job_callbacks(job)
+ site_migration.update_next_step_status(job.status)
+
+ if site_migration.is_cleanup_done(job):
+ site_migration.fail()
+ return
+
+ if job.status == "Success":
+ try:
+ site_migration.run_next_step()
+ except Exception as e:
+ log_error("Site Migration Step Error", doc=site_migration)
+ site_migration.cleanup_and_fail(reason=str(e), force_activate=True)
+ elif job.status in ["Failure", "Delivery Failure"]:
+ site_migration.cleanup_and_fail()
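The rewritten handler above reduces to a small dispatch table once the guard clauses (no next step, mismatched job) have returned. A condensed sketch with illustrative names:

def dispatch(job_status, is_cleanup):
    if is_cleanup:
        return "fail"  # the destination cleanup job finished; mark the migration failed
    if job_status == "Success":
        return "run_next_step"  # may itself cleanup_and_fail if the next step raises
    if job_status in ("Failure", "Delivery Failure"):
        return "cleanup_and_fail"
    return "wait"  # Running/Pending: nothing to do on this poll

assert dispatch("Success", False) == "run_next_step"
assert dispatch("Delivery Failure", False) == "cleanup_and_fail"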
def run_scheduled_migrations():
@@ -398,8 +816,25 @@ def run_scheduled_migrations():
{"scheduled_time": ("<=", frappe.utils.now()), "status": "Scheduled"},
)
for migration in migrations:
- site_migration = frappe.get_doc("Site Migration", migration)
- site_migration.start()
+ site_migration = SiteMigration("Site Migration", migration)
+ try:
+ site_migration.start()
+ except OngoingAgentJob as e:
+ if not site_migration.scheduled_time:
+ return
+ if frappe.utils.now_datetime() > site_migration.scheduled_time + timedelta(
+ hours=4
+ ): # don't retry more than 4 hours past the scheduled time
+ site_migration.cleanup_and_fail(reason=str(e))
+ except (
+ MissingAppsInBench,
+ InsufficientSpaceOnServer,
+ InactiveDomains,
+ ActiveDomainsForStandalone,
+ ) as e:
+ site_migration.cleanup_and_fail(reason=str(e), force_activate=True)
+ except Exception as e:
+ log_error("Site Migration Start Error", exception=e)
def on_doctype_update():
diff --git a/press/press/doctype/site_migration/test_site_migration.py b/press/press/doctype/site_migration/test_site_migration.py
index 41f9dfc1910..3d1c15523fb 100644
--- a/press/press/doctype/site_migration/test_site_migration.py
+++ b/press/press/doctype/site_migration/test_site_migration.py
@@ -1,11 +1,245 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2021, Frappe and Contributors
# See license.txt
+from unittest.mock import patch
-# import frappe
-import unittest
+import frappe
+from frappe.core.utils import find
+from frappe.tests.utils import FrappeTestCase
+from press.press.doctype.agent_job.agent_job import poll_pending_jobs
+from press.press.doctype.agent_job.test_agent_job import fake_agent_job
+from press.press.doctype.app.test_app import create_test_app
+from press.press.doctype.release_group.test_release_group import (
+ create_test_release_group,
+)
+from press.press.doctype.remote_file.remote_file import RemoteFile
+from press.press.doctype.site.site import Site
+from press.press.doctype.site.test_site import create_test_bench, create_test_site
+from press.press.doctype.site_migration.site_migration import (
+ SiteMigration,
+ run_scheduled_migrations,
+)
-class TestSiteMigration(unittest.TestCase):
- pass
+BACKUP_JOB_RES = {
+ "backups": {
+ "database": {
+ "file": "a.sql.gz",
+ "path": "/home/frappe/a.sql.gz",
+ "size": 1674818,
+ "url": "https://a.com/a.sql.gz",
+ },
+ "public": {
+ "file": "b.tar",
+ "path": "/home/frappe/b.tar",
+ "size": 1674818,
+ "url": "https://a.com/b.tar",
+ },
+ "private": {
+ "file": "a.tar",
+ "path": "/home/frappe/a.tar",
+ "size": 1674818,
+ "url": "https://a.com/a.tar",
+ },
+ "site_config": {
+ "file": "a.json",
+ "path": "/home/frappe/a.json",
+ "size": 595,
+ "url": "https://a.com/json",
+ },
+ },
+ "offsite": {
+ "a.sql.gz": "bucket.frappe.cloud/2023-10-10/a.sql.gz",
+ "a.tar": "bucket.frappe.cloud/2023-10-10/a.tar",
+ "b.tar": "bucket.frappe.cloud/2023-10-10/b.tar",
+ "a.json": "bucket.frappe.cloud/2023-10-10/a.json",
+ },
+}
+
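The fixture above mirrors the agent's backup payload: each entry under "backups" names a local file, and that file name is also the key into "offsite", which maps to the uploaded object path. A quick pairing pass, runnable as-is against BACKUP_JOB_RES:

for kind, meta in BACKUP_JOB_RES["backups"].items():
    remote_path = BACKUP_JOB_RES["offsite"][meta["file"]]
    print(kind, meta["file"], "->", remote_path)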
+
+@patch.object(RemoteFile, "download_link", new="http://test.com")
+class TestSiteMigration(FrappeTestCase):
+ def tearDown(self):
+ frappe.db.rollback()
+
+ def test_in_cluster_site_migration_goes_through_all_steps_and_updates_site(self):
+ with patch.object(Site, "after_insert"), patch.object(Site, "on_update"):
+ """Patching these methods as its creating issue with duplicate agent job check"""
+ site = create_test_site()
+
+ bench = create_test_bench()
+
+ with (
+ fake_agent_job("Update Site Configuration", "Success"),
+ fake_agent_job(
+ "Backup Site",
+ data=BACKUP_JOB_RES,
+ ),
+ fake_agent_job("New Site from Backup"),
+ fake_agent_job("Archive Site"),
+ fake_agent_job("Remove Site from Upstream"),
+ fake_agent_job("Add Site to Upstream"),
+ fake_agent_job("Update Site Configuration"),
+ ):
+ site_migration: SiteMigration = frappe.get_doc(
+ {
+ "doctype": "Site Migration",
+ "site": site.name,
+ "destination_bench": bench.name,
+ }
+ ).insert()
+ poll_pending_jobs()
+ poll_pending_jobs()
+ poll_pending_jobs()
+ site_migration.reload()
+ self.assertEqual(site_migration.status, "Running")
+
+ poll_pending_jobs()
+ poll_pending_jobs()
+ poll_pending_jobs()
+ site_migration.reload()
+ poll_pending_jobs()
+ site_migration.reload()
+ self.assertEqual(site_migration.status, "Success")
+ site.reload()
+ self.assertEqual(site.status, "Active")
+ self.assertEqual(site.bench, bench.name)
+ self.assertEqual(site.server, bench.server)
+
+ def test_site_is_activated_on_failure_when_possible(self):
+ with patch.object(Site, "after_insert"), patch.object(Site, "on_update"):
+ """Patching these methods as its creating issue with duplicate agent job check"""
+ site = create_test_site()
+ bench = create_test_bench()
+
+ with (
+ fake_agent_job("Update Site Configuration"),
+ fake_agent_job(
+ "Backup Site",
+ data=BACKUP_JOB_RES,
+ ),
+ fake_agent_job("New Site from Backup", "Failure"),
+ fake_agent_job("Archive Site"),
+ ):
+ site_migration: SiteMigration = frappe.get_doc(
+ {
+ "doctype": "Site Migration",
+ "site": site.name,
+ "destination_bench": bench.name,
+ }
+ ).insert()
+ poll_pending_jobs()
+ poll_pending_jobs()
+ poll_pending_jobs()
+ poll_pending_jobs()
+ site_migration.reload()
+ self.assertEqual(site_migration.status, "Failure")
+ site.reload()
+ self.assertEqual(site.status, "Active")
+
+ def test_site_archived_on_destination_on_failure(self):
+ site = create_test_site()
+ bench = create_test_bench()
+
+ with (
+ fake_agent_job("Update Site Configuration"),
+ fake_agent_job(
+ "Backup Site",
+ data=BACKUP_JOB_RES,
+ ),
+ fake_agent_job("New Site from Backup", "Failure"),
+ fake_agent_job(
+ "Archive Site",
+ ),
+ fake_agent_job("Update Site Configuration"),
+ ):
+ site_migration: SiteMigration = frappe.get_doc(
+ {
+ "doctype": "Site Migration",
+ "site": site.name,
+ "destination_bench": bench.name,
+ }
+ ).insert()
+ poll_pending_jobs()
+ poll_pending_jobs()
+ poll_pending_jobs()
+ self.assertEqual(site_migration.status, "Running")
+ poll_pending_jobs() # restore on destination
+ site_migration.reload()
+ self.assertEqual(site_migration.status, "Failure")
+ archive_job_count = frappe.db.count(
+ "Agent Job", {"job_type": "Archive Site", "site": site.name, "server": bench.server}
+ )
+ self.assertEqual(archive_job_count, 1)
+
+ def test_site_not_archived_on_destination_on_failure_if_site_archived_on_source(self):
+ site = create_test_site()
+ bench = create_test_bench()
+
+ with (
+ fake_agent_job("Update Site Configuration"),
+ fake_agent_job(
+ "Backup Site",
+ data=BACKUP_JOB_RES,
+ ),
+ fake_agent_job("New Site from Backup"),
+ fake_agent_job(
+ "Archive Site", # both archives
+ ),
+ fake_agent_job("Remove Site from Upstream"),
+ fake_agent_job("Add Site to Upstream", "Failure"),
+ ):
+ site_migration: SiteMigration = frappe.get_doc(
+ {
+ "doctype": "Site Migration",
+ "site": site.name,
+ "destination_bench": bench.name,
+ }
+ ).insert()
+ poll_pending_jobs()
+ poll_pending_jobs()
+ poll_pending_jobs()
+ poll_pending_jobs() # restore on destination
+ poll_pending_jobs() # archive on source
+ poll_pending_jobs() # remove from source proxy
+ poll_pending_jobs() # restore on dest proxy
+
+ site_migration.reload()
+ self.assertEqual(site_migration.status, "Failure")
+ self.assertEqual(
+ find(
+ site_migration.steps,
+ lambda x: x.method_name == SiteMigration.restore_site_on_destination_proxy.__name__,
+ ).status,
+ "Failure",
+ ) # step after archive site on source passed
+ self.assertFalse(
+ frappe.db.exists(
+ "Agent Job", {"job_type": "Archive Site", "site": site.name, "server": bench.server}
+ ),
+ )
+
+ def test_missing_apps_in_bench_cause_site_migration_to_fail(self):
+ app1 = create_test_app("frappe")
+ app2 = create_test_app("erpnext")
+
+ group = create_test_release_group([app1, app2])
+ bench = create_test_bench(group=group)
+ site = create_test_site(bench=bench.name, apps=[app1.name])
+
+ dest_bench = create_test_bench()
+ site_migration: SiteMigration = frappe.get_doc(
+ {
+ "doctype": "Site Migration",
+ "site": site.name,
+ "destination_bench": dest_bench.name,
+ "scheduled_time": frappe.utils.now_datetime(),
+ }
+ ).insert()
+
+ site.append("apps", {"app": app2.name})
+ site.save()
+
+ run_scheduled_migrations()
+ site_migration.reload()
+ self.assertEqual(site_migration.status, "Failure")
diff --git a/press/press/doctype/site_migration_step/site_migration_step.json b/press/press/doctype/site_migration_step/site_migration_step.json
index df0a738db7f..8d65c29fee8 100644
--- a/press/press/doctype/site_migration_step/site_migration_step.json
+++ b/press/press/doctype/site_migration_step/site_migration_step.json
@@ -18,7 +18,7 @@
"fieldtype": "Select",
"in_list_view": 1,
"label": "Status",
- "options": "Pending\nRunning\nSuccess\nFailure\nSkipped",
+ "options": "Pending\nRunning\nSuccess\nFailure\nSkipped\nDelivery Failure",
"reqd": 1
},
{
@@ -50,7 +50,7 @@
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
- "modified": "2021-05-11 15:32:38.205462",
+ "modified": "2024-01-12 16:30:57.497115",
"modified_by": "Administrator",
"module": "Press",
"name": "Site Migration Step",
@@ -58,5 +58,6 @@
"permissions": [],
"sort_field": "modified",
"sort_order": "DESC",
+ "states": [],
"track_changes": 1
}
\ No newline at end of file
diff --git a/press/press/doctype/site_migration_step/site_migration_step.py b/press/press/doctype/site_migration_step/site_migration_step.py
index ecf758f77ee..bb1c1ccb994 100644
--- a/press/press/doctype/site_migration_step/site_migration_step.py
+++ b/press/press/doctype/site_migration_step/site_migration_step.py
@@ -8,4 +8,24 @@
class SiteMigrationStep(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ method_arg: DF.Data | None
+ method_name: DF.Data
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ status: DF.Literal[
+ "Pending", "Running", "Success", "Failure", "Skipped", "Delivery Failure"
+ ]
+ step_job: DF.Link | None
+ step_title: DF.Data
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/site_migration_step/test_site_migration_step.py b/press/press/doctype/site_migration_step/test_site_migration_step.py
index 1b13a2bb374..e4ef003d1cc 100644
--- a/press/press/doctype/site_migration_step/test_site_migration_step.py
+++ b/press/press/doctype/site_migration_step/test_site_migration_step.py
@@ -1,11 +1,10 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2021, Frappe and Contributors
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestSiteMigrationStep(unittest.TestCase):
+class TestSiteMigrationStep(FrappeTestCase):
pass
diff --git a/press/press/doctype/site_partner_lead/__init__.py b/press/press/doctype/site_partner_lead/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/site_partner_lead/site_partner_lead.js b/press/press/doctype/site_partner_lead/site_partner_lead.js
new file mode 100644
index 00000000000..dadf2a4524f
--- /dev/null
+++ b/press/press/doctype/site_partner_lead/site_partner_lead.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Site Partner Lead", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/site_partner_lead/site_partner_lead.json b/press/press/doctype/site_partner_lead/site_partner_lead.json
new file mode 100644
index 00000000000..928c0deb4b9
--- /dev/null
+++ b/press/press/doctype/site_partner_lead/site_partner_lead.json
@@ -0,0 +1,130 @@
+{
+ "actions": [],
+ "allow_import": 1,
+ "allow_rename": 1,
+ "creation": "2025-09-20 12:47:50.193116",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "team",
+ "site",
+ "frappe_lead",
+ "created_on",
+ "site_details_section",
+ "company",
+ "currency",
+ "country",
+ "users",
+ "column_break_wmtd",
+ "first_name",
+ "last_name",
+ "email",
+ "domain"
+ ],
+ "fields": [
+ {
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "label": "Team",
+ "options": "Team",
+ "reqd": 1
+ },
+ {
+ "fieldname": "site",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Site",
+ "options": "Site"
+ },
+ {
+ "fieldname": "frappe_lead",
+ "fieldtype": "Data",
+ "label": "Frappe Lead"
+ },
+ {
+ "fieldname": "site_details_section",
+ "fieldtype": "Section Break",
+ "label": "Site Details"
+ },
+ {
+ "fieldname": "company",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Company"
+ },
+ {
+ "fieldname": "currency",
+ "fieldtype": "Link",
+ "label": "Currency",
+ "options": "Currency"
+ },
+ {
+ "fieldname": "country",
+ "fieldtype": "Link",
+ "label": "Country",
+ "options": "Country"
+ },
+ {
+ "fieldname": "users",
+ "fieldtype": "Int",
+ "in_list_view": 1,
+ "label": "Users"
+ },
+ {
+ "fieldname": "column_break_wmtd",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "first_name",
+ "fieldtype": "Data",
+ "label": "First Name"
+ },
+ {
+ "fieldname": "last_name",
+ "fieldtype": "Data",
+ "label": "Last Name"
+ },
+ {
+ "fieldname": "email",
+ "fieldtype": "Data",
+ "label": "Email"
+ },
+ {
+ "fieldname": "domain",
+ "fieldtype": "Data",
+ "label": "Domain"
+ },
+ {
+ "fieldname": "created_on",
+ "fieldtype": "Date",
+ "label": "Created On"
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2025-09-20 13:15:46.626864",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Site Partner Lead",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/press/doctype/site_partner_lead/site_partner_lead.py b/press/press/doctype/site_partner_lead/site_partner_lead.py
new file mode 100644
index 00000000000..36ee3a5ad53
--- /dev/null
+++ b/press/press/doctype/site_partner_lead/site_partner_lead.py
@@ -0,0 +1,31 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class SitePartnerLead(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ company: DF.Data | None
+ country: DF.Link | None
+ created_on: DF.Date | None
+ currency: DF.Link | None
+ domain: DF.Data | None
+ email: DF.Data | None
+ first_name: DF.Data | None
+ frappe_lead: DF.Data | None
+ last_name: DF.Data | None
+ site: DF.Link | None
+ team: DF.Link
+ users: DF.Int
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/site_partner_lead/test_site_partner_lead.py b/press/press/doctype/site_partner_lead/test_site_partner_lead.py
new file mode 100644
index 00000000000..b7a297bf053
--- /dev/null
+++ b/press/press/doctype/site_partner_lead/test_site_partner_lead.py
@@ -0,0 +1,20 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests import IntegrationTestCase
+
+# On IntegrationTestCase, the doctype test records and all
+# link-field test record dependencies are recursively loaded
+# Use these module variables to add/remove to/from that list
+EXTRA_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
+IGNORE_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
+
+
+class IntegrationTestSitePartnerLead(IntegrationTestCase):
+ """
+ Integration tests for SitePartnerLead.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/site_plan/__init__.py b/press/press/doctype/site_plan/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/site_plan/plan.py b/press/press/doctype/site_plan/plan.py
new file mode 100644
index 00000000000..dbda41003f3
--- /dev/null
+++ b/press/press/doctype/site_plan/plan.py
@@ -0,0 +1,59 @@
+import frappe
+from frappe.model.document import Document
+from frappe.utils import rounded
+
+from press.utils import group_children_in_result
+
+
+class Plan(Document):
+ def get_price_for_interval(self, interval, currency):
+ price_per_day = self.get_price_per_day(currency)
+
+ if interval == "Daily":
+ return price_per_day
+
+ if interval == "Monthly":
+ return rounded(price_per_day * 30)
+
+ return None
+
+ def get_price_per_day(self, currency):
+ price = self.price_inr if currency == "INR" else self.price_usd
+ return rounded(price / self.period, 2)
+
+ @property
+ def period(self):
+ return frappe.utils.get_last_day(None).day
+
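Since period is the number of days in the current month, the pricing methods above prorate a monthly price into a daily rate and rebuild interval prices from it. A worked example, assuming an INR price of 750 in a 30-day month:

price_inr, days_in_month = 750, 30
price_per_day = round(price_inr / days_in_month, 2)  # 25.0, as in get_price_per_day
monthly_price = round(price_per_day * 30)  # 750, as in get_price_for_interval("Monthly")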
+ @classmethod
+ def get_plans(cls, doctype, fields=None, filters=None):
+ or_filters = None
+ filters = filters or {}
+ if not fields:
+ fields = ["*"]
+
+ # Should either be enabled or a legacy plan
+ # If no platform is passed in, we want tighter control and only show
+ # enabled plans in the region; otherwise legacy plans may be shown too
+ if doctype != "Server Plan" or not filters.get("platform"):
+ filters.update({"enabled": True})
+ else:
+ or_filters = {"enabled": True, "legacy_plan": True}
+
+ fields.append("`tabHas Role`.role")
+ plans = frappe.get_all(
+ doctype, filters=filters, fields=fields, order_by="price_usd asc", or_filters=or_filters
+ )
+ return filter_by_roles(plans)
+
+
+def filter_by_roles(plans):
+ plans = group_children_in_result(plans, {"role": "roles"})
+
+ out = []
+ for plan in plans:
+ if frappe.utils.has_common(plan["roles"], frappe.get_roles()):
+ plan.pop("roles", "")
+ out.append(plan)
+
+ return out
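After group_children_in_result collapses the joined `tabHas Role` rows into a per-plan "roles" list, a plan stays visible only if the current user holds at least one of those roles. A minimal sketch with assumed plan names and roles:

plans = [
    {"name": "USD 10", "roles": ["Press Member"]},
    {"name": "Internal", "roles": ["System Manager"]},
]
user_roles = ["Press Member", "Guest"]
visible = [p["name"] for p in plans if set(p["roles"]) & set(user_roles)]
assert visible == ["USD 10"]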
diff --git a/press/press/doctype/site_plan/site_plan.js b/press/press/doctype/site_plan/site_plan.js
new file mode 100644
index 00000000000..66421f7fdfb
--- /dev/null
+++ b/press/press/doctype/site_plan/site_plan.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2024, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Site Plan", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/site_plan/site_plan.json b/press/press/doctype/site_plan/site_plan.json
new file mode 100644
index 00000000000..ca679c40c55
--- /dev/null
+++ b/press/press/doctype/site_plan/site_plan.json
@@ -0,0 +1,318 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "autoname": "Prompt",
+ "creation": "2022-01-28 20:07:37.055861",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "enabled",
+ "section_break_2",
+ "document_type",
+ "plan_title",
+ "interval",
+ "column_break_5",
+ "price_inr",
+ "price_usd",
+ "allow_downgrading_from_other_plan",
+ "legacy_plan",
+ "features_section",
+ "cpu_time_per_day",
+ "max_database_usage",
+ "max_storage_usage",
+ "column_break_13",
+ "is_trial_plan",
+ "offsite_backups",
+ "private_benches",
+ "database_access",
+ "monitor_access",
+ "support_included",
+ "dedicated_server_plan",
+ "is_frappe_plan",
+ "private_bench_support",
+ "allowed_providers_section",
+ "cloud_providers",
+ "customization_for_bench_section",
+ "release_groups",
+ "allowed_apps",
+ "servers",
+ "cluster",
+ "instance_type",
+ "column_break_21",
+ "vcpu",
+ "memory",
+ "disk",
+ "roles_section",
+ "roles"
+ ],
+ "fields": [
+ {
+ "default": "1",
+ "fieldname": "enabled",
+ "fieldtype": "Check",
+ "label": "Enabled"
+ },
+ {
+ "fieldname": "section_break_2",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "document_type",
+ "fieldtype": "Link",
+ "label": "Document Type",
+ "options": "DocType",
+ "reqd": 1
+ },
+ {
+ "fieldname": "plan_title",
+ "fieldtype": "Data",
+ "label": "Plan Title"
+ },
+ {
+ "fieldname": "interval",
+ "fieldtype": "Select",
+ "label": "Interval",
+ "options": "Daily\nMonthly\nAnnually"
+ },
+ {
+ "fieldname": "column_break_5",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "price_inr",
+ "fieldtype": "Currency",
+ "in_list_view": 1,
+ "label": "Price (INR)",
+ "options": "INR",
+ "reqd": 1
+ },
+ {
+ "fieldname": "price_usd",
+ "fieldtype": "Currency",
+ "in_list_view": 1,
+ "label": "Price (USD)",
+ "options": "USD",
+ "reqd": 1
+ },
+ {
+ "depends_on": "eval:doc.document_type == 'Site'",
+ "fieldname": "features_section",
+ "fieldtype": "Section Break",
+ "label": "Site Features"
+ },
+ {
+ "fieldname": "cpu_time_per_day",
+ "fieldtype": "Float",
+ "in_list_view": 1,
+ "label": "CPU Time Per Day"
+ },
+ {
+ "fieldname": "max_database_usage",
+ "fieldtype": "Int",
+ "label": "Max Database Usage (MiB)"
+ },
+ {
+ "fieldname": "max_storage_usage",
+ "fieldtype": "Int",
+ "label": "Max Storage Usage (MiB)"
+ },
+ {
+ "fieldname": "column_break_13",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "is_trial_plan",
+ "fieldtype": "Check",
+ "label": "Is Trial Plan"
+ },
+ {
+ "default": "0",
+ "fieldname": "offsite_backups",
+ "fieldtype": "Check",
+ "in_list_view": 1,
+ "label": "Offsite Backups"
+ },
+ {
+ "default": "0",
+ "fieldname": "private_benches",
+ "fieldtype": "Check",
+ "label": "Private Benches"
+ },
+ {
+ "default": "0",
+ "fieldname": "database_access",
+ "fieldtype": "Check",
+ "label": "Database Access"
+ },
+ {
+ "default": "0",
+ "fieldname": "monitor_access",
+ "fieldtype": "Check",
+ "label": "Monitor Access"
+ },
+ {
+ "default": "0",
+ "fieldname": "support_included",
+ "fieldtype": "Check",
+ "label": "Support Included"
+ },
+ {
+ "default": "0",
+ "fieldname": "dedicated_server_plan",
+ "fieldtype": "Check",
+ "label": "Dedicated Server Plan"
+ },
+ {
+ "default": "0",
+ "description": "Enterprise, Central, Frappe Team etc",
+ "fieldname": "is_frappe_plan",
+ "fieldtype": "Check",
+ "label": "Is Frappe Plan"
+ },
+ {
+ "fieldname": "servers",
+ "fieldtype": "Section Break",
+ "label": "Servers"
+ },
+ {
+ "fieldname": "cluster",
+ "fieldtype": "Link",
+ "label": "Cluster",
+ "options": "Cluster"
+ },
+ {
+ "fieldname": "instance_type",
+ "fieldtype": "Data",
+ "label": "Instance Type"
+ },
+ {
+ "fieldname": "column_break_21",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "vcpu",
+ "fieldtype": "Int",
+ "label": "vCPU"
+ },
+ {
+ "fieldname": "memory",
+ "fieldtype": "Int",
+ "label": "Memory"
+ },
+ {
+ "fieldname": "disk",
+ "fieldtype": "Int",
+ "label": "Disk"
+ },
+ {
+ "fieldname": "roles_section",
+ "fieldtype": "Section Break",
+ "label": "Roles"
+ },
+ {
+ "fieldname": "roles",
+ "fieldtype": "Table",
+ "label": "Roles",
+ "options": "Has Role"
+ },
+ {
+ "description": "Leave this table empty to schedule deployment on any release group",
+ "fieldname": "release_groups",
+ "fieldtype": "Table",
+ "label": "Release Groups",
+ "options": "Site Plan Release Group"
+ },
+ {
+ "fieldname": "customization_for_bench_section",
+ "fieldtype": "Section Break",
+ "label": "Customization For Bench"
+ },
+ {
+ "description": "Leave this table empty to allow any app to install for the site",
+ "fieldname": "allowed_apps",
+ "fieldtype": "Table",
+ "label": "Allowed Apps",
+ "options": "Site Plan Allowed App"
+ },
+ {
+ "default": "1",
+ "fieldname": "allow_downgrading_from_other_plan",
+ "fieldtype": "Check",
+ "label": "Allow Downgrading From Other Plan"
+ },
+ {
+ "default": "0",
+ "fieldname": "legacy_plan",
+ "fieldtype": "Check",
+ "label": "Legacy Plan"
+ },
+ {
+ "fieldname": "allowed_providers_section",
+ "fieldtype": "Section Break",
+ "label": "Allowed Providers"
+ },
+ {
+ "fieldname": "cloud_providers",
+ "fieldtype": "Table",
+ "label": "Cloud Providers",
+ "options": "Cloud Providers"
+ },
+ {
+ "default": "0",
+ "description": "Create Site On A Newly Deployed Bench",
+ "fieldname": "private_bench_support",
+ "fieldtype": "Check",
+ "label": "Private Bench Support"
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2026-01-11 20:29:59.240087",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Site Plan",
+ "naming_rule": "Set by user",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1
+ },
+ {
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1
+ }
+ ],
+ "quick_entry": 1,
+ "row_format": "Dynamic",
+ "sort_field": "price_usd",
+ "sort_order": "ASC",
+ "states": [],
+ "title_field": "plan_title",
+ "track_changes": 1
+}
\ No newline at end of file
diff --git a/press/press/doctype/site_plan/site_plan.py b/press/press/doctype/site_plan/site_plan.py
new file mode 100644
index 00000000000..658903d7274
--- /dev/null
+++ b/press/press/doctype/site_plan/site_plan.py
@@ -0,0 +1,111 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+from __future__ import annotations
+
+import frappe
+
+from press.press.doctype.site_plan.plan import Plan
+
+UNLIMITED_PLANS = ["Unlimited", "Unlimited - Supported"]
+
+
+class SitePlan(Plan):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.core.doctype.has_role.has_role import HasRole
+ from frappe.types import DF
+
+ from press.press.doctype.cloud_providers.cloud_providers import CloudProviders # assumed path per doctype naming convention
+ from press.press.doctype.site_plan_allowed_app.site_plan_allowed_app import SitePlanAllowedApp
+ from press.press.doctype.site_plan_release_group.site_plan_release_group import SitePlanReleaseGroup
+
+ allow_downgrading_from_other_plan: DF.Check
+ allowed_apps: DF.Table[SitePlanAllowedApp]
+ cloud_providers: DF.Table[CloudProviders]
+ cluster: DF.Link | None
+ cpu_time_per_day: DF.Float
+ database_access: DF.Check
+ dedicated_server_plan: DF.Check
+ disk: DF.Int
+ document_type: DF.Link
+ enabled: DF.Check
+ instance_type: DF.Data | None
+ interval: DF.Literal["Daily", "Monthly", "Annually"]
+ is_frappe_plan: DF.Check
+ is_trial_plan: DF.Check
+ legacy_plan: DF.Check
+ max_database_usage: DF.Int
+ max_storage_usage: DF.Int
+ memory: DF.Int
+ monitor_access: DF.Check
+ offsite_backups: DF.Check
+ plan_title: DF.Data | None
+ price_inr: DF.Currency
+ price_usd: DF.Currency
+ private_bench_support: DF.Check
+ private_benches: DF.Check
+ release_groups: DF.Table[SitePlanReleaseGroup]
+ roles: DF.Table[HasRole]
+ support_included: DF.Check
+ vcpu: DF.Int
+ # end: auto-generated types
+
+ dashboard_fields = (
+ "name",
+ "plan_title",
+ "document_type",
+ "document_name",
+ "price_inr",
+ "price_usd",
+ "period",
+ "cpu_time_per_day",
+ "max_database_usage",
+ "max_storage_usage",
+ "database_access",
+ "support_included",
+ "private_benches",
+ "monitor_access",
+ "is_trial_plan",
+ )
+
+ def get_doc(self, doc):
+ doc["price_per_day_inr"] = self.get_price_per_day("INR")
+ doc["price_per_day_usd"] = self.get_price_per_day("USD")
+ return doc
+
+ @classmethod
+ def get_ones_without_offsite_backups(cls) -> list[str]:
+ return frappe.get_all("Site Plan", filters={"offsite_backups": False}, pluck="name")
+
+ def validate(self):
+ self.validate_active_subscriptions()
+
+ def validate_active_subscriptions(self):
+ old_doc = self.get_doc_before_save()
+ if old_doc and old_doc.enabled and not self.enabled and not self.legacy_plan:
+ active_sub_count = frappe.db.count("Subscription", {"enabled": 1, "plan": self.name})
+ if active_sub_count > 0:
+ frappe.throw(
+ f"Cannot disable this plan. This plan is used in {active_sub_count} active subscription(s)."
+ )
+
+
+def get_plan_config(name):
+ limits = frappe.db.get_value(
+ "Site Plan",
+ name,
+ ["cpu_time_per_day", "max_database_usage", "max_storage_usage"],
+ as_dict=True,
+ )
+ if limits and limits.get("cpu_time_per_day", 0) > 0:
+ return {
+ "rate_limit": {"limit": limits.cpu_time_per_day * 3600, "window": 86400},
+ "plan_limit": {
+ "max_database_usage": limits.max_database_usage,
+ "max_storage_usage": limits.max_storage_usage,
+ },
+ }
+ return {}
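For a plan with a CPU allowance, get_plan_config converts CPU-hours per day into a seconds-based rate limit over a 24-hour window, alongside the raw storage limits. An illustrative result for an assumed plan with 2 CPU-hours/day and 1024 MiB limits:

limits = {"cpu_time_per_day": 2, "max_database_usage": 1024, "max_storage_usage": 1024}
config = {
    "rate_limit": {"limit": limits["cpu_time_per_day"] * 3600, "window": 86400},
    "plan_limit": {
        "max_database_usage": limits["max_database_usage"],
        "max_storage_usage": limits["max_storage_usage"],
    },
}
assert config["rate_limit"]["limit"] == 7200  # 2 CPU-hours expressed in seconds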
diff --git a/press/press/doctype/site_plan/test_site_plan.py b/press/press/doctype/site_plan/test_site_plan.py
new file mode 100644
index 00000000000..bd61972278b
--- /dev/null
+++ b/press/press/doctype/site_plan/test_site_plan.py
@@ -0,0 +1,94 @@
+# Copyright (c) 2024, Frappe and Contributors
+# See license.txt
+
+from __future__ import annotations
+
+from datetime import date
+from unittest.mock import patch
+
+import frappe
+from frappe.model.naming import make_autoname
+from frappe.tests.utils import FrappeTestCase
+
+
+def create_test_plan(
+ document_type: str,
+ price_usd: float = 10.0,
+ price_inr: float = 750.0,
+ cpu_time: int = 1,
+ max_database_usage: int = 1000,
+ max_storage_usage: int = 1000,
+ plan_title: str | None = None,
+ plan_name: str | None = None,
+ allow_downgrading_from_other_plan: bool = True,
+ allowed_apps: list[str] | None = None,
+ release_groups: list[str] | None = None,
+ private_benches: bool = False,
+ is_trial_plan: bool = False,
+):
+ """Create test Plan doc."""
+ plan_name = plan_name or f"Test {document_type} plan {make_autoname('.#')}"
+ plan_title = plan_title or plan_name
+ plan = frappe.get_doc(
+ {
+ "doctype": "Site Plan",
+ "document_type": "Site",
+ "name": plan_name,
+ "plan_title": plan_title,
+ "price_inr": price_inr,
+ "price_usd": price_usd,
+ "cpu_time_per_day": cpu_time,
+ "max_database_usage": max_database_usage,
+ "max_storage_usage": max_storage_usage,
+ "allow_downgrading_from_other_plan": allow_downgrading_from_other_plan,
+ "disk": 50,
+ "instance_type": "t2.micro",
+ "private_benches": private_benches,
+ "is_trial_plan": is_trial_plan,
+ }
+ )
+ if allowed_apps:
+ for app in allowed_apps:
+ plan.append("allowed_apps", {"app": app})
+ if release_groups:
+ for release_group in release_groups:
+ plan.append("release_groups", {"release_group": release_group})
+
+ plan.insert(ignore_if_duplicate=True)
+ plan.reload()
+ return plan
+
+
+class TestSitePlan(FrappeTestCase):
+ def setUp(self):
+ self.plan = create_test_plan("Site")
+
+ def tearDown(self):
+ frappe.db.rollback()
+
+ def test_period_int(self):
+ self.assertIsInstance(self.plan.period, int)
+
+ def test_per_day_difference(self):
+ per_day_usd = self.plan.get_price_per_day("USD")
+ per_day_inr = self.plan.get_price_per_day("INR")
+ self.assertIsInstance(per_day_inr, (int, float))
+ self.assertIsInstance(per_day_usd, (int, float))
+ self.assertNotEqual(per_day_inr, per_day_usd)
+
+ def test_dynamic_period(self):
+ month_with_29_days = frappe.utils.get_last_day(date(2020, 2, 3))
+ month_with_30_days = frappe.utils.get_last_day(date(1997, 4, 3))
+
+ with patch.object(frappe.utils, "get_last_day", return_value=month_with_30_days):
+ self.assertEqual(self.plan.period, 30)
+ per_day_for_30_usd = self.plan.get_price_per_day("USD")
+ per_day_for_30_inr = self.plan.get_price_per_day("INR")
+
+ with patch.object(frappe.utils, "get_last_day", return_value=month_with_29_days):
+ self.assertEqual(self.plan.period, 29)
+ per_day_for_29_usd = self.plan.get_price_per_day("USD")
+ per_day_for_29_inr = self.plan.get_price_per_day("INR")
+
+ self.assertNotEqual(per_day_for_29_usd, per_day_for_30_usd)
+ self.assertNotEqual(per_day_for_29_inr, per_day_for_30_inr)
diff --git a/press/press/doctype/site_plan_allowed_app/__init__.py b/press/press/doctype/site_plan_allowed_app/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/site_plan_allowed_app/site_plan_allowed_app.json b/press/press/doctype/site_plan_allowed_app/site_plan_allowed_app.json
new file mode 100644
index 00000000000..bfc665add11
--- /dev/null
+++ b/press/press/doctype/site_plan_allowed_app/site_plan_allowed_app.json
@@ -0,0 +1,33 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2024-07-09 23:27:13.919845",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "app"
+ ],
+ "fields": [
+ {
+ "fieldname": "app",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "App",
+ "options": "App",
+ "reqd": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2024-07-09 23:27:30.587794",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Site Plan Allowed App",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/site_plan_allowed_app/site_plan_allowed_app.py b/press/press/doctype/site_plan_allowed_app/site_plan_allowed_app.py
new file mode 100644
index 00000000000..117cc01d39e
--- /dev/null
+++ b/press/press/doctype/site_plan_allowed_app/site_plan_allowed_app.py
@@ -0,0 +1,22 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class SitePlanAllowedApp(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ app: DF.Link
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ # end: auto-generated types
+ pass
diff --git a/press/press/doctype/site_plan_change/site_plan_change.json b/press/press/doctype/site_plan_change/site_plan_change.json
index 940be1f106f..367d08c299a 100644
--- a/press/press/doctype/site_plan_change/site_plan_change.json
+++ b/press/press/doctype/site_plan_change/site_plan_change.json
@@ -20,7 +20,8 @@
"in_list_view": 1,
"label": "Site",
"options": "Site",
- "reqd": 1
+ "reqd": 1,
+ "search_index": 1
},
{
"fetch_from": "site.team",
@@ -34,14 +35,14 @@
"fieldname": "from_plan",
"fieldtype": "Link",
"label": "From Plan",
- "options": "Plan"
+ "options": "Site Plan"
},
{
"fieldname": "to_plan",
"fieldtype": "Link",
"in_list_view": 1,
"label": "To Plan",
- "options": "Plan",
+ "options": "Site Plan",
"reqd": 1
},
{
@@ -63,7 +64,7 @@
}
],
"links": [],
- "modified": "2023-02-23 09:25:03.915667",
+ "modified": "2025-03-18 10:10:37.246343",
"modified_by": "Administrator",
"module": "Press",
"name": "Site Plan Change",
diff --git a/press/press/doctype/site_plan_change/site_plan_change.py b/press/press/doctype/site_plan_change/site_plan_change.py
index bf0ba6b9e91..c2852f7e8b3 100644
--- a/press/press/doctype/site_plan_change/site_plan_change.py
+++ b/press/press/doctype/site_plan_change/site_plan_change.py
@@ -1,24 +1,63 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
import frappe
from frappe import _
from frappe.model.document import Document
+from press.utils.webhook import create_webhook_event
+
class SitePlanChange(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from_plan: DF.Link | None
+ site: DF.Link
+ team: DF.Link | None
+ timestamp: DF.Datetime | None
+ to_plan: DF.Link
+ type: DF.Literal["", "Initial Plan", "Upgrade", "Downgrade"]
+ # end: auto-generated types
+
+ dashboard_fields = ("from_plan", "to_plan", "type", "site", "timestamp")
+
def validate(self):
+ if not self.from_plan and self.to_plan:
+ self.type = "Initial Plan"
+
+ if self.from_plan and self.to_plan and self.from_plan == self.to_plan:
+ frappe.throw("From Plan and To Plan cannot be the same")
+
if self.from_plan and not self.type:
- from_plan_value = frappe.db.get_value("Plan", self.from_plan, "price_usd")
- to_plan_value = frappe.db.get_value("Plan", self.to_plan, "price_usd")
+ from_plan_value = frappe.db.get_value("Site Plan", self.from_plan, "price_usd")
+ to_plan_value = frappe.db.get_value("Site Plan", self.to_plan, "price_usd")
self.type = "Downgrade" if from_plan_value > to_plan_value else "Upgrade"
+ if (
+ self.from_plan
+ and self.to_plan
+ and self.type == "Downgrade"
+ and not frappe.db.get_value("Site Plan", self.to_plan, "allow_downgrading_from_other_plan")
+ ):
+ frappe.throw(
+ f"Sorry, you cannot downgrade to {self.to_plan} from {self.from_plan}. Why? "
+ )
+
if self.type == "Initial Plan":
self.from_plan = ""
def after_insert(self):
+ if self.team != "Administrator":
+ create_webhook_event("Site Plan Change", self, self.team)
+
if self.type == "Initial Plan":
self.create_subscription()
return
@@ -38,9 +77,10 @@ def create_subscription(self):
frappe.get_doc(
doctype="Subscription",
team=self.team,
- plan=self.to_plan,
document_type="Site",
document_name=self.site,
+ plan_type="Site Plan",
+ plan=self.to_plan,
).insert()
def change_subscription_plan(self):
diff --git a/press/press/doctype/site_plan_change/test_site_plan_change.py b/press/press/doctype/site_plan_change/test_site_plan_change.py
index 5f36c981aa4..e6d88b07b3f 100644
--- a/press/press/doctype/site_plan_change/test_site_plan_change.py
+++ b/press/press/doctype/site_plan_change/test_site_plan_change.py
@@ -1,11 +1,78 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
-# import frappe
-import unittest
+import frappe
+from frappe.tests.utils import FrappeTestCase
+from press.press.doctype.site.test_site import create_test_site
+from press.press.doctype.site_plan.test_site_plan import create_test_plan
-class TestSitePlanChange(unittest.TestCase):
- pass
+
+class TestSitePlanChange(FrappeTestCase):
+ def setUp(self):
+ super().setUp()
+
+ self.tiny_plan = create_test_plan(
+ "Site",
+ plan_name="Tiny Plan",
+ allow_downgrading_from_other_plan=False,
+ price_usd=5.0,
+ price_inr=375.0,
+ )
+ self.nano_plan = create_test_plan(
+ "Site",
+ plan_name="Nano Plan",
+ allow_downgrading_from_other_plan=True,
+ price_usd=7.0,
+ price_inr=525.0,
+ )
+ self.unlimited_plan = create_test_plan(
+ "Site",
+ plan_name="Unlimited Plan",
+ allow_downgrading_from_other_plan=True,
+ price_usd=10.0,
+ price_inr=750.0,
+ )
+ self.site = create_test_site(subdomain="testsite")
+
+ def tearDown(self):
+ frappe.db.rollback()
+
+ def test_raise_error_while_downgrading_to_plan_in_which__allow_downgrading_from_other_plan__flag_is_disabled(
+ self,
+ ):
+ # Initially Set `Unlimited Plan` to site
+ self.site._create_initial_site_plan_change(self.unlimited_plan.name)
+ self.site.reload()
+ self.assertEqual(self.site.plan, self.unlimited_plan.name)
+ # Try to downgrade to `Tiny Plan` from `Unlimited Plan`
+ with self.assertRaises(frappe.exceptions.ValidationError) as context:
+ frappe.get_doc(
+ {
+ "doctype": "Site Plan Change",
+ "site": self.site.name,
+ "from_plan": self.unlimited_plan.name,
+ "to_plan": self.tiny_plan.name,
+ }
+ ).insert(ignore_permissions=True)
+
+ self.assertTrue("you cannot downgrade" in str(context.exception))
+
+ def test_allowed_to_downgrade_while__allow_downgrading_from_other_plan__flag_is_enabled(
+ self,
+ ):
+ # Initially Set `Unlimited Plan` to site
+ self.site._create_initial_site_plan_change(self.unlimited_plan.name)
+ self.site.reload()
+ self.assertEqual(self.site.plan, self.unlimited_plan.name)
+ # Try to downgrade to `Nano Plan` from `Unlimited Plan`
+ frappe.get_doc(
+ {
+ "doctype": "Site Plan Change",
+ "site": self.site.name,
+ "from_plan": self.unlimited_plan.name,
+ "to_plan": self.nano_plan.name,
+ }
+ ).insert(ignore_permissions=True)
+ self.assertEqual(frappe.db.get_value("Site", self.site.name, "plan"), self.nano_plan.name)
diff --git a/press/press/doctype/site_plan_release_group/__init__.py b/press/press/doctype/site_plan_release_group/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/site_plan_release_group/site_plan_release_group.json b/press/press/doctype/site_plan_release_group/site_plan_release_group.json
new file mode 100644
index 00000000000..4924c3e70f5
--- /dev/null
+++ b/press/press/doctype/site_plan_release_group/site_plan_release_group.json
@@ -0,0 +1,33 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2024-07-09 12:42:51.268467",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "release_group"
+ ],
+ "fields": [
+ {
+ "fieldname": "release_group",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Release Group",
+ "options": "Release Group",
+ "reqd": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2024-07-09 12:43:12.168927",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Site Plan Release Group",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/site_plan_release_group/site_plan_release_group.py b/press/press/doctype/site_plan_release_group/site_plan_release_group.py
new file mode 100644
index 00000000000..4b0935d9694
--- /dev/null
+++ b/press/press/doctype/site_plan_release_group/site_plan_release_group.py
@@ -0,0 +1,22 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class SitePlanReleaseGroup(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ release_group: DF.Link
+ # end: auto-generated types
+ pass
diff --git a/press/press/doctype/site_replication/site_replication.py b/press/press/doctype/site_replication/site_replication.py
index 4aa970940ee..d3ff255e18f 100644
--- a/press/press/doctype/site_replication/site_replication.py
+++ b/press/press/doctype/site_replication/site_replication.py
@@ -1,14 +1,33 @@
# Copyright (c) 2022, Frappe and contributors
# For license information, please see license.txt
+from typing import List
+
import frappe
from frappe.model.document import Document
-from press.press.doctype.site.site import prepare_site
+
from press.api.site import _new
-from typing import List
+from press.press.doctype.site.site import prepare_site
class SiteReplication(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ bench: DF.Link | None
+ new_site: DF.Link | None
+ release_group: DF.Link | None
+ server: DF.Link | None
+ site: DF.Link | None
+ status: DF.Literal["Not Started", "Running", "Success", "Failure"]
+ subdomain: DF.Data | None
+ # end: auto-generated types
+
doctype = "Site Replication"
def validate(self):
diff --git a/press/press/doctype/site_update/scheduled_auto_updates.py b/press/press/doctype/site_update/scheduled_auto_updates.py
index c37b2cab5b7..8def2576d6f 100644
--- a/press/press/doctype/site_update/scheduled_auto_updates.py
+++ b/press/press/doctype/site_update/scheduled_auto_updates.py
@@ -2,13 +2,14 @@
# Copyright (c) 2021, Frappe and contributors
# For license information, please see license.txt
+from calendar import monthrange
+
import frappe
+from frappe.utils import get_datetime, get_time, now_datetime
-from calendar import monthrange
-from press.utils import log_error
-from frappe.utils import now_datetime
-from frappe.utils import get_time, get_datetime
+from press.press.doctype.site.site import Site
from press.press.doctype.site_update.site_update import benches_with_available_update
+from press.utils import log_error
def trigger():
@@ -19,7 +20,8 @@ def trigger():
"Site",
filters={
"status": ("in", ("Active", "Inactive")),
- "auto_updates_scheduled": True,
+ "only_update_at_specified_time": True,
+ "skip_auto_updates": False,
"bench": (
"in",
benches_with_available_update(), # An update should be available for this site
@@ -52,7 +54,7 @@ def trigger():
set_schedule_details(auto_update_log, site)
try:
- site_doc = frappe.get_doc("Site", site.name)
+ site_doc: Site = frappe.get_doc("Site", site.name)
site_doc.schedule_update()
site_doc.auto_update_last_triggered_on = now_datetime()
site_doc.save()
diff --git a/press/press/doctype/site_update/site_update.js b/press/press/doctype/site_update/site_update.js
index bfcbbbc7113..f7d97467e20 100644
--- a/press/press/doctype/site_update/site_update.js
+++ b/press/press/doctype/site_update/site_update.js
@@ -2,14 +2,79 @@
// For license information, please see license.txt
frappe.ui.form.on('Site Update', {
- onload: function (frm) {
- frm.set_query('destination_bench', function () {
- return {
- filters: {
- status: 'Active',
- server: frm.doc.server,
- },
- };
+ refresh: function (frm) {
+ // Disable save button
+ frm.disable_save();
+
+ // Add link
+ frm.add_web_link(
+ `/dashboard/sites/${frm.doc.site}/updates/${frm.doc.name}`,
+ __('Visit Dashboard'),
+ );
+
+ if (frm.doc.status === 'Cancelled') return;
+
+ // Add custom buttons
+ [
+ [
+ __('Trigger Recovery Job'),
+ 'trigger_recovery_job',
+ !frm.doc.recover_job,
+ ],
+ [__('Start'), 'start', ['Scheduled', 'Failure'].includes(frm.doc.status)],
+ [
+ __('Cause of Failure is Resolved'),
+ 'set_cause_of_failure_is_resolved',
+ !frm.doc.cause_of_failure_is_resolved,
+ ],
+ ].forEach(([label, method, condition]) => {
+ if (typeof condition === 'undefined' || condition) {
+ frm.add_custom_button(
+ label,
+ () => {
+ frappe.confirm(
+ `Are you sure you want to ${label.toLowerCase()} this site update?`,
+ () => frm.call(method).then((r) => frm.refresh()),
+ );
+ },
+ __('Actions'),
+ );
+ }
});
+
+ // Allow to change status
+ frm.add_custom_button(
+ __('Change Status'),
+ () => {
+ let options = ['Success', 'Recovered', 'Failure', 'Fatal'];
+ if (frm.doc.status === 'Scheduled') options.push('Cancelled');
+
+ const dialog = new frappe.ui.Dialog({
+ title: __('Change Status'),
+ fields: [
+ {
+ fieldtype: 'Select',
+ label: __('Status'),
+ fieldname: 'status',
+ options: options,
+ },
+ ],
+ });
+
+ dialog.set_primary_action(__('Change Status'), (args) => {
+ frm
+ .call('set_status', {
+ status: args.status,
+ })
+ .then((r) => {
+ dialog.hide();
+ frm.reload_doc();
+ });
+ });
+
+ dialog.show();
+ },
+ __('Actions'),
+ );
},
});
diff --git a/press/press/doctype/site_update/site_update.json b/press/press/doctype/site_update/site_update.json
index e4a328f9fc0..4d6821b60b4 100644
--- a/press/press/doctype/site_update/site_update.json
+++ b/press/press/doctype/site_update/site_update.json
@@ -10,21 +10,39 @@
"source_bench",
"source_candidate",
"group",
+ "team",
"column_break_4",
"status",
"destination_bench",
"destination_candidate",
"destination_group",
+ "scheduled_time",
"section_break_8",
"difference",
"difference_deploy_type",
"deploy_type",
+ "skipped_failing_patches",
"column_break_14",
+ "skipped_backups",
+ "backup_type",
+ "site_backup",
+ "logical_replication_backup",
+ "section_break_luvm",
+ "deactivate_site_job",
"update_job",
+ "activate_site_job",
+ "column_break_rcyp",
+ "physical_backup_restoration",
"recover_job",
"cause_of_failure_is_resolved",
- "skipped_failing_patches",
- "skipped_backups"
+ "section_break_gmrz",
+ "update_start",
+ "column_break_jtqs",
+ "update_end",
+ "column_break_ellx",
+ "update_duration",
+ "section_break_tpap",
+ "touched_tables"
],
"fields": [
{
@@ -37,6 +55,7 @@
},
{
"fetch_from": "site.bench",
+ "fetch_if_empty": 1,
"fieldname": "source_bench",
"fieldtype": "Link",
"hide_days": 1,
@@ -60,7 +79,8 @@
"in_list_view": 1,
"in_standard_filter": 1,
"label": "Status",
- "options": "Pending\nRunning\nSuccess\nFailure\nRecovered\nFatal"
+ "no_copy": 1,
+ "options": "Pending\nRunning\nSuccess\nFailure\nRecovering\nRecovered\nFatal\nScheduled\nCancelled"
},
{
"fieldname": "destination_bench",
@@ -77,6 +97,7 @@
"hide_seconds": 1
},
{
+ "fetch_if_empty": 1,
"fieldname": "deploy_type",
"fieldtype": "Select",
"hide_days": 1,
@@ -84,24 +105,29 @@
"in_list_view": 1,
"in_standard_filter": 1,
"label": "Deploy Type",
+ "no_copy": 1,
"options": "\nPull\nMigrate"
},
{
"fetch_from": "source_bench.candidate",
+ "fetch_if_empty": 1,
"fieldname": "source_candidate",
"fieldtype": "Link",
"hide_days": 1,
"hide_seconds": 1,
"label": "Source Deploy Candidate",
+ "no_copy": 1,
"options": "Deploy Candidate"
},
{
"fetch_from": "destination_bench.candidate",
+ "fetch_if_empty": 1,
"fieldname": "destination_candidate",
"fieldtype": "Link",
"hide_days": 1,
"hide_seconds": 1,
"label": "Destination Deploy Candidate",
+ "no_copy": 1,
"options": "Deploy Candidate"
},
{
@@ -114,15 +140,18 @@
},
{
"fetch_from": "source_bench.group",
+ "fetch_if_empty": 1,
"fieldname": "group",
"fieldtype": "Link",
"hide_days": 1,
"hide_seconds": 1,
"label": "Source Group",
+ "no_copy": 1,
"options": "Release Group"
},
{
"fetch_from": "site.server",
+ "fetch_if_empty": 1,
"fieldname": "server",
"fieldtype": "Link",
"hide_days": 1,
@@ -138,7 +167,10 @@
"hide_days": 1,
"hide_seconds": 1,
"label": "Update Job",
- "options": "Agent Job"
+ "no_copy": 1,
+ "options": "Agent Job",
+ "read_only": 1,
+ "search_index": 1
},
{
"default": "0",
@@ -146,7 +178,8 @@
"fieldtype": "Check",
"hide_days": 1,
"hide_seconds": 1,
- "label": "Cause of Failure is Resolved"
+ "label": "Cause of Failure is Resolved",
+ "no_copy": 1
},
{
"fieldname": "column_break_14",
@@ -160,10 +193,14 @@
"hide_days": 1,
"hide_seconds": 1,
"label": "Recover Job",
- "options": "Agent Job"
+ "no_copy": 1,
+ "options": "Agent Job",
+ "read_only": 1,
+ "search_index": 1
},
{
"fetch_from": "difference.deploy_type",
+ "fetch_if_empty": 1,
"fieldname": "difference_deploy_type",
"fieldtype": "Select",
"label": "Difference Deploy Type",
@@ -174,25 +211,138 @@
"fieldname": "skipped_failing_patches",
"fieldtype": "Check",
"label": "Skipped Failing Patches",
+ "no_copy": 1,
"read_only": 1
},
{
"fetch_from": "destination_bench.group",
+ "fetch_if_empty": 1,
"fieldname": "destination_group",
"fieldtype": "Link",
"label": "Destination Group",
+ "no_copy": 1,
"options": "Release Group"
},
{
"default": "0",
"fieldname": "skipped_backups",
"fieldtype": "Check",
- "label": "Skipped Backups"
+ "label": "Skipped Backups",
+ "no_copy": 1,
+ "read_only": 1
+ },
+ {
+ "fieldname": "scheduled_time",
+ "fieldtype": "Datetime",
+ "label": "Scheduled Time"
+ },
+ {
+ "fetch_from": "site.team",
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "label": "Team",
+ "options": "Team"
+ },
+ {
+ "default": "Logical",
+ "fieldname": "backup_type",
+ "fieldtype": "Select",
+ "label": "Backup Type",
+ "options": "Logical\nPhysical\nLogical Replication",
+ "set_only_once": 1
+ },
+ {
+ "depends_on": "eval: doc.backup_type == \"Physical\"",
+ "fieldname": "site_backup",
+ "fieldtype": "Link",
+ "label": "Site Backup",
+ "options": "Site Backup",
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "activate_site_job",
+ "fieldtype": "Link",
+ "label": "Activate Site Job",
+ "options": "Agent Job",
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "deactivate_site_job",
+ "fieldtype": "Link",
+ "label": "Deactivate Site Job",
+ "options": "Agent Job",
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "section_break_luvm",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "column_break_rcyp",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "physical_backup_restoration",
+ "fieldtype": "Link",
+ "label": "Physical Backup Restoration",
+ "options": "Physical Backup Restoration",
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "section_break_tpap",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "touched_tables",
+ "fieldtype": "Code",
+ "label": "Touched Tables",
+ "read_only": 1
+ },
+ {
+ "fieldname": "section_break_gmrz",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "update_start",
+ "fieldtype": "Datetime",
+ "label": "Update Start",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_jtqs",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "update_end",
+ "fieldtype": "Datetime",
+ "label": "Update End",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_ellx",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "update_duration",
+ "fieldtype": "Duration",
+ "label": "Update Duration",
+ "read_only": 1
+ },
+ {
+ "fieldname": "logical_replication_backup",
+ "fieldtype": "Link",
+ "label": "Logical Replication Backup",
+ "options": "Logical Replication Backup",
+ "search_index": 1
}
],
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2023-03-21 14:31:40.445390",
+ "modified": "2025-12-06 21:13:57.822331",
"modified_by": "Administrator",
"module": "Press",
"name": "Site Update",
@@ -212,16 +362,19 @@
},
{
"create": 1,
- "role": "Press Admin"
+ "role": "Press Admin",
+ "write": 1
},
{
"create": 1,
- "role": "Press Member"
+ "role": "Press Member",
+ "write": 1
}
],
+ "row_format": "Dynamic",
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"title_field": "site",
"track_changes": 1
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/site_update/site_update.py b/press/press/doctype/site_update/site_update.py
index af22e440ed7..31c2913361c 100644
--- a/press/press/doctype/site_update/site_update.py
+++ b/press/press/doctype/site_update/site_update.py
@@ -1,20 +1,130 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
-import pytz
-import random
-import frappe
+from __future__ import annotations
-from press.agent import Agent
+import json
+import random
from datetime import datetime
-from press.utils import log_error
+from typing import TYPE_CHECKING, ClassVar
+
+import frappe
+import frappe.utils
+import pytz
from frappe.core.utils import find
from frappe.model.document import Document
+from frappe.utils import convert_utc_to_system_timezone
from frappe.utils.caching import site_cache
+from frappe.utils.data import cint
+
+from press.agent import Agent
+from press.api.client import dashboard_whitelist
+from press.exceptions import SiteAlreadyArchived, SiteUnderMaintenance
+from press.press.doctype.logical_replication_backup.logical_replication_backup import (
+ get_logical_replication_backup_restoration_steps,
+)
+from press.press.doctype.physical_backup_restoration.physical_backup_restoration import (
+ get_physical_backup_restoration_steps,
+)
+from press.utils import log_error
+
+if TYPE_CHECKING:
+ from press.press.doctype.agent_job.agent_job import AgentJob
+ from press.press.doctype.logical_replication_backup.logical_replication_backup import (
+ LogicalReplicationBackup,
+ )
+ from press.press.doctype.physical_backup_restoration.physical_backup_restoration import (
+ PhysicalBackupRestoration,
+ )
+ from press.press.doctype.site.site import Site
class SiteUpdate(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ name: DF.Data
+ activate_site_job: DF.Link | None
+ backup_type: DF.Literal["Logical", "Physical", "Logical Replication"]
+ cause_of_failure_is_resolved: DF.Check
+ deactivate_site_job: DF.Link | None
+ deploy_type: DF.Literal["", "Pull", "Migrate"]
+ destination_bench: DF.Link | None
+ destination_candidate: DF.Link | None
+ destination_group: DF.Link | None
+ difference: DF.Link | None
+ difference_deploy_type: DF.Literal["", "Pull", "Migrate"]
+ group: DF.Link | None
+ logical_replication_backup: DF.Link | None
+ physical_backup_restoration: DF.Link | None
+ recover_job: DF.Link | None
+ scheduled_time: DF.Datetime | None
+ server: DF.Link | None
+ site: DF.Link | None
+ site_backup: DF.Link | None
+ skipped_backups: DF.Check
+ skipped_failing_patches: DF.Check
+ source_bench: DF.Link | None
+ source_candidate: DF.Link | None
+ status: DF.Literal[
+ "Pending",
+ "Running",
+ "Success",
+ "Failure",
+ "Recovering",
+ "Recovered",
+ "Fatal",
+ "Scheduled",
+ "Cancelled",
+ ]
+ team: DF.Link | None
+ touched_tables: DF.Code | None
+ update_duration: DF.Duration | None
+ update_end: DF.Datetime | None
+ update_job: DF.Link | None
+ update_start: DF.Datetime | None
+ # end: auto-generated types
+
+ dashboard_fields: ClassVar = [
+ "status",
+ "site",
+ "destination_bench",
+ "source_bench",
+ "deploy_type",
+ "difference",
+ "scheduled_time",
+ "creation",
+ "skipped_backups",
+ "skipped_failing_patches",
+ "backup_type",
+ "physical_backup_restoration",
+ "activate_site_job",
+ "deactivate_site_job",
+ "update_job",
+ "recover_job",
+ "update_start",
+ "update_end",
+ "update_duration",
+ ]
+
+ @staticmethod
+ def get_list_query(query):
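+        # List query timestamps come back in UTC; convert `updated_on` (when
+        # present) to the system timezone before returning the rows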
+ results = query.run(as_dict=True)
+ for result in results:
+ if result.updated_on:
+ result.updated_on = convert_utc_to_system_timezone(result.updated_on).replace(tzinfo=None)
+
+ return results
+
+ def get_doc(self, doc):
+ doc.steps = self.get_steps()
+ return doc
+
def validate(self):
if not self.is_new():
return
@@ -42,6 +152,8 @@ def validate(self):
self.validate_apps()
self.validate_pending_updates()
self.validate_past_failed_updates()
+ self.set_physical_backup_mode_if_eligible()
+ self.set_logical_replication_backup_mode_if_eligible()
def validate_destination_bench(self, differences):
if not self.destination_bench:
@@ -70,7 +182,7 @@ def validate_deploy_candidate_difference(self, differences):
self.difference = difference.name
self.deploy_type = "Pull"
difference_doc = frappe.get_doc("Deploy Candidate Difference", self.difference)
- site_doc = frappe.get_doc("Site", self.site)
+ site_doc: "Site" = frappe.get_doc("Site", self.site)
for site_app in site_doc.apps:
difference_app = find(difference_doc.apps, lambda x: x.app == site_app.app)
if difference_app and difference_app.deploy_type == "Migrate":
@@ -87,7 +199,25 @@ def validate_pending_updates(self):
if self.has_pending_updates():
frappe.throw("An update is already pending for this site", frappe.ValidationError)
+ @property
+ def triggered_by_user(self):
+ return frappe.session.user != "Administrator"
+
+ @property
+ def use_physical_backup(self):
+ return self.backup_type == "Physical" and not self.skipped_backups
+
+ @property
+ def use_logical_replication_backup(self):
+ return self.backup_type == "Logical Replication" and not self.skipped_backups
+
def validate_past_failed_updates(self):
+ if getattr(self, "ignore_past_failures", False):
+ return
+
+ if self.triggered_by_user:
+ return # Allow user to trigger update for same source and destination
+
if not self.skipped_failing_patches and self.have_past_updates_failed():
frappe.throw(
f"Update from Source Candidate {self.source_candidate} to Destination"
@@ -105,8 +235,127 @@ def validate_apps(self):
frappe.ValidationError,
)
+ def before_insert(self):
+ self.backup_type = "Logical"
+ site: "Site" = frappe.get_cached_doc("Site", self.site)
+ site.check_move_scheduled()
+
def after_insert(self):
- self.create_agent_request()
+ if not self.scheduled_time:
+ self.start()
+
+ def set_physical_backup_mode_if_eligible(self): # noqa: C901
+ if self.skipped_backups:
+ return
+
+ if self.deploy_type != "Migrate":
+ return
+
+ # Check if physical backup is disabled globally from Press Settings
+ if frappe.utils.cint(frappe.get_value("Press Settings", None, "disable_physical_backup")):
+ return
+
+ database_server = frappe.get_value("Server", self.server, "database_server")
+ if not database_server:
+            # The server may be using a managed (e.g. RDS) database, with no self-hosted database server
+ return
+
+ # Check if physical backup is enabled on the database server
+ enable_physical_backup = frappe.get_value(
+ "Database Server", database_server, "enable_physical_backup"
+ )
+ if not enable_physical_backup:
+ return
+
+ # Sanity check - Provider should be AWS EC2
+ provider = frappe.get_value("Database Server", database_server, "provider")
+ if provider != "AWS EC2":
+ return
+
+        # Don't proceed with physical backup if EBS encryption (a KMS key) is in use
+ virtual_machine = frappe.get_value("Database Server", database_server, "virtual_machine")
+ has_kms_key_id = frappe.get_value("Virtual Machine", virtual_machine, "kms_key_id")
+ if has_kms_key_id:
+ return
+
+ # Check for last logical backup
+ last_logical_site_backups = frappe.db.get_list(
+ "Site Backup",
+ filters={"site": self.site, "physical": False, "status": "Success"},
+ pluck="database_size",
+ limit=1,
+ order_by="creation desc",
+ ignore_permissions=True,
+ )
+ db_backup_size = 0
+ if len(last_logical_site_backups) > 0:
+ db_backup_size = cint(last_logical_site_backups[0])
+
+        # Take a physical backup only if the last logical backup is larger than
+        # 300 MiB (which corresponds to an actual DB size of roughly 3 GB)
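+        # (314572800 = 300 * 1024 * 1024 bytes)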
+ if db_backup_size > 314572800:
+ self.backup_type = "Physical"
+
+ def set_logical_replication_backup_mode_if_eligible(self):
+ if self.skipped_backups:
+ return
+
+ if self.deploy_type != "Migrate":
+ return
+
+ database_server = frappe.get_value("Server", self.server, "database_server")
+ if not database_server:
+            # The server may be using a managed (e.g. RDS) database, with no self-hosted database server
+ return
+
+ # Sanity check - Provider should be AWS EC2
+ provider = frappe.get_value("Database Server", database_server, "provider")
+ if provider != "AWS EC2":
+ return
+
+ if not frappe.get_value("Server", self.server, "enable_logical_replication_during_site_update"):
+ return
+
+ self.backup_type = "Logical Replication"
+
+ @dashboard_whitelist()
+ def start(self):
+ previous_status = self.status
+
+ self.status = "Pending"
+ self.update_start = frappe.utils.now()
+ self.save()
+ site: Site = frappe.get_cached_doc("Site", self.site)
+ try:
+ site.ready_for_move()
+ except SiteAlreadyArchived:
+ # There is no point in retrying the update if the site is already archived
+ update_status(self.name, "Fatal")
+ return
+ except SiteUnderMaintenance:
+ # Just ignore the update for now
+ # It will be retried later
+ if previous_status == "Pending":
+                # During `Bench Update` and in some other cases, a Site Update
+                # gets status `Pending` with no `scheduled_time`. If we can't
+                # run the update right now in those cases, set the status to
+                # `Scheduled` and the `scheduled_time` to now
+ self.status = "Scheduled"
+ if not self.scheduled_time:
+ self.scheduled_time = frappe.utils.now_datetime()
+ self.save()
+ else:
+ update_status(self.name, previous_status)
+
+ return
+
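+        # Dispatch on backup strategy: physical backups deactivate the site
+        # first, logical replication creates its backup record first, and the
+        # plain logical path goes straight to the update agent job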
+ if self.use_physical_backup:
+ self.deactivate_site()
+ elif self.use_logical_replication_backup:
+ self.create_logical_replication_backup_record()
+ else:
+ self.create_update_site_agent_request()
def get_before_migrate_scripts(self, rollback=False):
site_apps = [app.app for app in frappe.get_doc("Site", self.site).apps]
@@ -117,7 +366,9 @@ def get_before_migrate_scripts(self, rollback=False):
scripts = {}
for app_rename in frappe.get_all(
- "App Rename", {"new_name": ["in", site_apps]}, ["old_name", "new_name", script_field]
+ "App Rename",
+ {"new_name": ["in", site_apps], "enabled": True},
+ ["old_name", "new_name", script_field],
):
scripts[app_rename.old_name] = app_rename.get(script_field)
@@ -128,7 +379,36 @@ def is_destination_above_v12(self):
version = frappe.get_cached_value("Release Group", self.destination_group, "version")
return frappe.get_cached_value("Frappe Version", version, "number") > 12
- def create_agent_request(self):
+ def create_logical_replication_backup_record(self):
+ record: LogicalReplicationBackup = frappe.get_doc(
+ {"doctype": "Logical Replication Backup", "site": self.site, "execution_stage": "Pre-Migrate"}
+ ).insert(ignore_permissions=True)
+ frappe.db.set_value("Site Update", self.name, "logical_replication_backup", record.name)
+ record.execute()
+
+ def trigger_post_migration_stage_logical_replication_backup(self):
+ if not self.logical_replication_backup:
+ return
+ record: LogicalReplicationBackup = frappe.get_doc(
+ "Logical Replication Backup", self.logical_replication_backup
+ )
+ record.execution_stage = "Post-Migrate"
+ record.status = "Pending"
+ record.save()
+ record.execute()
+
+ def trigger_failover_stage_logical_replication_backup(self):
+ if not self.logical_replication_backup:
+ return
+ record: LogicalReplicationBackup = frappe.get_doc(
+ "Logical Replication Backup", self.logical_replication_backup
+ )
+ record.execution_stage = "Failover"
+ record.status = "Pending"
+ record.save()
+ record.execute()
+
+ def create_update_site_agent_request(self):
agent = Agent(self.server)
site = frappe.get_doc("Site", self.site)
job = agent.update_site(
@@ -136,17 +416,49 @@ def create_agent_request(self):
self.destination_bench,
self.deploy_type,
skip_failing_patches=self.skipped_failing_patches,
- skip_backups=self.skipped_backups,
+ skip_backups=self.skipped_backups or self.backup_type in ["Physical", "Logical Replication"],
before_migrate_scripts=self.get_before_migrate_scripts(),
skip_search_index=self.is_destination_above_v12,
)
+ self.set_update_job_value(job)
+
+ def set_update_job_value(self, job):
frappe.db.set_value("Site Update", self.name, "update_job", job.name)
+ site_activity = frappe.db.get_value(
+ "Site Activity",
+ {
+ "site": self.site,
+ "action": "Update",
+ "job": ("is", "not set"),
+ },
+ order_by="creation desc",
+ )
+ if site_activity:
+ frappe.db.set_value("Site Activity", site_activity, "job", job.name)
+
+ def activate_site(self, backup_failed=False):
+ agent = Agent(self.server)
+ job = agent.activate_site(
+ frappe.get_doc("Site", self.site), reference_doctype="Site Update", reference_name=self.name
+ )
+ frappe.db.set_value("Site Update", self.name, "activate_site_job", job.name)
+ if backup_failed:
+ update_status(self.name, "Fatal")
+
+ def deactivate_site(self):
+ agent = Agent(self.server)
+ job = agent.deactivate_site(
+ frappe.get_doc("Site", self.site), reference_doctype="Site Update", reference_name=self.name
+ )
+ frappe.db.set_value("Site Update", self.name, {"deactivate_site_job": job.name, "status": "Running"})
+
+ def create_physical_backup(self):
+ site: Site = frappe.get_doc("Site", self.site)
+ frappe.db.set_value(
+ "Site Update", self.name, "site_backup", site.physical_backup(for_site_update=True).name
+ )
def have_past_updates_failed(self):
- if (
- not frappe.session.user == "Administrator"
- ): # Allow user to trigger update for same source and destination
- return False
return frappe.db.exists(
"Site Update",
{
@@ -160,9 +472,27 @@ def have_past_updates_failed(self):
def has_pending_updates(self):
return frappe.db.exists(
"Site Update",
- {"site": self.site, "status": ("in", ("Pending", "Running", "Failure"))},
+ {
+ "site": self.site,
+ "status": ("in", ("Pending", "Running", "Failure", "Scheduled", "Recovering")),
+ },
)
+ def is_workload_diff_high(self) -> bool:
+ site_plan = frappe.get_value("Site", self.site, "plan")
+ cpu = frappe.get_value("Site Plan", site_plan, "cpu_time_per_day") or 0 # if plan not set, assume 0
+
+        THRESHOLD = 8  # roughly a USD 100 site equivalent (workload is based on CPU)
+
+ workload_diff_high = cpu >= THRESHOLD
+
+ if not workload_diff_high:
+ source_bench = frappe.get_doc("Bench", self.source_bench)
+ dest_bench = frappe.get_doc("Bench", self.destination_bench)
+ workload_diff_high = (dest_bench.workload - source_bench.workload) > THRESHOLD
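+        # e.g. moving from a bench with workload 10 to one with workload 20
+        # gives a diff of 10 > THRESHOLD (8), so the diff counts as high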
+
+ return workload_diff_high
+
def reallocate_workers(self):
"""
Reallocate workers on source and destination benches
@@ -171,70 +501,304 @@ def reallocate_workers(self):
"""
group = frappe.get_doc("Release Group", self.destination_group)
- if group.public or group.central_bench:
+ if group.public or group.central_bench or not self.is_workload_diff_high():
+ return
+
+ frappe.enqueue_doc(
+ "Server",
+ self.server,
+ method="auto_scale_workers",
+ job_id=f"auto_scale_workers:{self.server}",
+ deduplicate=True,
+ enqueue_after_commit=True,
+ at_front=True,
+ )
+
+ @property
+ def touched_tables_list(self):
+ try:
+ return json.loads(self.touched_tables)
+ except Exception:
+ return []
+
+ @frappe.whitelist()
+ def trigger_recovery_job(self): # noqa: C901
+ if self.recover_job:
return
+ agent = Agent(self.server)
+ site: "Site" = frappe.get_doc("Site", self.site)
+ job = None
+ if site.bench == self.destination_bench:
+            # The site is already on the destination bench
+ update_status(self.name, "Recovering")
+ frappe.db.set_value("Site", self.site, "status", "Recovering")
+
+ # If physical backup is enabled, we need to first perform physical backup restoration
+ if self.use_physical_backup and not self.physical_backup_restoration:
+ # Perform Physical Backup Restoration if not already done
+ doc: PhysicalBackupRestoration = frappe.get_doc(
+ {
+ "doctype": "Physical Backup Restoration",
+ "site": self.site,
+ "status": "Pending",
+ "site_backup": self.site_backup,
+ "source_database": site.database_name,
+ "destination_database": site.database_name,
+ "destination_server": frappe.get_value("Server", site.server, "database_server"),
+ "restore_specific_tables": len(self.touched_tables_list) > 0,
+ "tables_to_restore": json.dumps(self.touched_tables_list),
+ }
+ )
+ doc.insert(ignore_permissions=True)
+ frappe.db.set_value(self.doctype, self.name, "physical_backup_restoration", doc.name)
+ doc.execute()
+            # Once the physical backup restoration finishes, it will trigger the
+            # recovery job again via process_physical_backup_restoration_status_update(...)
+ return
+
+ # If logical replication backup is enabled, we need to first perform failover stage
+ if (
+ self.use_logical_replication_backup
+ and frappe.get_value(
+ "Logical Replication Backup", self.logical_replication_backup, "failover_stage_status"
+ )
+ == "Pending"
+ ):
+ self.trigger_failover_stage_logical_replication_backup()
+ return
+
+ if not self.skipped_backups and self.physical_backup_restoration:
+ physical_backup_restoration_status = frappe.get_value(
+ "Physical Backup Restoration", self.physical_backup_restoration, "status"
+ )
+ if physical_backup_restoration_status == "Failure":
+ # mark site update as Fatal
+ update_status(self.name, "Fatal")
+ # mark site as broken
+ frappe.db.set_value("Site", self.site, "status", "Broken")
+ return
+ if physical_backup_restoration_status != "Success":
+ # just to be safe
+ frappe.throw("Physical Backup Restoration is still in progress")
+
+ # Attempt to move site to source bench
- server = frappe.get_doc("Server", self.server)
- source_bench = frappe.get_doc("Bench", self.source_bench)
- dest_bench = frappe.get_doc("Bench", self.destination_bench)
+ # Disable maintenance mode for active sites
+ activate = site.status_before_update == "Active"
+ job = agent.update_site_recover_move(
+ site,
+ self.source_bench,
+ self.deploy_type,
+ activate,
+ rollback_scripts=self.get_before_migrate_scripts(rollback=True),
+ restore_touched_tables=(not self.skipped_backups and self.backup_type == "Logical"),
+ )
+ else:
+ # Site is already on the source bench
+ if site.status_before_update == "Active":
+ # Disable maintenance mode for active sites
+ job = agent.update_site_recover(site)
+ else:
+ # Site is already on source bench and maintenance mode is on
+ # No need to do anything
+ site.reset_previous_status()
+ update_status(self.name, "Recovered")
+ if job:
+ frappe.db.set_value("Site Update", self.name, "recover_job", job.name)
+
+ def delete_backup_snapshot(self):
+ if self.site_backup:
+ snapshot = frappe.get_value("Site Backup", self.site_backup, "database_snapshot")
+ if snapshot:
+ frappe.get_doc("Virtual Disk Snapshot", snapshot).delete_snapshot()
+
+ @dashboard_whitelist()
+ def get_steps(self): # noqa: C901
+ """
+ {
+ "title": "Step Name",
+ "status": "Success",
+ "output": "Output",
+ }
+ TODO > Add duration of each step
+
+ Expand the steps of job
+ - Steps of Deactivate Job [if exists]
+ - Steps of Physical Backup Job [Site Backup] [if exists]
+ - Steps of Update Job
+ - Steps of Physical Restore Job [if exists]
+ - Steps of Recovery Job [if exists]
+ - Steps of Activate Job [if exists]
+ """
+ steps = []
+ if self.deactivate_site_job:
+ steps.extend(self.get_job_steps(self.deactivate_site_job, "Deactivate Site"))
+ if self.backup_type == "Physical" and self.site_backup:
+ if frappe.db.exists("Site Backup", self.site_backup):
+ agent_job = frappe.get_value("Site Backup", self.site_backup, "job")
+ steps.extend(self.get_job_steps(agent_job, "Backup Site"))
+ else:
+ steps.append(
+ {
+ "name": "site_backup_not_found",
+ "title": "Backup Cleared",
+ "status": "Skipped",
+ "output": "",
+ "stage": "Physical Backup",
+ }
+ )
- work_load_diff = dest_bench.work_load - source_bench.work_load
if (
- server.new_worker_allocation
- and work_load_diff
- >= 8 # USD 100 site equivalent. (Since workload is based off of CPU)
+ self.logical_replication_backup
+ and frappe.db.get_value(
+ "Logical Replication Backup", self.logical_replication_backup, "pre_migrate_stage_status"
+ )
+ != "Pending"
):
- server.auto_scale_workers()
+ steps.extend(
+ get_logical_replication_backup_restoration_steps(
+ self.logical_replication_backup, "Pre-Migrate"
+ )
+ )
+ if self.update_job:
+ steps.extend(self.get_job_steps(self.update_job, "Update Site"))
-def trigger_recovery_job(site_update_name):
- site_update = frappe.get_doc("Site Update", site_update_name)
- if site_update.recover_job:
- return
- agent = Agent(site_update.server)
- site = frappe.get_doc("Site", site_update.site)
- job = None
- if site.bench == site_update.destination_bench:
- # The site is already on destination bench
- # Attempt to move site to source bench
-
- # Disable maintenance mode for active sites
- activate = site.status_before_update == "Active"
- job = agent.update_site_recover_move(
- site,
- site_update.source_bench,
- site_update.deploy_type,
- activate,
- rollback_scripts=site_update.get_before_migrate_scripts(rollback=True),
+ if (
+ self.logical_replication_backup
+ and frappe.db.get_value(
+ "Logical Replication Backup", self.logical_replication_backup, "post_migrate_stage_status"
+ )
+ != "Pending"
+ ):
+ steps.extend(
+ get_logical_replication_backup_restoration_steps(
+ self.logical_replication_backup, "Post-Migrate"
+ )
+ )
+
+ if self.physical_backup_restoration:
+ steps.extend(get_physical_backup_restoration_steps(self.physical_backup_restoration))
+
+ if self.recover_job:
+ steps.extend(self.get_job_steps(self.recover_job, "Recover Site"))
+
+ if (
+ self.logical_replication_backup
+ and frappe.db.get_value(
+ "Logical Replication Backup", self.logical_replication_backup, "failover_stage_status"
+ )
+ != "Pending"
+ ):
+ steps.extend(
+ get_logical_replication_backup_restoration_steps(self.logical_replication_backup, "Failover")
+ )
+
+ if self.activate_site_job:
+ steps.extend(self.get_job_steps(self.activate_site_job, "Activate Site"))
+ return steps
+
+ def get_job_steps(self, job: str, stage: str):
+ agent_steps = frappe.get_all(
+ "Agent Job Step",
+ filters={"agent_job": job},
+ fields=["output", "step_name", "status", "name"],
+ order_by="creation asc",
)
- else:
- # Site is already on the source bench
+ return [
+ {
+ "name": step.get("name"),
+ "title": step.get("step_name"),
+ "status": step.get("status"),
+ "output": step.get("output"),
+ "stage": stage,
+ }
+ for step in agent_steps
+ ]
+
+ @frappe.whitelist()
+ def set_cause_of_failure_is_resolved(self):
+ frappe.db.set_value("Site Update", self.name, "cause_of_failure_is_resolved", 1)
+
+ @frappe.whitelist()
+ def set_status(self, status):
+ return self.update_status(self.name, status)
+
+ @classmethod
+ def update_status(cls, name, status):
+ if status == "Cancelled":
+ try:
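+                # Probe the row lock without waiting; if another transaction
+                # holds it, the update is likely underway and cancel is refused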
+ if (
+ _status := frappe.db.get_value("Site Update", name, "status", for_update=True, wait=False)
+ ) != "Scheduled":
+ frappe.throw(f"Cannot cancel a Site Update with status {_status}")
- if site.status_before_update == "Active":
- # Disable maintenance mode for active sites
- job = agent.update_site_recover(site)
- else:
- # Site is already on source bench and maintenance mode is on
- # No need to do anything
- site.reset_previous_status()
- if job:
- frappe.db.set_value("Site Update", site_update_name, "recover_job", job.name)
+ except (frappe.QueryTimeoutError, frappe.QueryDeadlockError):
+ frappe.throw(
+ "The update is probably underway. Please reload/refresh to get the latest status."
+ )
+
+ frappe.db.set_value("Site Update", name, "status", status)
+
+ @classmethod
+ def get_ongoing_update(cls, job_name: str) -> OngoingUpdate | None:
+ updates: list[OngoingUpdate] = frappe.get_all(
+ "Site Update",
+ fields=["name", "status", "destination_bench", "destination_group", "backup_type"],
+ filters={"update_job": job_name},
+ )
+ if updates:
+ return updates[0]
+ return None
+
+
+def update_status(name: str, status: str):
+ SiteUpdate.update_status(name, status)
+ if status in ("Success", "Failure", "Fatal", "Recovered"):
+ frappe.db.set_value("Site Update", name, "update_end", frappe.utils.now())
+ update_start = frappe.db.get_value("Site Update", name, "update_start")
+ if update_start:
+ frappe.db.set_value(
+ "Site Update",
+ name,
+ "update_duration",
+ frappe.utils.cint(
+ frappe.utils.time_diff_in_seconds(frappe.utils.now_datetime(), update_start)
+ ),
+ )
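+        # The Duration field stores seconds; an update that ran for 90 seconds
+        # ends with update_duration == 90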
+ if status in ["Success", "Recovered"]:
+ backup_type = frappe.db.get_value("Site Update", name, "backup_type")
+ if backup_type == "Physical":
+ # Remove the snapshot
+ frappe.enqueue_doc(
+ "Site Update",
+ name,
+ "delete_backup_snapshot",
+ enqueue_after_commit=True,
+ )
@site_cache(ttl=60)
-def benches_with_available_update():
+def benches_with_available_update(site=None, server=None):
+ site_bench = frappe.db.get_value("Site", site, "bench") if site else None
+ values = {}
+ if site:
+ values["site_bench"] = site_bench
+ if server:
+ values["server"] = server
source_benches_info = frappe.db.sql(
- """
+ f"""
SELECT sb.name AS source_bench, sb.candidate AS source_candidate, sb.server AS server, dcd.destination AS destination_candidate
FROM `tabBench` sb, `tabDeploy Candidate Difference` dcd
WHERE sb.status IN ('Active', 'Broken') AND sb.candidate = dcd.source
+ {"AND sb.name = %(site_bench)s" if site else ""}
+ {"AND sb.server = %(server)s" if server else ""}
""",
+ values=values,
as_dict=True,
)
- destination_candidates = list(
- set(d["destination_candidate"] for d in source_benches_info)
- )
+ destination_candidates = list(set(d["destination_candidate"] for d in source_benches_info))
destination_benches_info = frappe.get_all(
"Bench",
@@ -255,36 +819,49 @@ def benches_with_available_update():
return list(set([bench.source_bench for bench in updates_available_for_benches]))
-def sites_with_available_update():
- benches = benches_with_available_update()
- sites = frappe.get_all(
+@frappe.whitelist()
+def sites_with_available_update(server=None):
+ benches = benches_with_available_update(server=server)
+ return frappe.get_all(
"Site",
filters={
"status": ("in", ("Active", "Inactive", "Suspended")),
"bench": ("in", benches),
+ "only_update_at_specified_time": False, # will be taken care of by another scheduled job
+ "skip_auto_updates": False,
},
- fields=["name", "timezone", "bench", "status", "skip_auto_updates"],
+ fields=["name", "timezone", "bench", "server", "status"],
)
- return sites
def schedule_updates():
+ servers = frappe.get_all("Server", {"status": "Active"}, pluck="name")
+ for server in servers:
+ frappe.enqueue(
+ "press.press.doctype.site_update.site_update.schedule_updates_server",
+ server=server,
+ job_id=f"schedule_updates:{server}",
+ deduplicate=True,
+ queue="long",
+ )
+
+
+def schedule_updates_server(server):
# Prevent flooding the queue
queue_size = frappe.db.get_single_value("Press Settings", "auto_update_queue_size")
pending_update_count = frappe.db.count(
"Site Update",
{
"status": ("in", ("Pending", "Running")),
+ "server": server,
"creation": (">", frappe.utils.add_to_date(None, hours=-4)),
},
)
if pending_update_count > queue_size:
return
- sites = sites_with_available_update()
- sites = list(filter(should_not_skip_auto_updates, sites))
+ sites = sites_with_available_update(server)
sites = list(filter(is_site_in_deploy_hours, sites))
- sites = list(filter(should_try_update, sites))
# If a site can't be updated for some reason, then we shouldn't get stuck
# Shuffle sites list, to achieve this
@@ -297,13 +874,19 @@ def schedule_updates():
continue
if update_triggered_count > queue_size:
break
- if frappe.db.exists(
+ if not should_try_update(site) or frappe.db.exists(
"Site Update",
- {"site": site.name, "status": ("in", ("Pending", "Running", "Failure"))},
+ {
+ "site": site.name,
+ "status": ("in", ("Pending", "Running", "Failure", "Scheduled")),
+ },
):
continue
+
try:
site = frappe.get_doc("Site", site.name)
+ if site.site_migration_scheduled():
+ continue
site.schedule_update()
update_triggered_count += 1
frappe.db.commit()
@@ -313,23 +896,32 @@ def schedule_updates():
frappe.db.rollback()
-def should_not_skip_auto_updates(site):
- return not site.skip_auto_updates
-
-
def should_try_update(site):
source = frappe.db.get_value("Bench", site.bench, "candidate")
- destination = frappe.get_all(
- "Deploy Candidate Difference",
- fields=["destination"],
- filters={"source": source},
+ candidates = frappe.get_all(
+ "Deploy Candidate Difference", filters={"source": source}, pluck="destination"
+ )
+
+ source_apps = [app.app for app in frappe.get_cached_doc("Site", site.name).apps]
+ dest_apps = []
+ destinations = frappe.get_all(
+ "Bench",
+ ["name", "candidate"],
+ {
+ "candidate": ("in", candidates),
+ "status": "Active",
+ "server": site.server,
+ },
limit=1,
- )[0].destination
+ ignore_ifnull=True,
+ order_by="creation DESC",
+ )
+ # Most recent active bench is the destination bench
+ if not destinations:
+ return False
- source_apps = [app.app for app in frappe.get_doc("Site", site.name).apps]
- dest_apps = [
- app.app for app in frappe.get_doc("Bench", dict(candidate=destination)).apps
- ]
+ destination_bench = frappe.get_cached_doc("Bench", destinations[0].name)
+ dest_apps = [app.app for app in destination_bench.apps]
if set(source_apps) - set(dest_apps):
return False
@@ -339,7 +931,7 @@ def should_try_update(site):
{
"site": site.name,
"source_candidate": source,
- "destination_candidate": destination,
+ "destination_candidate": destination_bench.candidate,
"cause_of_failure_is_resolved": False,
},
)
@@ -359,46 +951,167 @@ def is_site_in_deploy_hours(site):
return False
-def process_update_site_job_update(job):
- updated_status = job.status
- site_update = frappe.get_all(
+def process_physical_backup_restoration_status_update(name: str):
+    site_update_name = frappe.db.exists(
"Site Update",
- fields=["name", "status", "destination_bench", "destination_group"],
- filters={"update_job": job.name},
+ {
+ "physical_backup_restoration": name,
+ },
)
+    if site_update_name:
+        site_update: SiteUpdate = frappe.get_doc("Site Update", site_update_name)
+ physical_backup_restoration: PhysicalBackupRestoration = frappe.get_doc(
+ "Physical Backup Restoration", name
+ )
+ if physical_backup_restoration.status in ["Success", "Failure"]:
+ site_update.trigger_recovery_job()
- if not site_update:
+
+def process_activate_site_job_update(job: AgentJob):
+ if job.reference_doctype != "Site Update" or not job.reference_name:
return
+ if job.status == "Success":
+ # If `Site Update` successful, then mark site as `Active`
+ if frappe.db.get_value(job.reference_doctype, job.reference_name, "status") == "Success":
+ frappe.get_doc("Site", job.site).reset_previous_status(fix_broken=True)
+ else:
+ # Set status to `status_before_update`
+ frappe.get_doc("Site", job.site).reset_previous_status()
+ elif job.status in ["Failure", "Delivery Failure"]:
+ # Mark the site as broken
+ frappe.db.set_value("Site", job.site, "status", "Broken")
+ update_status(job.reference_name, "Fatal")
- site_update = site_update[0]
+
+def process_deactivate_site_job_update(job):
+ if job.reference_doctype != "Site Update":
+ return
+ if job.status == "Success":
+ # proceed to backup stage
+ site_update = frappe.get_doc("Site Update", job.reference_name)
+ site_update.create_physical_backup()
+ elif job.status in ["Failure", "Delivery Failure"]:
+ # mark Site Update as Fatal
+ update_status(job.reference_name, "Fatal")
+ # Run the activate site to ensure site is active
+ site_update = frappe.get_doc("Site Update", job.reference_name)
+ site_update.activate_site()
+
+
+def update_site_bench_group_fields(job: AgentJob, site_update: OngoingUpdate):
+ if not job.site:
+ raise NotImplementedError
+ bench = frappe.db.get_value("Site", job.site, "bench")
+ move_site_step_status = frappe.db.get_value(
+ "Agent Job Step", {"step_name": "Move Site", "agent_job": job.name}, "status"
+ )
+ if bench != site_update.destination_bench and move_site_step_status == "Success":
+ frappe.db.set_value("Site", job.site, "bench", site_update.destination_bench)
+ frappe.db.set_value("Site", job.site, "group", site_update.destination_group)
+
+
+def reallocate_workers_if_moved(job: AgentJob, site_update: OngoingUpdate):
+ site_enable_step_status = frappe.db.get_value(
+ "Agent Job Step",
+ {"step_name": "Disable Maintenance Mode", "agent_job": job.name},
+ "status",
+ )
+ if site_enable_step_status == "Success":
+ SiteUpdate("Site Update", site_update.name).reallocate_workers()
+
+
+def update_touched_tables_step(job: AgentJob, site_update: OngoingUpdate):
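+    # The "Log Touched Tables" step's `data` payload (presumably a JSON list of
+    # touched table names) is stored verbatim on the Site Update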
+ log_touched_tables_step = frappe.db.get_value(
+ "Agent Job Step",
+ {"step_name": "Log Touched Tables", "agent_job": job.name},
+ ["status", "data"],
+ as_dict=True,
+ )
+ if log_touched_tables_step and log_touched_tables_step.status == "Success":
+ frappe.db.set_value("Site Update", site_update.name, "touched_tables", log_touched_tables_step.data)
+
+
+def handle_success(job: AgentJob, site_update: OngoingUpdate):
+ if site_update.backup_type == "Logical Replication":
+ SiteUpdate("Site Update", site_update.name).trigger_post_migration_stage_logical_replication_backup()
+ else:
+ frappe.get_doc("Site", job.site).reset_previous_status(fix_broken=True)
+
+
+def handle_fatal(job: AgentJob, site_update: OngoingUpdate):
+    if site_update.backup_type in ["Physical", "Logical Replication"]:
+        # For Physical / Logical Replication restores, just activate the site,
+        # since it was deactivated before the update started
+        SiteUpdate("Site Update", site_update.name).activate_site()
+    else:
+        # For Logical restores, we just need to reset the site status
+        frappe.get_doc("Site", job.site).reset_previous_status()
+
+
+def handle_failure(job: AgentJob, site_update: OngoingUpdate):
+ frappe.db.set_value("Site", job.site, "status", "Broken")
+ frappe.db.set_value(
+ "Site Update",
+ site_update.name,
+ "cause_of_failure_is_resolved",
+ job.failed_because_of_agent_update or job.failed_because_of_incident,
+ )
+ if not frappe.db.get_value("Site Update", site_update.name, "skipped_backups"):
+ doc = SiteUpdate("Site Update", site_update.name)
+ doc.trigger_recovery_job()
+ else:
+ # If user has done Site Update with skipped_backups
+ # We have nothing to do here
+ update_status(site_update.name, "Fatal")
+ SiteUpdate("Site Update", site_update.name).reallocate_workers()
+
+
+class OngoingUpdate(frappe._dict):
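+    """Typed view over the frappe._dict rows returned by SiteUpdate.get_ongoing_update."""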
+ name: str
+ status: str
+ destination_bench: str
+ destination_group: str
+ backup_type: str
+
+
+def process_update_site_job_update(job: AgentJob):
+ site_update = SiteUpdate.get_ongoing_update(job.name)
+ if not site_update:
+ return
+ updated_status = {
+        # For physical backups, the site was already deactivated first,
+        # so there is no point in setting the status back to Pending
+ "Pending": "Running" if site_update.backup_type in ["Physical", "Logical Replication"] else "Pending",
+ "Running": "Running",
+ "Success": "Success",
+ "Failure": "Failure",
+ "Delivery Failure": "Fatal",
+ }[job.status]
if updated_status != site_update.status:
- site_bench = frappe.db.get_value("Site", job.site, "bench")
- move_site_step_status = frappe.db.get_value(
- "Agent Job Step", {"step_name": "Move Site", "agent_job": job.name}, "status"
- )
- if site_bench != site_update.destination_bench and move_site_step_status == "Success":
- frappe.db.set_value("Site", job.site, "bench", site_update.destination_bench)
- frappe.db.set_value("Site", job.site, "group", site_update.destination_group)
+ update_site_bench_group_fields(job, site_update)
+ reallocate_workers_if_moved(job, site_update)
+ update_touched_tables_step(job, site_update)
- frappe.db.set_value("Site Update", site_update.name, "status", updated_status)
+ if not (site_update.backup_type == "Logical Replication" and updated_status == "Success"):
+ update_status(site_update.name, updated_status)
if updated_status == "Running":
frappe.db.set_value("Site", job.site, "status", "Updating")
elif updated_status == "Success":
- frappe.get_doc("Site", job.site).reset_previous_status()
- frappe.get_doc("Site Update", site_update.name).reallocate_workers()
+ handle_success(job, site_update)
+ elif updated_status == "Fatal":
+ handle_fatal(job, site_update)
elif updated_status == "Failure":
- frappe.db.set_value("Site", job.site, "status", "Broken")
- if not frappe.db.get_value("Site Update", site_update.name, "skipped_backups"):
- trigger_recovery_job(site_update.name)
+ handle_failure(job, site_update)
-def process_update_site_recover_job_update(job):
+def process_update_site_recover_job_update(job: AgentJob):
updated_status = {
- "Pending": "Pending",
- "Running": "Running",
+ "Pending": "Recovering",
+ "Running": "Recovering",
"Success": "Recovered",
"Failure": "Fatal",
+ "Delivery Failure": "Fatal",
}[job.status]
site_update = frappe.get_all(
"Site Update",
@@ -414,8 +1127,11 @@ def process_update_site_recover_job_update(job):
frappe.db.set_value("Site", job.site, "bench", site_update.source_bench)
frappe.db.set_value("Site", job.site, "group", site_update.group)
- frappe.db.set_value("Site Update", site_update.name, "status", updated_status)
- if updated_status == "Recovered":
+ update_status(site_update.name, updated_status)
+
+ if updated_status == "Recovering":
+ frappe.db.set_value("Site", job.site, "status", "Recovering")
+ elif updated_status == "Recovered":
frappe.get_doc("Site", job.site).reset_previous_status()
elif updated_status == "Fatal":
frappe.db.set_value("Site", job.site, "status", "Broken")
@@ -431,3 +1147,67 @@ def mark_stuck_updates_as_fatal():
"status",
"Fatal",
)
+
+
+def run_scheduled_updates():
+ updates = frappe.get_all(
+ "Site Update",
+ {"scheduled_time": ("<=", frappe.utils.now()), "status": "Scheduled"},
+ pluck="name",
+ )
+
+ for update in updates:
+ try:
+ site_update: SiteUpdate = frappe.get_doc("Site Update", update, for_update=True)
+ if site_update.status != "Scheduled":
+ continue
+
+ site_update.validate()
+ site_update.start()
+ frappe.db.commit()
+ except Exception:
+ log_error("Scheduled Site Update Error", update=update)
+ frappe.db.rollback()
+
+
+def on_doctype_update():
+ frappe.db.add_index("Site Update", ["site", "source_candidate", "destination_candidate"])
+ frappe.db.add_index("Site Update", ["server", "status"])
+
+
+def process_callback_from_logical_replication_backup(backup: "LogicalReplicationBackup"):  # noqa: C901
+    """
+    Site Update statuses handled here:
+
+    Pending > stage before migration
+        (while transitioning to Pending, the logical replication backup record is created)
+    Running > steps during the actual site migration
+    Success > migration done; run post-migrate steps
+    Failure > migration failed; run failover
+    """
+    site_update_name = frappe.db.exists("Site Update", {"logical_replication_backup": backup.name})
+    if not site_update_name:
+        return
+    site_update: "SiteUpdate" = frappe.get_doc("Site Update", site_update_name)
+ if site_update.status == "Pending":
+ if backup.pre_migrate_stage_status == "Success":
+ site_update.create_update_site_agent_request()
+ elif backup.pre_migrate_stage_status == "Failure":
+ site_update.activate_site(backup_failed=True)
+
+ elif site_update.status == "Running":
+        # The Site Update is in the Running stage because, after a successful
+        # migration, its status is kept as `Running` while the post-migrate
+        # stage is triggered
+
+ # Irrespective of post_migrate stage status,
+ # Site Update should be marked as Success
+ if backup.post_migrate_stage_status in ["Success", "Failure"]:
+ site_update.activate_site()
+
+ elif site_update.status == "Recovering":
+ if backup.failover_stage_status == "Success":
+ site_update.trigger_recovery_job()
+ elif backup.failover_stage_status == "Failure":
+ update_status(site_update.name, "Fatal")
+ frappe.db.set_value("Site", backup.site, "status", "Broken")
diff --git a/press/press/doctype/site_update/test_site_update.py b/press/press/doctype/site_update/test_site_update.py
index 76af259ab0d..bb414431f52 100644
--- a/press/press/doctype/site_update/test_site_update.py
+++ b/press/press/doctype/site_update/test_site_update.py
@@ -1,31 +1,42 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
import json
+from unittest.mock import MagicMock, Mock, patch
+
import frappe
from frappe.tests.utils import FrappeTestCase
-from press.press.doctype.agent_job.agent_job import AgentJob
+
+from press.press.doctype.agent_job.agent_job import AgentJob, poll_pending_jobs
+from press.press.doctype.agent_job.test_agent_job import fake_agent_job
from press.press.doctype.app.test_app import create_test_app
from press.press.doctype.app_release.test_app_release import create_test_app_release
from press.press.doctype.app_source.test_app_source import create_test_app_source
-from press.press.doctype.deploy.deploy import create_deploy_candidate_differences
+from press.press.doctype.deploy_candidate_difference.test_deploy_candidate_difference import (
+ create_test_deploy_candidate_differences,
+)
from press.press.doctype.release_group.test_release_group import (
create_test_release_group,
)
-
from press.press.doctype.site.test_site import create_test_bench, create_test_site
+from press.press.doctype.site_plan.test_site_plan import create_test_plan
+from press.press.doctype.site_update.site_update import SiteUpdate
+from press.press.doctype.subscription.test_subscription import create_test_subscription
+
-from unittest.mock import patch, Mock
+@patch.object(SiteUpdate, "start", new=Mock())
+def create_test_site_update(site: str, destination_group: str, status: str):
+ return frappe.get_doc(
+ dict(doctype="Site Update", site=site, destination_group=destination_group, status=status)
+ ).insert(ignore_if_duplicate=True)
-@patch("press.press.doctype.deploy.deploy.frappe.db.commit", new=Mock())
-@patch.object(AgentJob, "enqueue_http_request", new=Mock())
class TestSiteUpdate(FrappeTestCase):
def tearDown(self):
frappe.db.rollback()
+ @patch.object(AgentJob, "enqueue_http_request", new=Mock())
def test_update_of_v12_site_skips_search_index(self):
version = "Version 12"
app = create_test_app()
@@ -40,16 +51,15 @@ def test_update_of_v12_site_skips_search_index(self):
bench2 = create_test_bench(group=group, server=bench1.server)
self.assertNotEqual(bench1, bench2)
- create_deploy_candidate_differences(bench2) # for site update to be available
+ create_test_deploy_candidate_differences(bench2.candidate) # for site update to be available
site = create_test_site(bench=bench1.name)
site.schedule_update()
agent_job = frappe.get_last_doc("Agent Job", dict(job_type=("like", "Update Site %")))
- self.assertLess(
- dict(skip_search_index=False).items(), json.loads(agent_job.request_data).items()
- )
+ self.assertLess(dict(skip_search_index=False).items(), json.loads(agent_job.request_data).items())
+ @patch.object(AgentJob, "enqueue_http_request", new=Mock())
def test_update_of_non_v12_site_doesnt_skip_search_index(self):
version = "Version 13"
app = create_test_app()
@@ -64,16 +74,15 @@ def test_update_of_non_v12_site_doesnt_skip_search_index(self):
bench2 = create_test_bench(group=group, server=bench1.server)
self.assertNotEqual(bench1, bench2)
- create_deploy_candidate_differences(bench2) # for site update to be available
+ create_test_deploy_candidate_differences(bench2.candidate) # for site update to be available
site = create_test_site(bench=bench1.name)
site.schedule_update()
agent_job = frappe.get_last_doc("Agent Job", dict(job_type=("like", "Update Site %")))
- self.assertLess(
- dict(skip_search_index=True).items(), json.loads(agent_job.request_data).items()
- )
+ self.assertLess(dict(skip_search_index=True).items(), json.loads(agent_job.request_data).items())
+ @patch.object(AgentJob, "enqueue_http_request", new=Mock())
def test_site_update_throws_when_destination_doesnt_have_all_the_apps_in_the_site(
self,
):
@@ -89,7 +98,7 @@ def test_site_update_throws_when_destination_doesnt_have_all_the_apps_in_the_sit
bench2.apps.pop()
bench2.save()
- create_deploy_candidate_differences(bench2) # for site update to be available
+ create_test_deploy_candidate_differences(bench2.candidate) # for site update to be available
site = create_test_site(bench=bench1.name)
@@ -98,3 +107,52 @@ def test_site_update_throws_when_destination_doesnt_have_all_the_apps_in_the_sit
f".*apps installed on {site.name}: app., app.$",
site.schedule_update,
)
+
+ @patch("press.press.doctype.server.server.frappe.db.commit", new=MagicMock)
+ def test_site_update_callback_reallocates_workers_after_disable_maintenance_mode_job(
+ self,
+ ):
+ app1 = create_test_app() # frappe
+ app2 = create_test_app("app2", "App 2")
+ app3 = create_test_app("app3", "App 3")
+
+ group = create_test_release_group([app1, app2, app3])
+ bench1 = create_test_bench(group=group)
+ bench2 = create_test_bench(group=group, server=bench1.server)
+
+ create_test_deploy_candidate_differences(bench2.candidate) # for site update to be available
+
+ site = create_test_site(bench=bench1.name)
+ plan = create_test_plan(site.doctype, cpu_time=8)
+ create_test_subscription(site.name, plan.name, site.team)
+ site.reload()
+
+ server = frappe.get_doc("Server", bench1.server)
+ server.disable_agent_job_auto_retry = True
+ server.save()
+ server.auto_scale_workers()
+ bench1.reload()
+ bench2.reload()
+ self.assertEqual(site.bench, bench1.name)
+ self.assertGreater(bench1.gunicorn_workers, 2)
+ self.assertGreater(bench1.background_workers, 1)
+ self.assertEqual(bench2.gunicorn_workers, 2)
+ self.assertEqual(bench2.background_workers, 1)
+
+ with fake_agent_job(
+ "Update Site Pull",
+ "Success",
+ steps=[{"name": "Disable Maintenance Mode", "status": "Success"}],
+ ):
+ site.schedule_update()
+ poll_pending_jobs()
+
+ bench1.reload()
+ bench2.reload()
+ site.reload()
+
+ self.assertEqual(site.bench, bench2.name)
+ self.assertEqual(bench1.gunicorn_workers, 2)
+ self.assertEqual(bench1.background_workers, 1)
+ self.assertGreater(bench2.gunicorn_workers, 2)
+ self.assertGreater(bench2.background_workers, 1)
diff --git a/press/press/doctype/site_usage/site_usage.json b/press/press/doctype/site_usage/site_usage.json
index 5d1ba9e79cb..354b384d014 100644
--- a/press/press/doctype/site_usage/site_usage.json
+++ b/press/press/doctype/site_usage/site_usage.json
@@ -9,7 +9,9 @@
"database",
"backups",
"public",
- "private"
+ "private",
+ "database_free_tables",
+ "database_free"
],
"fields": [
{
@@ -45,10 +47,22 @@
"fieldtype": "Int",
"label": "Backups",
"read_only": 1
+ },
+ {
+ "fieldname": "database_free_tables",
+ "fieldtype": "Code",
+ "label": "Database Free Tables",
+ "read_only": 1
+ },
+ {
+ "fieldname": "database_free",
+ "fieldtype": "Int",
+ "label": "Database Free",
+ "read_only": 1
}
],
"links": [],
- "modified": "2020-11-11 12:50:10.146277",
+ "modified": "2023-10-19 14:31:13.374011",
"modified_by": "Administrator",
"module": "Press",
"name": "Site Usage",
@@ -70,5 +84,6 @@
"quick_entry": 1,
"sort_field": "modified",
"sort_order": "DESC",
+ "states": [],
"title_field": "site"
}
\ No newline at end of file
diff --git a/press/press/doctype/site_usage/site_usage.py b/press/press/doctype/site_usage/site_usage.py
index 27a3692704c..01680c75967 100644
--- a/press/press/doctype/site_usage/site_usage.py
+++ b/press/press/doctype/site_usage/site_usage.py
@@ -10,6 +10,23 @@
class SiteUsage(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ backups: DF.Int
+ database: DF.Int
+ database_free: DF.Int
+ database_free_tables: DF.Code | None
+ private: DF.Int
+ public: DF.Int
+ site: DF.Link | None
+ # end: auto-generated types
+
@staticmethod
def clear_old_logs(days=60):
table = frappe.qb.DocType("Site Usage")
diff --git a/press/press/doctype/site_usage/test_site_usage.py b/press/press/doctype/site_usage/test_site_usage.py
index eaa4fdfe450..af7c2992529 100644
--- a/press/press/doctype/site_usage/test_site_usage.py
+++ b/press/press/doctype/site_usage/test_site_usage.py
@@ -1,11 +1,10 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestSiteUsage(unittest.TestCase):
+class TestSiteUsage(FrappeTestCase):
pass
diff --git a/press/press/doctype/site_user/__init__.py b/press/press/doctype/site_user/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/site_user/site_user.js b/press/press/doctype/site_user/site_user.js
new file mode 100644
index 00000000000..df159386e8e
--- /dev/null
+++ b/press/press/doctype/site_user/site_user.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Site User", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/site_user/site_user.json b/press/press/doctype/site_user/site_user.json
new file mode 100644
index 00000000000..13fc8fd5fb5
--- /dev/null
+++ b/press/press/doctype/site_user/site_user.json
@@ -0,0 +1,59 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-01-16 15:02:28.939437",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "user",
+ "site",
+ "enabled"
+ ],
+ "fields": [
+ {
+ "fieldname": "user",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "User",
+ "reqd": 1
+ },
+ {
+ "fieldname": "site",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Site",
+ "options": "Site",
+ "reqd": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "enabled",
+ "fieldtype": "Check",
+ "label": "Enabled"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2025-02-19 16:35:29.861290",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Site User",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/site_user/site_user.py b/press/press/doctype/site_user/site_user.py
new file mode 100644
index 00000000000..95587425a0f
--- /dev/null
+++ b/press/press/doctype/site_user/site_user.py
@@ -0,0 +1,47 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+import frappe
+from frappe.model.document import Document
+
+
+class SiteUser(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ enabled: DF.Check
+ site: DF.Link
+ user: DF.Data
+ # end: auto-generated types
+
+ def login_to_site(self):
+ """Login to the site."""
+ if not self.enabled:
+ frappe.throw("User is disabled")
+
+ site = frappe.get_doc("Site", self.site)
+ return site.login_as_user(self.user)
+
+
+def create_user_for_product_site(site, data):
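+    """Sync Site User records from a product site's analytics payload; expects
+    data["analytics"]["users"] entries carrying `email` and `enabled`."""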
+ analytics = data["analytics"]
+ users_data = analytics.get("users", [])
+ for user_data in users_data:
+ user_mail = user_data.get("email")
+ enabled = user_data.get("enabled")
+ if frappe.db.exists("Site User", {"site": site, "user": user_mail}):
+ user = frappe.db.get_value(
+ "Site User", {"site": site, "user": user_mail}, ["name", "enabled"], as_dict=True
+ )
+ if user.enabled != enabled:
+ frappe.db.set_value("Site User", user.name, "enabled", enabled)
+ else:
+ user = frappe.get_doc(
+ {"doctype": "Site User", "site": site, "user": user_mail, "enabled": enabled}
+ )
+ user.insert(ignore_permissions=True)
diff --git a/press/press/doctype/site_user/test_site_user.py b/press/press/doctype/site_user/test_site_user.py
new file mode 100644
index 00000000000..ea5307a2c02
--- /dev/null
+++ b/press/press/doctype/site_user/test_site_user.py
@@ -0,0 +1,14 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class IntegrationTestSiteUser(FrappeTestCase):
+ """
+ Integration tests for SiteUser.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/site_user_session/__init__.py b/press/press/doctype/site_user_session/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/site_user_session/site_user_session.js b/press/press/doctype/site_user_session/site_user_session.js
new file mode 100644
index 00000000000..6d7a0cf1428
--- /dev/null
+++ b/press/press/doctype/site_user_session/site_user_session.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Site User Session", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/site_user_session/site_user_session.json b/press/press/doctype/site_user_session/site_user_session.json
new file mode 100644
index 00000000000..615fc8da2a9
--- /dev/null
+++ b/press/press/doctype/site_user_session/site_user_session.json
@@ -0,0 +1,66 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-01-26 21:54:39.279966",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "user",
+ "session_id",
+ "otp",
+ "verified",
+ "otp_generated_at"
+ ],
+ "fields": [
+ {
+ "fieldname": "user",
+ "fieldtype": "Data",
+ "label": "User"
+ },
+ {
+ "fieldname": "otp",
+ "fieldtype": "Data",
+ "label": "OTP"
+ },
+ {
+ "fieldname": "session_id",
+ "fieldtype": "Data",
+ "label": "Session ID"
+ },
+ {
+ "default": "0",
+ "fieldname": "verified",
+ "fieldtype": "Check",
+ "label": "Verified"
+ },
+ {
+ "fieldname": "otp_generated_at",
+ "fieldtype": "Datetime",
+ "label": "OTP Generated at"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2025-02-17 12:19:46.639929",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Site User Session",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/site_user_session/site_user_session.py b/press/press/doctype/site_user_session/site_user_session.py
new file mode 100644
index 00000000000..16c825d2aec
--- /dev/null
+++ b/press/press/doctype/site_user_session/site_user_session.py
@@ -0,0 +1,61 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+from __future__ import annotations
+
+import frappe
+from frappe.model.document import Document
+
+
+class SiteUserSession(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ otp: DF.Data | None
+ otp_generated_at: DF.Datetime | None
+ session_id: DF.Data | None
+ user: DF.Data | None
+ verified: DF.Check
+ # end: auto-generated types
+
+ def send_otp(self):
+ """Send OTP to the user for site login."""
+
+ from press.utils.otp import generate_otp
+
+ self.otp = generate_otp()
+ self.session_id = frappe.generate_hash()
+ self.otp_generated_at = frappe.utils.now_datetime()
+ if frappe.conf.developer_mode and frappe.local.dev_server:
+ self.otp = "111111"
+ self.save()
+
+ if frappe.conf.developer_mode:
+ print(f"\nOTP for {self.user} for site login:")
+ print()
+ print(self.otp)
+ print()
+ return
+
+ subject = f"{self.otp} - OTP for Frappe Cloud Site Login"
+ args = {}
+
+ args.update(
+ {
+ "otp": self.otp,
+ "image_path": "https://github.com/frappe/gameplan/assets/9355208/447035d0-0686-41d2-910a-a3d21928ab94",
+ }
+ )
+
+ frappe.sendmail(
+ recipients=self.user,
+ subject=subject,
+ template="verify_account_for_site_login",
+ args=args,
+ now=True,
+ )
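
Note: this hunk only adds OTP generation; the matching verification lives elsewhere in the PR. A minimal sketch of what the expiry check could look like, assuming a hypothetical 10-minute validity window (the window is not taken from this diff):

    from datetime import datetime, timedelta

    OTP_VALIDITY = timedelta(minutes=10)  # hypothetical; not defined in this diff

    def is_otp_valid(stored_otp: str, submitted_otp: str, generated_at: datetime) -> bool:
        """Accept the OTP only if it matches and was generated recently."""
        if datetime.now() - generated_at > OTP_VALIDITY:
            return False
        # compare as strings so a dev-mode integer OTP cannot slip through
        return str(stored_otp) == str(submitted_otp)
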
diff --git a/press/press/doctype/site_user_session/test_site_user_session.py b/press/press/doctype/site_user_session/test_site_user_session.py
new file mode 100644
index 00000000000..9b29248ac7e
--- /dev/null
+++ b/press/press/doctype/site_user_session/test_site_user_session.py
@@ -0,0 +1,14 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class IntegrationTestSiteUserSession(FrappeTestCase):
+ """
+ Integration tests for SiteUserSession.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/sql_playground_log/__init__.py b/press/press/doctype/sql_playground_log/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/sql_playground_log/sql_playground_log.js b/press/press/doctype/sql_playground_log/sql_playground_log.js
new file mode 100644
index 00000000000..ad5c2dc597b
--- /dev/null
+++ b/press/press/doctype/sql_playground_log/sql_playground_log.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2024, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("SQL Playground Log", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/sql_playground_log/sql_playground_log.json b/press/press/doctype/sql_playground_log/sql_playground_log.json
new file mode 100644
index 00000000000..16f7f78336c
--- /dev/null
+++ b/press/press/doctype/sql_playground_log/sql_playground_log.json
@@ -0,0 +1,100 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2024-10-16 16:18:15.770106",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "site",
+ "column_break_mgop",
+ "team",
+ "section_break_fnmu",
+ "query",
+ "committed"
+ ],
+ "fields": [
+ {
+ "fieldname": "site",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Site",
+ "options": "Site",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_mgop",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "label": "Team",
+ "options": "Team",
+ "read_only": 1
+ },
+ {
+ "fieldname": "section_break_fnmu",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "query",
+ "fieldtype": "Small Text",
+ "label": "Query",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "committed",
+ "fieldtype": "Check",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Committed",
+ "read_only": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2024-10-24 12:18:15.580077",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "SQL Playground Log",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1
+ },
+ {
+ "create": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1
+ }
+ ],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/sql_playground_log/sql_playground_log.py b/press/press/doctype/sql_playground_log/sql_playground_log.py
new file mode 100644
index 00000000000..146323c1565
--- /dev/null
+++ b/press/press/doctype/sql_playground_log/sql_playground_log.py
@@ -0,0 +1,33 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+from __future__ import annotations
+
+from frappe.model.document import Document
+
+from press.overrides import get_permission_query_conditions_for_doctype
+
+
+class SQLPlaygroundLog(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ committed: DF.Check
+ query: DF.SmallText | None
+ site: DF.Link | None
+ team: DF.Link | None
+ # end: auto-generated types
+
+ DOCTYPE = "SQL Playground Log"
+ dashboard_fields = (
+ "site",
+ "query",
+ "committed",
+ )
+
+
+get_permission_query_conditions = get_permission_query_conditions_for_doctype("SQL Playground Log")
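
Note: get_permission_query_conditions_for_doctype is a press helper that scopes list queries to the caller's team. A simplified sketch of the pattern it implements (the team lookup is an assumption; the real helper may differ):

    import frappe

    def get_permission_query_conditions_for_doctype(doctype: str):
        """Sketch: return a callable producing a SQL filter for list views."""
        def conditions(user=None):
            user = user or frappe.session.user
            team = frappe.db.get_value("Team", {"user": user}, "name")
            return f"(`tab{doctype}`.`team` = {frappe.db.escape(team)})"
        return conditions
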
diff --git a/press/press/doctype/sql_playground_log/test_sql_playground_log.py b/press/press/doctype/sql_playground_log/test_sql_playground_log.py
new file mode 100644
index 00000000000..8649ad82ce0
--- /dev/null
+++ b/press/press/doctype/sql_playground_log/test_sql_playground_log.py
@@ -0,0 +1,14 @@
+# Copyright (c) 2024, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests import UnitTestCase
+
+
+class TestSQLPlaygroundLog(UnitTestCase):
+ """
+ Unit tests for SQLPlaygroundLog.
+ Use this class for testing individual functions and methods.
+ """
+
+ pass
diff --git a/press/press/doctype/ssh_certificate/ssh_certificate.py b/press/press/doctype/ssh_certificate/ssh_certificate.py
index 8723262d87e..c70079d54bb 100644
--- a/press/press/doctype/ssh_certificate/ssh_certificate.py
+++ b/press/press/doctype/ssh_certificate/ssh_certificate.py
@@ -8,12 +8,44 @@
import re
import shlex
import subprocess
+from typing import TYPE_CHECKING
import frappe
from frappe.model.document import Document
+from press.utils import log_error
+
+if TYPE_CHECKING:
+ from press.press.doctype.ssh_certificate_authority.ssh_certificate_authority import (
+ SSHCertificateAuthority,
+ )
+
class SSHCertificate(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ certificate_details: DF.Code | None
+ certificate_type: DF.Literal["User", "Host"]
+ group: DF.Link | None
+ key_type: DF.Data | None
+ serial_number: DF.Int
+ ssh_certificate: DF.Code | None
+ ssh_certificate_authority: DF.Link | None
+ ssh_certificate_authority_public_key: DF.Code | None
+ ssh_fingerprint: DF.Data | None
+ ssh_public_key: DF.Code
+ user: DF.Link | None
+ user_ssh_key: DF.Link | None
+ valid_until: DF.Datetime | None
+ validity: DF.Literal["1h", "3h", "6h", "30d"]
+ # end: auto-generated types
+
def validate(self):
self.validate_public_key()
self.validate_existing_certificates()
@@ -52,7 +84,11 @@ def validate_certificate_authority(self):
def validate_existing_certificates(self):
if frappe.get_all(
"SSH Certificate",
- {"user": self.user, "valid_until": [">", frappe.utils.now()], "group": self.group},
+ {
+ "user_ssh_key": self.user_ssh_key,
+ "valid_until": [">", frappe.utils.now()],
+ "group": self.group,
+ },
):
frappe.throw("A valid certificate already exists.")
@@ -69,7 +105,9 @@ def after_insert(self):
self.save()
def generate_certificate(self):
- ca = frappe.get_doc("SSH Certificate Authority", self.ssh_certificate_authority)
+ ca: "SSHCertificateAuthority" = frappe.get_doc(
+ "SSH Certificate Authority", self.ssh_certificate_authority
+ )
ca.sign(
self.user,
[self.group],
@@ -80,7 +118,13 @@ def generate_certificate(self):
self.read_certificate()
def run(self, command):
- return subprocess.check_output(shlex.split(command)).decode()
+ try:
+ return subprocess.check_output(
+ shlex.split(command), stderr=subprocess.STDOUT
+ ).decode()
+ except subprocess.CalledProcessError as e:
+ log_error("Command failed", output={e.output.decode()}, doc=self)
+ raise
def extract_certificate_details(self):
self.certificate_details = self.run(f"ssh-keygen -Lf {self.certificate_file}")
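
Note: routing stderr into the captured output means a failed ssh-keygen call surfaces the tool's own error text instead of a bare non-zero exit. The same pattern in isolation:

    import shlex
    import subprocess

    def run(command: str) -> str:
        """Run a command, merging stderr into stdout so failures are self-describing."""
        try:
            return subprocess.check_output(shlex.split(command), stderr=subprocess.STDOUT).decode()
        except subprocess.CalledProcessError as e:
            # e.output now carries the tool's error message, e.g. from ssh-keygen
            raise RuntimeError(f"command failed ({e.returncode}): {e.output.decode()}") from e
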
diff --git a/press/press/doctype/ssh_certificate/test_ssh_certificate.py b/press/press/doctype/ssh_certificate/test_ssh_certificate.py
index cf88009dea1..51412491e7f 100644
--- a/press/press/doctype/ssh_certificate/test_ssh_certificate.py
+++ b/press/press/doctype/ssh_certificate/test_ssh_certificate.py
@@ -1,10 +1,10 @@
# Copyright (c) 2023, Frappe and Contributors
# See license.txt
-import unittest
from unittest.mock import Mock, patch
import frappe
+from frappe.tests.utils import FrappeTestCase
from press.press.doctype.agent_job.agent_job import AgentJob
from press.press.doctype.site.test_site import create_test_bench
@@ -17,8 +17,10 @@
@patch.object(SSHCertificate, "generate_certificate", new=Mock())
@patch.object(SSHCertificate, "extract_certificate_details", new=Mock())
@patch.object(AgentJob, "enqueue_http_request", new=Mock())
-class TestSSHCertificate(unittest.TestCase):
+class TestSSHCertificate(FrappeTestCase):
def setUp(self):
+ super().setUp()
+
self.team = create_test_press_admin_team()
self.user = self.team.user
@@ -37,7 +39,7 @@ def test_press_admin_user_can_create_ssh_certificate(self):
"certificate_type": "User",
"group": group,
"user": frappe.session.user,
- "user_ssh_key": user_ssh_key,
+ "user_ssh_key": user_ssh_key.name,
"validity": "6h",
}
).insert()
diff --git a/press/press/doctype/ssh_certificate_authority/ssh_certificate_authority.py b/press/press/doctype/ssh_certificate_authority/ssh_certificate_authority.py
index ab4273d9605..da350228785 100644
--- a/press/press/doctype/ssh_certificate_authority/ssh_certificate_authority.py
+++ b/press/press/doctype/ssh_certificate_authority/ssh_certificate_authority.py
@@ -1,19 +1,36 @@
# Copyright (c) 2022, Frappe and contributors
# For license information, please see license.txt
-import frappe
-
import os
import shlex
import shutil
import subprocess
-import docker
+import docker
+import frappe
from frappe.model.document import Document
from frappe.utils import cint
+from press.utils import log_error
+
class SSHCertificateAuthority(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ directory: DF.Data
+ docker_image: DF.Data | None
+ docker_image_repository: DF.Data | None
+ docker_image_tag: DF.Int
+ key_fingerprint: DF.Code | None
+ public_key: DF.Code | None
+ # end: auto-generated types
+
def after_insert(self):
self.setup_directory()
self.generate_key_pair()
@@ -22,18 +39,27 @@ def after_insert(self):
self.save()
def setup_directory(self):
- os.mkdir(self.directory)
+ if not os.path.exists(self.directory):
+ os.mkdir(self.directory)
def run(self, command, directory, environment=None):
- return subprocess.check_output(
- shlex.split(command), cwd=directory, env=environment
- ).decode()
+ try:
+ return subprocess.check_output(
+ shlex.split(command), cwd=directory, env=environment, stderr=subprocess.STDOUT
+ ).decode()
+ except subprocess.CalledProcessError as e:
+ log_error("Command failed", output={e.output.decode()}, doc=self)
+ raise
def generate_key_pair(self):
- domain = frappe.db.get_value("Press Settings", None, "domain")
- self.run(
- f"ssh-keygen -C ca@{domain} -t rsa -b 4096 -f ca -N ''", directory=self.directory
- )
+ if not os.path.exists(self.private_key_file) and not os.path.exists(
+ self.public_key_file
+ ):
+ domain = frappe.db.get_value("Press Settings", None, "domain")
+ self.run(
+ f"ssh-keygen -C ca@{domain} -t rsa -b 4096 -f ca -N ''", directory=self.directory
+ )
+
os.chmod(self.public_key_file, 0o400)
os.chmod(self.private_key_file, 0o400)
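
Note: the guard above only regenerates the CA key pair when *both* halves are missing; if exactly one file exists, keygen is skipped and the chmod on the missing half will raise. A standalone sketch of the idempotent flow, with that guard tightened (paths and comment string are illustrative):

    import os
    import subprocess

    def generate_key_pair(directory: str, comment: str = "ca@example.com") -> None:
        private_key = os.path.join(directory, "ca")
        public_key = os.path.join(directory, "ca.pub")
        if not (os.path.exists(private_key) or os.path.exists(public_key)):
            subprocess.check_call(
                ["ssh-keygen", "-C", comment, "-t", "rsa", "-b", "4096", "-f", "ca", "-N", ""],
                cwd=directory,
            )
        # read-only for the owner, matching the diff
        os.chmod(public_key, 0o400)
        os.chmod(private_key, 0o400)
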
diff --git a/press/press/doctype/ssh_certificate_authority/test_ssh_certificate_authority.py b/press/press/doctype/ssh_certificate_authority/test_ssh_certificate_authority.py
index b15f2e0bac5..ba34ec991f5 100644
--- a/press/press/doctype/ssh_certificate_authority/test_ssh_certificate_authority.py
+++ b/press/press/doctype/ssh_certificate_authority/test_ssh_certificate_authority.py
@@ -2,8 +2,8 @@
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestSSHCertificateAuthority(unittest.TestCase):
+class TestSSHCertificateAuthority(FrappeTestCase):
pass
diff --git a/press/press/doctype/ssh_key/ssh_key.json b/press/press/doctype/ssh_key/ssh_key.json
index 4889520c51b..94c80268447 100644
--- a/press/press/doctype/ssh_key/ssh_key.json
+++ b/press/press/doctype/ssh_key/ssh_key.json
@@ -7,6 +7,8 @@
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
+ "enabled",
+ "default",
"public_key"
],
"fields": [
@@ -17,11 +19,23 @@
"label": "Public Key",
"reqd": 1,
"set_only_once": 1
+ },
+ {
+ "default": "1",
+ "fieldname": "enabled",
+ "fieldtype": "Check",
+ "label": "Enabled"
+ },
+ {
+ "default": "0",
+ "fieldname": "default",
+ "fieldtype": "Check",
+ "label": "Default"
}
],
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2021-09-08 12:43:09.290608",
+ "modified": "2024-02-05 15:49:59.460166",
"modified_by": "Administrator",
"module": "Press",
"name": "SSH Key",
@@ -43,5 +57,6 @@
],
"sort_field": "modified",
"sort_order": "DESC",
+ "states": [],
"track_changes": 1
}
\ No newline at end of file
diff --git a/press/press/doctype/ssh_key/ssh_key.py b/press/press/doctype/ssh_key/ssh_key.py
index 4706117b5ff..1afe7554887 100644
--- a/press/press/doctype/ssh_key/ssh_key.py
+++ b/press/press/doctype/ssh_key/ssh_key.py
@@ -6,4 +6,17 @@
class SSHKey(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ default: DF.Check
+ enabled: DF.Check
+ public_key: DF.Text
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/ssh_key/test_ssh_key.py b/press/press/doctype/ssh_key/test_ssh_key.py
index 3f7c126c09a..db74d5f7c04 100644
--- a/press/press/doctype/ssh_key/test_ssh_key.py
+++ b/press/press/doctype/ssh_key/test_ssh_key.py
@@ -2,7 +2,7 @@
# See license.txt
import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
from press.press.doctype.ssh_key.ssh_key import SSHKey
@@ -17,5 +17,5 @@ def create_test_ssh_key() -> SSHKey:
).insert(ignore_if_duplicate=True)
-class TestSSHKey(unittest.TestCase):
+class TestSSHKey(FrappeTestCase):
pass
diff --git a/press/press/doctype/storage_integration_bucket/storage_integration_bucket.py b/press/press/doctype/storage_integration_bucket/storage_integration_bucket.py
index 82095d507d9..20b1dd4e586 100644
--- a/press/press/doctype/storage_integration_bucket/storage_integration_bucket.py
+++ b/press/press/doctype/storage_integration_bucket/storage_integration_bucket.py
@@ -6,4 +6,18 @@
class StorageIntegrationBucket(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ bucket_name: DF.Data | None
+ minio_host_ip: DF.Data | None
+ minio_server_on: DF.Link | None
+ region: DF.Data | None
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/storage_integration_bucket/test_storage_integration_bucket.py b/press/press/doctype/storage_integration_bucket/test_storage_integration_bucket.py
index 4c5a929b29d..714b85479f3 100644
--- a/press/press/doctype/storage_integration_bucket/test_storage_integration_bucket.py
+++ b/press/press/doctype/storage_integration_bucket/test_storage_integration_bucket.py
@@ -2,8 +2,8 @@
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestStorageIntegrationBucket(unittest.TestCase):
+class TestStorageIntegrationBucket(FrappeTestCase):
pass
diff --git a/press/press/doctype/storage_integration_subscription/storage_integration_subscription.py b/press/press/doctype/storage_integration_subscription/storage_integration_subscription.py
index 19968f02a51..b4574c0006a 100644
--- a/press/press/doctype/storage_integration_subscription/storage_integration_subscription.py
+++ b/press/press/doctype/storage_integration_subscription/storage_integration_subscription.py
@@ -2,15 +2,38 @@
# For license information, please see license.txt
import json
+import math
+
import boto3
import frappe
-import math
+from frappe.model.document import Document
from frappe.utils.password import get_decrypted_password
+
from press.agent import Agent
-from frappe.model.document import Document
+from press.api.site import protected
class StorageIntegrationSubscription(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ access_key: DF.Data | None
+ enabled: DF.Check
+ limit: DF.Data | None
+ minio_server_on: DF.Data | None
+ policy_json: DF.Code | None
+ policy_name: DF.Data | None
+ secret_key: DF.Data | None
+ site: DF.Link | None
+ team: DF.Data | None
+ usage: DF.Data | None
+ # end: auto-generated types
+
SERVER_TYPE = "Proxy Server"
def after_insert(self):
@@ -31,9 +54,7 @@ def set_secret_key(self):
self.secret_key = frappe.generate_hash(length=40)
def set_policy_json(self):
- bucket_name = frappe.db.get_value(
- "Storage Integration Bucket", self.minio_server_on, "bucket_name"
- )
+ bucket_name = frappe.db.get_value("Storage Integration Bucket", self.minio_server_on, "bucket_name")
data = {
"Version": "2012-10-17",
"Statement": [
@@ -96,15 +117,13 @@ def create_after_insert(doc, method):
return
if doc.app == "storage_integration":
- sub_exists = frappe.db.exists(
- {"doctype": "Storage Integration Subscription", "site": doc.site}
- )
+ sub_exists = frappe.db.exists({"doctype": "Storage Integration Subscription", "site": doc.site})
if sub_exists:
return
- frappe.get_doc(
- {"doctype": "Storage Integration Subscription", "site": doc.site}
- ).insert(ignore_permissions=True)
+ frappe.get_doc({"doctype": "Storage Integration Subscription", "site": doc.site}).insert(
+ ignore_permissions=True
+ )
if doc.app == "email_delivery_service":
# TODO: add a separate doctype to track email service setup completion
@@ -121,9 +140,7 @@ def monitor_storage():
"Storage Integration Subscription", fields=["site", "name"], filters={"enabled": 1}
)
access_key = frappe.db.get_value("Add On Settings", None, "aws_access_key")
- secret_key = get_decrypted_password(
- "Add On Settings", "Add On Settings", "aws_secret_key"
- )
+ secret_key = get_decrypted_password("Add On Settings", "Add On Settings", "aws_secret_key")
for sub in active_subs:
usage, unit_u = get_size("bucket_name", sub["site"], access_key, secret_key)
@@ -147,9 +164,7 @@ def monitor_storage():
def get_size(bucket, path, access_key, secret_key):
- s3 = boto3.resource(
- "s3", aws_access_key_id=access_key, aws_secret_access_key=secret_key
- )
+ s3 = boto3.resource("s3", aws_access_key_id=access_key, aws_secret_access_key=secret_key)
my_bucket = s3.Bucket(bucket)
total_size = 0
@@ -162,13 +177,14 @@ def get_size(bucket, path, access_key, secret_key):
def convert_size(size_bytes):
if size_bytes == 0:
return 0, "B"
- i = int(math.floor(math.log(size_bytes, 1024)))
+ i = math.floor(math.log(size_bytes, 1024))
p = math.pow(1024, i)
s = round(size_bytes / p, 2)
return s, size_name[i]
@frappe.whitelist()
+@protected("Storage Integration Subscription")
def toggle_user_status(docname, status):
doc = frappe.get_doc("Storage Integration Subscription", docname)
status = int(status)
@@ -186,15 +202,13 @@ def get_analytics(**data):
from press.api.developer.marketplace import get_subscription_status
if get_subscription_status(data["secret_key"]) != "Active":
- return
+ return None
site, available = frappe.db.get_value(
"Storage Integration Subscription", data["access_key"], ["site", "limit"]
)
access_key = frappe.db.get_value("Add On Settings", None, "aws_access_key")
- secret_key = get_decrypted_password(
- "Add On Settings", "Add On Settings", "aws_secret_key"
- )
+ secret_key = get_decrypted_password("Add On Settings", "Add On Settings", "aws_secret_key")
used, unit_u = get_size(data["bucket"], site, access_key, secret_key)
return {"used": f"{used} {unit_u}", "available": available}
diff --git a/press/press/doctype/storage_integration_subscription/test_storage_integration_subscription.py b/press/press/doctype/storage_integration_subscription/test_storage_integration_subscription.py
index 12876850149..5700ef5fa1f 100644
--- a/press/press/doctype/storage_integration_subscription/test_storage_integration_subscription.py
+++ b/press/press/doctype/storage_integration_subscription/test_storage_integration_subscription.py
@@ -2,8 +2,8 @@
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestStorageIntegrationSubscription(unittest.TestCase):
+class TestStorageIntegrationSubscription(FrappeTestCase):
pass
diff --git a/press/press/doctype/stripe_micro_charge_record/stripe_micro_charge_record.py b/press/press/doctype/stripe_micro_charge_record/stripe_micro_charge_record.py
index d55ca0b4268..e6691350e64 100644
--- a/press/press/doctype/stripe_micro_charge_record/stripe_micro_charge_record.py
+++ b/press/press/doctype/stripe_micro_charge_record/stripe_micro_charge_record.py
@@ -2,12 +2,26 @@
# For license information, please see license.txt
import frappe
-
from frappe.model.document import Document
+
from press.utils.billing import get_stripe
class StripeMicroChargeRecord(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ has_been_refunded: DF.Check
+ stripe_payment_intent_id: DF.Data
+ stripe_payment_method: DF.Link | None
+ team: DF.Link | None
+ # end: auto-generated types
+
def after_insert(self):
# Auto-refund
self.refund()
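
Note: the micro charge is refunded as soon as the record is inserted. A minimal sketch of a full refund against a payment intent with the official stripe client (the field name stripe_payment_intent_id comes from the typed block above; the refund helper itself is not shown in this hunk):

    import stripe  # official stripe-python client; stripe.api_key must be configured

    def refund_micro_charge(stripe_payment_intent_id: str) -> None:
        """Refund the whole micro charge; omitting `amount` refunds in full."""
        stripe.Refund.create(payment_intent=stripe_payment_intent_id)
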
diff --git a/press/press/doctype/stripe_payment_event/stripe_payment_event.py b/press/press/doctype/stripe_payment_event/stripe_payment_event.py
index d86db7c6500..81262e8c93b 100644
--- a/press/press/doctype/stripe_payment_event/stripe_payment_event.py
+++ b/press/press/doctype/stripe_payment_event/stripe_payment_event.py
@@ -1,14 +1,33 @@
# Copyright (c) 2021, Frappe and contributors
# For license information, please see license.txt
-
-import frappe
+from __future__ import annotations
from datetime import datetime
+
+import frappe
from frappe.model.document import Document
+
+from press.api.billing import get_stripe
from press.utils.billing import convert_stripe_money
class StripePaymentEvent(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ event_type: DF.Literal["Finalized", "Failed", "Succeeded"]
+ invoice: DF.Link | None
+ payment_status: DF.Literal["Paid", "Unpaid"]
+ stripe_invoice_id: DF.Data | None
+ stripe_invoice_object: DF.Code | None
+ team: DF.Link | None
+ # end: auto-generated types
+
def after_insert(self):
if self.event_type == "Finalized":
self.handle_finalized()
@@ -19,6 +38,8 @@ def after_insert(self):
def handle_finalized(self):
invoice = frappe.get_doc("Invoice", self.invoice, for_update=True)
+ if invoice.status == "Paid":
+ return
stripe_invoice = frappe.parse_json(self.stripe_invoice_object)
invoice.update(
@@ -26,20 +47,38 @@ def handle_finalized(self):
"amount_paid": convert_stripe_money(stripe_invoice["amount_paid"]),
"stripe_invoice_url": stripe_invoice["hosted_invoice_url"],
"status": self.payment_status,
+ "stripe_payment_intent_id": stripe_invoice["payment_intent"],
}
)
invoice.save()
def handle_payment_succeeded(self):
invoice = frappe.get_doc("Invoice", self.invoice, for_update=True)
+
+ if invoice.status == "Paid" and invoice.amount_paid == 0:
+ # check if invoice is already refunded
+ stripe = get_stripe()
+ inv = stripe.Invoice.retrieve(invoice.stripe_invoice_id)
+ payment_intent = stripe.PaymentIntent.retrieve(inv.payment_intent)
+ is_refunded = payment_intent["charges"]["data"][0]["refunded"]
+ if is_refunded:
+ return
+ # if the fc invoice is already paid via credits and the stripe payment succeeded
+ # issue a refund of the invoice payment
+ invoice.refund(reason="Payment done via credits")
+ invoice.add_comment(
+ text=(
+ f"Stripe Invoice {invoice.stripe_invoice_id} refunded because"
+ " payment is done via credits and card both."
+ )
+ )
+ return
stripe_invoice = frappe.parse_json(self.stripe_invoice_object)
team = frappe.get_doc("Team", self.team)
invoice.update(
{
- "payment_date": datetime.fromtimestamp(
- stripe_invoice["status_transitions"]["paid_at"]
- ),
+ "payment_date": datetime.fromtimestamp(stripe_invoice["status_transitions"]["paid_at"]),
"status": "Paid",
"amount_paid": stripe_invoice["amount_paid"] / 100,
"stripe_invoice_url": stripe_invoice["hosted_invoice_url"],
@@ -54,16 +93,40 @@ def handle_payment_succeeded(self):
invoice.submit()
- if frappe.db.count("Invoice", {"team": team.name, "status": "Unpaid"}) < 2:
- # unsuspend sites only if all invoices are paid
- team.unsuspend_sites(
- reason=f"Unsuspending sites because of successful payment of {self.invoice}"
+ if (
+ frappe.db.count(
+ "Invoice",
+ {
+ "team": team.name,
+ "status": "Unpaid",
+ "type": "Subscription",
+ "docstatus": ("<", 2),
+ },
)
+ == 0
+ ):
+ # unsuspend sites only if all invoices are paid
+ team.unsuspend_sites(reason=f"Unsuspending sites because of successful payment of {self.invoice}")
def handle_payment_failed(self):
invoice = frappe.get_doc("Invoice", self.invoice, for_update=True)
if invoice.status == "Paid":
+ if invoice.amount_paid == 0:
+ # check if invoice is already voided
+ stripe = get_stripe()
+ inv = stripe.Invoice.retrieve(invoice.stripe_invoice_id)
+ if inv.status == "void":
+ return
+ # if the fc invoice is already paid via credits and the stripe payment failed
+ # mark the stripe invoice as void
+ invoice.change_stripe_invoice_status("Void")
+ invoice.add_comment(
+ text=(
+ f"Stripe Invoice {invoice.stripe_invoice_id} voided because"
+ " payment is done via credits."
+ )
+ )
return
stripe_invoice = frappe.parse_json(self.stripe_invoice_object)
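
Note: the new branches all hinge on one condition: an invoice that is already "Paid" with a zero card amount was settled via credits, so a late card success gets refunded and a late card failure voids the stripe invoice. The decision table as a standalone sketch (labels are descriptive only):

    def action_for_stripe_event(invoice_status: str, amount_paid: float, event: str) -> str:
        """Sketch of the branching added in this hunk."""
        paid_via_credits = invoice_status == "Paid" and amount_paid == 0
        if paid_via_credits:
            return "refund stripe charge" if event == "succeeded" else "void stripe invoice"
        if event == "succeeded":
            return "mark invoice paid; unsuspend sites once nothing is unpaid"
        return "record failure and wait for the next payment attempt"
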
diff --git a/press/press/doctype/stripe_payment_event/test_stripe_payment_event.py b/press/press/doctype/stripe_payment_event/test_stripe_payment_event.py
index 9c49c9e0067..9d0f3eb312a 100644
--- a/press/press/doctype/stripe_payment_event/test_stripe_payment_event.py
+++ b/press/press/doctype/stripe_payment_event/test_stripe_payment_event.py
@@ -2,8 +2,8 @@
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestStripePaymentEvent(unittest.TestCase):
+class TestStripePaymentEvent(FrappeTestCase):
pass
diff --git a/press/press/doctype/stripe_payment_method/stripe_payment_method.js b/press/press/doctype/stripe_payment_method/stripe_payment_method.js
index 1d5936855f0..f1f0eb45a78 100644
--- a/press/press/doctype/stripe_payment_method/stripe_payment_method.js
+++ b/press/press/doctype/stripe_payment_method/stripe_payment_method.js
@@ -9,5 +9,17 @@ frappe.ui.form.on('Stripe Payment Method', {
doctype: 'Stripe Payment Method',
};
frappe.contacts.render_address_and_contact(frm);
+
+ if (frm.doc.stripe_mandate_id) {
+ frm.add_custom_button('Check Mandate Status', () => {
+ frm.call('check_mandate_status').then((r) => {
+ if (r.message) {
+ frappe.msgprint(`Mandate status: ${r.message}`);
+ } else {
+ frappe.msgprint('No mandate found or status is not available.');
+ }
+ });
+ });
+ }
},
});
diff --git a/press/press/doctype/stripe_payment_method/stripe_payment_method.json b/press/press/doctype/stripe_payment_method/stripe_payment_method.json
index 10824036a89..af3a0e76c23 100644
--- a/press/press/doctype/stripe_payment_method/stripe_payment_method.json
+++ b/press/press/doctype/stripe_payment_method/stripe_payment_method.json
@@ -11,12 +11,16 @@
"last_4",
"expiry_month",
"expiry_year",
+ "brand",
"stripe_customer_id",
"stripe_payment_method_id",
"is_default",
"column_break_9",
"address_html",
- "is_verified_with_micro_charge"
+ "is_verified_with_micro_charge",
+ "stripe_setup_intent_id",
+ "stripe_mandate_id",
+ "stripe_mandate_reference"
],
"fields": [
{
@@ -84,11 +88,31 @@
"in_list_view": 1,
"label": "Verified with Micro Charge",
"read_only": 1
+ },
+ {
+ "fieldname": "brand",
+ "fieldtype": "Data",
+ "label": "Card Brand"
+ },
+ {
+ "fieldname": "stripe_mandate_id",
+ "fieldtype": "Data",
+ "label": "Stripe Mandate ID"
+ },
+ {
+ "fieldname": "stripe_setup_intent_id",
+ "fieldtype": "Data",
+ "label": "Stripe Setup Intent ID"
+ },
+ {
+ "fieldname": "stripe_mandate_reference",
+ "fieldtype": "Data",
+ "label": "Stripe Mandate Reference"
}
],
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2022-10-17 17:20:24.776635",
+ "modified": "2024-06-26 10:29:16.449579",
"modified_by": "Administrator",
"module": "Press",
"name": "Stripe Payment Method",
diff --git a/press/press/doctype/stripe_payment_method/stripe_payment_method.py b/press/press/doctype/stripe_payment_method/stripe_payment_method.py
index cd238ec6f39..24ab14704b7 100644
--- a/press/press/doctype/stripe_payment_method/stripe_payment_method.py
+++ b/press/press/doctype/stripe_payment_method/stripe_payment_method.py
@@ -1,19 +1,89 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
-
+from __future__ import annotations
import frappe
+from frappe.contacts.address_and_contact import load_address_and_contact
from frappe.model.document import Document
+
from press.api.billing import get_stripe
-from frappe.contacts.address_and_contact import load_address_and_contact
+from press.api.client import dashboard_whitelist
from press.overrides import get_permission_query_conditions_for_doctype
+from press.utils import log_error
+from press.utils.telemetry import capture
class StripePaymentMethod(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ brand: DF.Data | None
+ expiry_month: DF.Data | None
+ expiry_year: DF.Data | None
+ is_default: DF.Check
+ is_verified_with_micro_charge: DF.Check
+ last_4: DF.Data | None
+ name_on_card: DF.Data | None
+ stripe_customer_id: DF.Data | None
+ stripe_mandate_id: DF.Data | None
+ stripe_mandate_reference: DF.Data | None
+ stripe_payment_method_id: DF.Data | None
+ stripe_setup_intent_id: DF.Data | None
+ team: DF.Link
+ # end: auto-generated types
+
+ dashboard_fields = (
+ "is_default",
+ "expiry_month",
+ "expiry_year",
+ "brand",
+ "name_on_card",
+ "last_4",
+ "stripe_mandate_id",
+ )
+
def onload(self):
load_address_and_contact(self)
+ @staticmethod
+ def get_list_query(query, filters=None, **list_args):
+ StripeWebhookLog = frappe.qb.DocType("Stripe Webhook Log")
+ StripePaymentMethod = frappe.qb.DocType("Stripe Payment Method")
+
+ query = (
+ query.select(StripeWebhookLog.stripe_payment_method)
+ .left_join(StripeWebhookLog)
+ .on(
+ (StripeWebhookLog.stripe_payment_method == StripePaymentMethod.name)
+ & (StripeWebhookLog.event_type == "payment_intent.payment_failed")
+ )
+ .distinct()
+ )
+
+ return query # noqa: RET504
+
+ @dashboard_whitelist()
+ def delete(self):
+ if webhook_logs := frappe.get_all(
+ "Stripe Webhook Log",
+ filters={"stripe_payment_method": self.name},
+ pluck="name",
+ ):
+ frappe.db.set_value(
+ "Stripe Webhook Log",
+ {"name": ("in", webhook_logs)},
+ "stripe_payment_method",
+ None,
+ )
+
+ super().delete()
+
+ @dashboard_whitelist()
def set_default(self):
stripe = get_stripe()
# set default payment method on stripe
@@ -30,6 +100,13 @@ def set_default(self):
self.is_default = 1
self.save()
frappe.db.set_value("Team", self.team, "default_payment_method", self.name)
+ if not frappe.db.get_value("Team", self.team, "payment_mode"):
+ frappe.db.set_value("Team", self.team, "payment_mode", "Card")
+ account_request_name = frappe.get_value("Team", self.team, "account_request")
+ if account_request_name:
+ account_request = frappe.get_doc("Account Request", account_request_name)
+ if not (account_request.is_saas_signup() or account_request.invited_by_parent_team):
+ capture("added_card_or_prepaid_credits", "fc_signup", account_request.email)
def on_trash(self):
self.remove_address_links()
@@ -68,10 +145,24 @@ def remove_micro_charge_links(self):
)
def after_delete(self):
+ try:
+ stripe = get_stripe()
+ stripe.PaymentMethod.detach(self.stripe_payment_method_id)
+ except Exception as e:
+ log_error("Failed to detach payment method from stripe", data=e)
+
+ @frappe.whitelist()
+ def check_mandate_status(self):
+ if not self.stripe_mandate_id:
+ return False
+
stripe = get_stripe()
- stripe.PaymentMethod.detach(self.stripe_payment_method_id)
+ mandate = stripe.Mandate.retrieve(self.stripe_mandate_id)
+ return mandate.status
+
+
+get_permission_query_conditions = get_permission_query_conditions_for_doctype("Stripe Payment Method")
-get_permission_query_conditions = get_permission_query_conditions_for_doctype(
- "Stripe Payment Method"
-)
+def on_doctype_update():
+ frappe.db.add_index("Stripe Payment Method", ["team", "is_verified_with_micro_charge"])
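
Note: check_mandate_status is a thin wrapper over the Mandate API. Called standalone it reduces to the following (assuming a configured stripe client):

    import stripe  # stripe.api_key must be set

    def check_mandate_status(mandate_id: str | None):
        if not mandate_id:
            return False
        # status is one of "active", "inactive" or "pending"
        return stripe.Mandate.retrieve(mandate_id).status
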
diff --git a/press/press/doctype/stripe_payment_method/test_stripe_payment_method.py b/press/press/doctype/stripe_payment_method/test_stripe_payment_method.py
index b42ccd71cfc..2e48a54783b 100644
--- a/press/press/doctype/stripe_payment_method/test_stripe_payment_method.py
+++ b/press/press/doctype/stripe_payment_method/test_stripe_payment_method.py
@@ -1,11 +1,10 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestStripePaymentMethod(unittest.TestCase):
+class TestStripePaymentMethod(FrappeTestCase):
pass
diff --git a/press/press/doctype/stripe_webhook_log/patches/add_payment_method_for_failed_events.py b/press/press/doctype/stripe_webhook_log/patches/add_payment_method_for_failed_events.py
new file mode 100644
index 00000000000..cfceecbdf99
--- /dev/null
+++ b/press/press/doctype/stripe_webhook_log/patches/add_payment_method_for_failed_events.py
@@ -0,0 +1,37 @@
+# Copyright (c) 2024, Frappe Technologies Pvt. Ltd. and Contributors
+# For license information, please see license.txt
+
+import frappe
+
+
+def execute():
+ logs = frappe.get_all(
+ "Stripe Webhook Log",
+ {"event_type": "payment_intent.payment_failed"},
+ ["name", "payload", "customer_id"],
+ )
+
+ for log in logs:
+ payload = frappe.parse_json(log.payload)
+ if payment_method_id := (
+ payload.get("data", {})
+ .get("object", {})
+ .get("last_payment_error", {})
+ .get("payment_method", {})
+ .get("id")
+ ):
+ stripe_payment_method = frappe.db.get_value(
+ "Stripe Payment Method",
+ {
+ "stripe_customer_id": log.customer_id,
+ "stripe_payment_method_id": payment_method_id,
+ },
+ "name",
+ )
+ frappe.db.set_value(
+ "Stripe Webhook Log",
+ log.name,
+ "stripe_payment_method",
+ stripe_payment_method,
+ update_modified=False,
+ )
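
Note: the patch digs four levels into the webhook payload, defaulting every level to {} so a missing key short-circuits to None instead of raising. The extraction in isolation:

    def extract_failed_payment_method_id(payload: dict) -> str | None:
        """Same defensive chain as the patch; a key that is present but set
        to None would still raise, which matches the patch's behaviour."""
        return (
            payload.get("data", {})
            .get("object", {})
            .get("last_payment_error", {})
            .get("payment_method", {})
            .get("id")
        )

    # extract_failed_payment_method_id({}) is None
    # extract_failed_payment_method_id(
    #     {"data": {"object": {"last_payment_error": {"payment_method": {"id": "pm_1"}}}}}
    # ) == "pm_1"
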
diff --git a/press/press/doctype/stripe_webhook_log/stripe_webhook_log.json b/press/press/doctype/stripe_webhook_log/stripe_webhook_log.json
index cd46bace427..bf601e5d834 100644
--- a/press/press/doctype/stripe_webhook_log/stripe_webhook_log.json
+++ b/press/press/doctype/stripe_webhook_log/stripe_webhook_log.json
@@ -7,6 +7,14 @@
"engine": "InnoDB",
"field_order": [
"event_type",
+ "invoice",
+ "team",
+ "column_break_hywj",
+ "customer_id",
+ "invoice_id",
+ "stripe_payment_method",
+ "stripe_payment_intent_id",
+ "section_break_ecbt",
"payload"
],
"fields": [
@@ -22,10 +30,55 @@
"fieldtype": "Code",
"label": "Payload",
"read_only": 1
+ },
+ {
+ "fieldname": "customer_id",
+ "fieldtype": "Data",
+ "label": "Stripe Customer ID"
+ },
+ {
+ "fieldname": "invoice",
+ "fieldtype": "Link",
+ "label": "Invoice",
+ "options": "Invoice",
+ "search_index": 1
+ },
+ {
+ "fieldname": "invoice_id",
+ "fieldtype": "Data",
+ "label": "Stripe Invoice ID"
+ },
+ {
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "label": "Team",
+ "options": "Team",
+ "search_index": 1
+ },
+ {
+ "fieldname": "column_break_hywj",
+ "fieldtype": "Column Break"
+ },
+ {
+ "description": "This is only shown when a payment intent fails",
+ "fieldname": "stripe_payment_method",
+ "fieldtype": "Link",
+ "label": "Stripe Payment Method",
+ "options": "Stripe Payment Method",
+ "read_only": 1
+ },
+ {
+ "fieldname": "section_break_ecbt",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "stripe_payment_intent_id",
+ "fieldtype": "Data",
+ "label": "Stripe Payment Intent ID"
}
],
"links": [],
- "modified": "2022-01-19 22:50:30.082445",
+ "modified": "2024-11-29 10:44:55.011202",
"modified_by": "Administrator",
"module": "Press",
"name": "Stripe Webhook Log",
diff --git a/press/press/doctype/stripe_webhook_log/stripe_webhook_log.py b/press/press/doctype/stripe_webhook_log/stripe_webhook_log.py
index a146dd68473..b5877b1765c 100644
--- a/press/press/doctype/stripe_webhook_log/stripe_webhook_log.py
+++ b/press/press/doctype/stripe_webhook_log/stripe_webhook_log.py
@@ -1,12 +1,15 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2022, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
+
+import re
+from datetime import datetime
import frappe
+from frappe.model.document import Document
import press.utils
from press.api.billing import get_stripe
-from frappe.model.document import Document
class InvalidStripeWebhookEvent(Exception):
@@ -14,7 +17,106 @@ class InvalidStripeWebhookEvent(Exception):
class StripeWebhookLog(Document):
- pass
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ customer_id: DF.Data | None
+ event_type: DF.Data | None
+ invoice: DF.Link | None
+ invoice_id: DF.Data | None
+ payload: DF.Code | None
+ stripe_payment_intent_id: DF.Data | None
+ stripe_payment_method: DF.Link | None
+ team: DF.Link | None
+ # end: auto-generated types
+
+ def before_insert(self):
+ payload = frappe.parse_json(self.payload)
+ self.name = payload.get("id")
+ self.event_type = payload.get("type")
+ customer_id = get_customer_id(payload)
+ invoice_id = get_invoice_id(payload)
+ self.stripe_payment_intent_id = ""
+
+ if self.event_type in [
+ "payment_intent.succeeded",
+ "payment_intent.failed",
+ "payment_intent.requires_action",
+ ]:
+ self.stripe_payment_intent_id = get_intent_id(payload)
+
+ if customer_id:
+ self.customer_id = customer_id
+ self.team = frappe.db.get_value("Team", {"stripe_customer_id": customer_id}, "name")
+
+ if invoice_id:
+ self.invoice_id = invoice_id
+ self.invoice = frappe.db.get_value("Invoice", {"stripe_invoice_id": invoice_id}, "name")
+
+ if self.event_type == "payment_intent.payment_failed":
+ payment_method = (
+ payload.get("data", {}).get("object", {}).get("last_payment_error", {}).get("payment_method")
+ )
+ if payment_method:
+ payment_method_id = payment_method.get("id")
+
+ self.stripe_payment_method = frappe.db.get_value(
+ "Stripe Payment Method",
+ {"stripe_customer_id": customer_id, "stripe_payment_method_id": payment_method_id},
+ "name",
+ )
+
+ if (
+ self.event_type == "invoice.payment_failed"
+ and self.invoice
+ and payload.get("data", {}).get("object", {}).get("next_payment_attempt")
+ ):
+ next_payment_attempt_date = datetime.fromtimestamp(
+ payload.get("data", {}).get("object", {}).get("next_payment_attempt")
+ ).strftime("%Y-%m-%d")
+ frappe.db.set_value(
+ "Invoice",
+ self.invoice,
+ "next_payment_attempt_date",
+ frappe.utils.getdate(next_payment_attempt_date),
+ )
+
+
+def allow_insert_log(event):
+ if isinstance(event, str):
+ event = frappe.parse_json(event)
+ evt_id = event.get("id")
+ invoice_id = get_invoice_id(event)
+ intent_id = get_intent_id(event)
+
+ description = None
+ if event.get("type") == "payment_intent.succeeded":
+ description = event["data"]["object"]["description"]
+
+ if not frappe.db.exists("Stripe Webhook Log", evt_id):
+ return True
+
+ if invoice_id and frappe.db.get_value("Invoice", {"stripe_invoice_id": invoice_id}, "status") == "Paid":
+ # Do not insert duplicate webhook logs for invoices that are already paid
+ return False
+
+ if (
+ description
+ and description == "Prepaid Credits"
+ and intent_id
+ and frappe.db.exists(
+ "Invoice", {"type": "Prepaid Credits", "status": "Paid", "stripe_payment_intent_id": intent_id}
+ )
+ ):
+ return False
+
+ frappe.delete_doc("Stripe Webhook Log", evt_id)
+ return True
@frappe.whitelist(allow_guest=True)
@@ -29,19 +131,52 @@ def stripe_webhook_handler():
# set user to Administrator, to not have to do ignore_permissions everywhere
frappe.set_user("Administrator")
+ if not allow_insert_log(event):
+ return
frappe.get_doc(
- {
- "doctype": "Stripe Webhook Log",
- "name": event.id,
- "payload": frappe.as_json(form_dict),
- "event_type": event.type,
- }
- ).insert(ignore_if_duplicate=True)
+ doctype="Stripe Webhook Log",
+ payload=frappe.as_json(event),
+ ).insert()
except Exception:
frappe.db.rollback()
press.utils.log_error(title="Stripe Webhook Handler", stripe_event_id=form_dict.id)
frappe.set_user(current_user)
- raise Exception
+ raise
+
+
+def get_intent_id(form_dict):
+ try:
+ form_dict_str = frappe.as_json(form_dict)
+ intent_id = re.findall(r"pi_\w+", form_dict_str)
+ if intent_id:
+ return intent_id[1]
+ return None
+ except IndexError:
+ return None
+ except Exception:
+ frappe.log_error(title="Failed to capture intent id from stripe webhook log")
+
+
+def get_customer_id(form_dict):
+ try:
+ form_dict_str = frappe.as_json(form_dict)
+ customer_id = re.search(r"cus_\w+", form_dict_str)
+ if customer_id:
+ return customer_id.group(0)
+ return None
+ except Exception:
+ frappe.log_error(title="Failed to capture customer id from stripe webhook log")
+
+
+def get_invoice_id(form_dict):
+ try:
+ form_dict_str = frappe.as_json(form_dict)
+ invoice_id = re.search(r"in_\w+", form_dict_str)
+ if invoice_id:
+ return invoice_id.group(0)
+ return None
+ except Exception:
+ frappe.log_error(title="Failed to capture invoice id from stripe webhook log")
def parse_payload(payload, signature):
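
Note: the extractors above share one idea: serialise the whole payload and regex-match the Stripe id prefix, which works regardless of where the id is nested. A generalised sketch:

    import json
    import re

    def extract_stripe_id(event: dict, prefix: str) -> str | None:
        """First token of the form `<prefix>_...` anywhere in the payload."""
        match = re.search(rf"{prefix}_\w+", json.dumps(event))
        return match.group(0) if match else None

    # extract_stripe_id({"customer": "cus_ABC123"}, "cus") == "cus_ABC123"
    # extract_stripe_id({"lines": [{"invoice": "in_XYZ"}]}, "in") == "in_XYZ"
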
diff --git a/press/press/doctype/stripe_webhook_log/test_stripe_webhook_log.py b/press/press/doctype/stripe_webhook_log/test_stripe_webhook_log.py
index dc000bc5669..8eeb54829a0 100644
--- a/press/press/doctype/stripe_webhook_log/test_stripe_webhook_log.py
+++ b/press/press/doctype/stripe_webhook_log/test_stripe_webhook_log.py
@@ -1,11 +1,10 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestStripeWebhookLog(unittest.TestCase):
+class TestStripeWebhookLog(FrappeTestCase):
pass
diff --git a/press/press/doctype/subscription/subscription.json b/press/press/doctype/subscription/subscription.json
index 0edd71d461f..6fa969b023f 100644
--- a/press/press/doctype/subscription/subscription.json
+++ b/press/press/doctype/subscription/subscription.json
@@ -1,6 +1,6 @@
{
"actions": [],
- "autoname": "SUB-.YYYY.-.#####.",
+ "autoname": "hash",
"creation": "2020-10-19 19:01:35.606131",
"doctype": "DocType",
"editable_grid": 1,
@@ -10,9 +10,13 @@
"team",
"document_type",
"document_name",
+ "plan_type",
"plan",
"interval",
- "marketplace_app_subscription"
+ "site",
+ "marketplace_app_subscription",
+ "additional_storage",
+ "secret_key"
],
"fields": [
{
@@ -21,7 +25,8 @@
"in_list_view": 1,
"label": "Team",
"options": "Team",
- "reqd": 1
+ "reqd": 1,
+ "search_index": 1
},
{
"fieldname": "document_type",
@@ -41,24 +46,24 @@
},
{
"default": "Daily",
- "fetch_from": "plan.interval",
"fieldname": "interval",
"fieldtype": "Select",
"in_list_view": 1,
"label": "Interval",
- "options": "Daily\nMonthly"
+ "options": "Hourly\nDaily\nMonthly"
},
{
"default": "1",
"fieldname": "enabled",
"fieldtype": "Check",
- "label": "Enabled"
+ "label": "Enabled",
+ "search_index": 1
},
{
"fieldname": "plan",
- "fieldtype": "Link",
+ "fieldtype": "Dynamic Link",
"label": "Plan",
- "options": "Plan",
+ "options": "plan_type",
"reqd": 1
},
{
@@ -66,6 +71,34 @@
"fieldtype": "Link",
"label": "Marketplace App Subscription",
"options": "Marketplace App Subscription"
+ },
+ {
+ "allow_in_quick_entry": 1,
+ "fieldname": "plan_type",
+ "fieldtype": "Link",
+ "label": "Plan Type",
+ "options": "DocType",
+ "reqd": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "site",
+ "fieldtype": "Link",
+ "label": "Site",
+ "options": "Site",
+ "search_index": 1
+ },
+ {
+ "depends_on": "eval:doc.plan_type === \"Server Storage Plan\"",
+ "fieldname": "additional_storage",
+ "fieldtype": "Data",
+ "label": "Additional Storage"
+ },
+ {
+ "fieldname": "secret_key",
+ "fieldtype": "Data",
+ "label": "Secret Key",
+ "search_index": 1
}
],
"index_web_pages_for_search": 1,
@@ -75,11 +108,11 @@
"link_fieldname": "subscription"
}
],
- "modified": "2023-06-27 21:57:40.400786",
+ "modified": "2025-12-02 14:15:27.501756",
"modified_by": "Administrator",
"module": "Press",
"name": "Subscription",
- "naming_rule": "Expression (old style)",
+ "naming_rule": "Random",
"owner": "Administrator",
"permissions": [
{
@@ -108,6 +141,8 @@
}
],
"quick_entry": 1,
+ "row_format": "Dynamic",
+ "rows_threshold_for_grid_search": 20,
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
diff --git a/press/press/doctype/subscription/subscription.py b/press/press/doctype/subscription/subscription.py
index ae94d9b65a3..3fa36f69633 100644
--- a/press/press/doctype/subscription/subscription.py
+++ b/press/press/doctype/subscription/subscription.py
@@ -1,79 +1,213 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
-from typing import List
-from press.press.doctype.plan.plan import Plan
+from typing import TYPE_CHECKING
import frappe
+import rq
from frappe.model.document import Document
-from press.utils import log_error
+from frappe.query_builder.functions import Coalesce, Count
+from frappe.utils import cint, flt
+
from press.overrides import get_permission_query_conditions_for_doctype
+from press.press.doctype.site_plan.site_plan import SitePlan
+from press.utils import log_error
+from press.utils.jobs import has_job_timeout_exceeded
+
+if TYPE_CHECKING:
+ from frappe.types import DF
class Subscription(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ additional_storage: DF.Data | None
+ document_name: DF.DynamicLink
+ document_type: DF.Link
+ enabled: DF.Check
+ interval: DF.Literal["Hourly", "Daily", "Monthly"]
+ marketplace_app_subscription: DF.Link | None
+ plan: DF.DynamicLink
+ plan_type: DF.Link
+ secret_key: DF.Data | None
+ site: DF.Link | None
+ team: DF.Link
+ # end: auto-generated types
+
+ dashboard_fields = (
+ "site",
+ "enabled",
+ "document_type",
+ "document_name",
+ "team",
+ )
+
+ @staticmethod
+ def get_list_query(query, **list_args):
+ Subscription = frappe.qb.DocType("Subscription")
+ UsageRecord = frappe.qb.DocType("Usage Record")
+ Plan = frappe.qb.DocType("Marketplace App Plan")
+ price_field = Plan.price_inr if frappe.local.team().currency == "INR" else Plan.price_usd
+ filters = list_args.get("filters", {})
+
+ query = (
+ frappe.qb.from_(Subscription)
+ .join(Plan)
+ .on(Subscription.plan == Plan.name)
+ .left_join(UsageRecord)
+ .on(UsageRecord.subscription == Subscription.name)
+ .groupby(Subscription.name)
+ .select(
+ Subscription.site,
+ Subscription.enabled,
+ price_field.as_("price"),
+ Coalesce(Count(UsageRecord.subscription), 0).as_("active_for"),
+ )
+ .where(
+ (Subscription.document_type == "Marketplace App")
+ & (Subscription.document_name == filters["document_name"])
+ & (Subscription.site != "")
+ & (price_field > 0)
+ )
+ .limit(list_args["limit"])
+ .offset(list_args["start"])
+ )
+
+ if filters.get("enabled"):
+ enabled = 1 if filters["enabled"] == "Active" else 0
+ query = query.where(Subscription.enabled == enabled)
+
+ return query.run(as_dict=True)
+
+ def before_validate(self):
+ if not self.secret_key and self.document_type == "Marketplace App":
+ self.secret_key = frappe.utils.generate_hash(length=40)
+ if not frappe.db.exists("Site Config Key", {"key": f"sk_{self.document_name}"}):
+ frappe.get_doc(
+ doctype="Site Config Key", internal=True, key=f"sk_{self.document_name}"
+ ).insert(ignore_permissions=True)
+
def validate(self):
self.validate_duplicate()
def on_update(self):
+ if self.plan_type in ["Server Storage Plan", "Server Snapshot Plan"]:
+ return
+
doc = self.get_subscribed_document()
plan_field = doc.meta.get_field("plan")
- if not (plan_field and plan_field.options == "Plan"):
+ if not (plan_field and plan_field.options in ["Site Plan", "Server Plan", "Marketplace App Plan"]):
return
if self.enabled and doc.plan != self.plan:
doc.plan = self.plan
doc.save()
+
+ if doc.doctype == "Server" and doc.is_unified_server:
+ # Update database server plan for sanity in case of unified servers
+ frappe.db.set_value("Database Server", doc.database_server, "plan", self.plan)
+
if not self.enabled and doc.plan:
doc.plan = ""
doc.save()
def enable(self):
+ if self.enabled:
+ return
try:
self.enabled = True
- self.save()
+ self.save(ignore_permissions=True)
except Exception:
frappe.log_error(title="Enable Subscription Error")
def disable(self):
+ if not self.enabled:
+ return
try:
self.enabled = False
- self.save()
+ self.save(ignore_permissions=True)
except Exception:
frappe.log_error(title="Disable Subscription Error")
+ def is_valid_subscription(self, date: DF.Date | None = None) -> bool:
+ if not date:
+ date = frappe.utils.getdate()
+
+ if frappe.utils.getdate(self.creation) <= date:
+ return True
+
+ return False
+
@frappe.whitelist()
- def create_usage_record(self):
+ def create_usage_record(self, date: DF.Date | None = None): # noqa: C901
cannot_charge = not self.can_charge_for_subscription()
if cannot_charge:
- return
+ return None
- if self.is_usage_record_created():
- return
+ if self.is_usage_record_created(date):
+ return None
+
+ if not self.is_valid_subscription(date):
+ return None
team = frappe.get_cached_doc("Team", self.team)
if team.parent_team:
team = frappe.get_cached_doc("Team", team.parent_team)
+ if team.billing_team and team.payment_mode == "Paid By Partner":
+ team = frappe.get_cached_doc("Team", team.billing_team)
+
if not team.get_upcoming_invoice():
team.create_upcoming_invoice()
- plan = frappe.get_cached_doc("Plan", self.plan)
- amount = plan.get_price_for_interval(self.interval, team.currency)
+ plan = frappe.get_cached_doc(self.plan_type, self.plan)
+
+ if self.additional_storage:
+ price = plan.price_inr if team.currency == "INR" else plan.price_usd
+ price_per_day = price / plan.period # no rounding off to avoid discrepancies
+ amount = flt((price_per_day * cint(self.additional_storage)), 2)
+
+ elif self.plan_type == "Server Snapshot Plan":
+ price = plan.price_inr if team.currency == "INR" else plan.price_usd
+ price_per_day = price / plan.period # no rounding off to avoid discrepancies
+ amount = flt(
+ (
+ price_per_day
+ * cint(frappe.get_value("Server Snapshot", self.document_name, "total_size_gb"))
+ ),
+ 2,
+ )
+ else:
+ amount = plan.get_price_for_interval(self.interval, team.currency)
+
+ if self.plan_type == "Server Plan" and self.document_type == "Server":
+ is_primary = frappe.db.get_value("Server", self.document_name, "is_primary")
+ if not is_primary:
+ return None  # if the server is a secondary application server, don't create a usage record
usage_record = frappe.get_doc(
doctype="Usage Record",
team=team.name,
document_type=self.document_type,
document_name=self.document_name,
+ plan_type=self.plan_type,
plan=plan.name,
amount=amount,
+ date=date,
subscription=self.name,
interval=self.interval,
- site=frappe.get_value(
- "Marketplace App Subscription", self.marketplace_app_subscription, "site"
+ site=(
+ self.site
+ or frappe.get_value("Marketplace App Subscription", self.marketplace_app_subscription, "site")
)
if self.document_type == "Marketplace App"
else None,
@@ -92,7 +226,7 @@ def can_charge_for_subscription(self):
return True
- def is_usage_record_created(self):
+ def is_usage_record_created(self, date=None):
filters = {
"team": self.team,
"document_type": self.document_type,
@@ -103,7 +237,8 @@ def is_usage_record_created(self):
}
if self.interval == "Daily":
- filters.update({"date": frappe.utils.today()})
+ date = date or frappe.utils.today()
+ filters.update({"date": date})
if self.interval == "Monthly":
date = frappe.utils.getdate()
@@ -121,6 +256,7 @@ def validate_duplicate(self):
"team": self.team,
"document_type": self.document_type,
"document_name": self.document_name,
+ "plan_type": self.plan_type,
}
if self.document_type == "Marketplace App":
filters.update({"marketplace_app_subscription": self.marketplace_app_subscription})
@@ -130,6 +266,7 @@ def validate_duplicate(self):
filters,
pluck="name",
limit=1,
+ ignore_ifnull=True,
)
if results:
link = frappe.utils.get_link_to_form("Subscription", results[0])
@@ -141,8 +278,8 @@ def get_subscribed_document(self):
return self._subscribed_document
@classmethod
- def get_sites_without_offsite_backups(cls) -> List[str]:
- plans = Plan.get_ones_without_offsite_backups()
+ def get_sites_without_offsite_backups(cls) -> list[str]:
+ plans = SitePlan.get_ones_without_offsite_backups()
return frappe.get_all(
"Subscription",
filters={"document_type": "Site", "plan": ("in", plans)},
@@ -151,64 +288,104 @@ def get_sites_without_offsite_backups(cls) -> List[str]:
def create_usage_records():
+ create_usage_records_of_date()
+
+
+def create_usage_records_of_date(
+ date: DF.Date | None = None, usage_record_creation_batch_size: int | None = None
+):
"""
Creates daily usage records for paid Subscriptions
+
+ If no date is provided, it defaults to today.
+ If usage_record_creation_batch_size is not provided, it is read from `Press Settings`, defaulting to 500.
"""
free_sites = sites_with_free_hosting()
+ settings = frappe.get_single("Press Settings")
subscriptions = frappe.db.get_all(
"Subscription",
filters={
"enabled": True,
"plan": ("in", paid_plans()),
- "name": ("not in", created_usage_records(free_sites)),
+ "name": ("not in", created_usage_records(free_sites, date=date)),
"document_name": ("not in", free_sites),
},
pluck="name",
- limit=2000,
+ order_by=None,
+ limit=usage_record_creation_batch_size or settings.usage_record_creation_batch_size or 500,
+ ignore_ifnull=True,
)
for name in subscriptions:
+ if has_job_timeout_exceeded():
+ return
subscription = frappe.get_cached_doc("Subscription", name)
try:
- subscription.create_usage_record()
+ subscription.create_usage_record(date=date)
frappe.db.commit()
+ except rq.timeouts.JobTimeoutException:
+ # This job took too long to execute
+ # We need to rollback the transaction
+ # Try again in the next job
+ frappe.db.rollback()
+ return
except Exception:
frappe.db.rollback()
log_error(title="Create Usage Record Error", name=name)
def paid_plans():
- return frappe.db.get_all(
- "Plan",
- {
- "document_type": (
- "in",
- ("Site", "Server", "Database Server", "Self Hosted Server", "Marketplace App"),
- ),
- "is_trial_plan": 0,
- "price_inr": (">", 0),
- },
- pluck="name",
- ignore_ifnull=True,
- )
+ paid_plans = []
+
+ doctypes = [
+ "Site Plan",
+ "Marketplace App Plan",
+ "Server Plan",
+ "Server Storage Plan",
+ "Cluster Plan",
+ ]
+
+ for name in doctypes:
+ doctype = frappe.qb.DocType(name)
+ if name in ("Server Plan", "Site Plan"):
+ paid_plans += (
+ frappe.qb.from_(doctype)
+ .select(doctype.name)
+ .where(doctype.price_inr > 0)
+ .where((doctype.enabled == 1) | (doctype.legacy_plan == 1))
+ .run(pluck=True)
+ )
+ else:
+ paid_plans += (
+ frappe.qb.from_(doctype)
+ .select(doctype.name)
+ .where(doctype.price_inr > 0)
+ .where(doctype.enabled == 1)
+ .run(pluck=True)
+ )
+
+ return list(set(paid_plans))
def sites_with_free_hosting():
- """Includes sites that have standard hosting plan from Marketplace Plan"""
- marketplace_paid_plans = frappe.get_all(
- "Marketplace App Plan",
- {"is_free": 0, "standard_hosting_plan": ("is", "set")},
+ # sites owned by free teams, plus sites individually marked as free
+ free_teams = frappe.get_all("Team", filters={"free_account": True, "enabled": True}, pluck="name")
+ free_team_sites = frappe.get_all(
+ "Site",
+ {"status": ("not in", ("Archived", "Suspended")), "team": ("in", free_teams)},
pluck="name",
+ ignore_ifnull=True,
)
- sites_with_standard_hosting = frappe.get_all(
- "Marketplace App Subscription",
- {"marketplace_app_plan": ("in", marketplace_paid_plans), "status": "Active"},
- pluck="site",
- )
-
- free_sites = frappe.get_all(
- "Site", filters={"free": True, "status": "Active"}, pluck="name"
+ return free_team_sites + frappe.get_all(
+ "Site",
+ filters={
+ "free": True,
+ "status": ("not in", ("Archived", "Suspended")),
+ "team": ("not in", free_teams),
+ },
+ pluck="name",
+ ignore_ifnull=True,
)
- return sites_with_standard_hosting + free_sites
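+ # Example (illustrative, hypothetical data): with one enabled free_account
+ # team owning two live sites and a paid team owning one site flagged free=1,
+ # this returns all three names; Archived and Suspended sites are excluded
+ # in both branches.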
def created_usage_records(free_sites, date=None):
@@ -217,15 +394,24 @@ def created_usage_records(free_sites, date=None):
return frappe.get_all(
"Usage Record",
filters={
- "document_type": ("in", ("Site", "Server", "Database Server", "Self Hosted Server")),
+ "document_type": (
+ "in",
+ (
+ "Site",
+ "Server",
+ "Database Server",
+ "Self Hosted Server",
+ "Marketplace App",
+ "Cluster",
+ ),
+ ),
"date": date,
"document_name": ("not in", free_sites),
},
pluck="subscription",
+ order_by=None,
ignore_ifnull=True,
)
-get_permission_query_conditions = get_permission_query_conditions_for_doctype(
- "Subscription"
-)
+get_permission_query_conditions = get_permission_query_conditions_for_doctype("Subscription")
diff --git a/press/press/doctype/subscription/test_subscription.py b/press/press/doctype/subscription/test_subscription.py
index b521719a05e..1879d53eb9d 100644
--- a/press/press/doctype/subscription/test_subscription.py
+++ b/press/press/doctype/subscription/test_subscription.py
@@ -1,45 +1,57 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
-import unittest
from unittest.mock import patch
import frappe
+from frappe.tests.utils import FrappeTestCase
from press.press.doctype.site.test_site import create_test_site
+from press.press.doctype.subscription.subscription import sites_with_free_hosting
from press.press.doctype.team.test_team import create_test_team
def create_test_subscription(
- document_name: str, plan: str, team: str, document_type: str = "Site"
+ document_name: str,
+ plan: str,
+ team: str,
+ document_type: str = "Site",
+ plan_type: str = "Site Plan",
):
-
subscription = frappe.get_doc(
{
"doctype": "Subscription",
"document_type": document_type,
"document_name": document_name,
"team": team,
+ "plan_type": plan_type,
"plan": plan,
+ "site": document_name if document_type == "Site" else None,
}
).insert(ignore_if_duplicate=True)
subscription.reload()
return subscription
-class TestSubscription(unittest.TestCase):
+class TestSubscription(FrappeTestCase):
def setUp(self):
+ super().setUp()
+
self.team = create_test_team()
+ self.team.allocate_credit_amount(1000, source="Prepaid Credits")
+ self.team.payment_mode = "Prepaid Credits"
+ self.team.save()
+ frappe.set_user(self.team.user)
def tearDown(self):
+ frappe.set_user("Administrator")
frappe.db.rollback()
def test_subscription_daily(self):
todo = frappe.get_doc(doctype="ToDo", description="Test todo").insert()
plan = frappe.get_doc(
- doctype="Plan",
+ doctype="Site Plan",
name="Plan-10",
document_type="ToDo",
interval="Daily",
@@ -52,6 +64,7 @@ def test_subscription_daily(self):
team=self.team.name,
document_type="ToDo",
document_name=todo.name,
+ plan_type="Site Plan",
plan=plan.name,
).insert()
@@ -84,7 +97,7 @@ def test_subscription_daily(self):
def test_subscription_for_non_chargeable_document(self):
todo = frappe.get_doc(doctype="ToDo", description="Test todo").insert()
plan = frappe.get_doc(
- doctype="Plan",
+ doctype="Site Plan",
name="Plan-10",
document_type="ToDo",
interval="Daily",
@@ -97,6 +110,7 @@ def test_subscription_for_non_chargeable_document(self):
team=self.team.name,
document_type="ToDo",
document_name=todo.name,
+ plan_type="Site Plan",
plan=plan.name,
).insert()
@@ -120,7 +134,7 @@ def test_site_in_trial(self):
site.save()
plan = frappe.get_doc(
- doctype="Plan",
+ doctype="Site Plan",
name="Plan-10",
document_type="Site",
interval="Daily",
@@ -134,6 +148,7 @@ def test_site_in_trial(self):
team=self.team.name,
document_type="Site",
document_name=site.name,
+ plan_type="Site Plan",
plan=plan.name,
).insert()
@@ -151,3 +166,22 @@ def test_site_in_trial(self):
invoice = frappe.get_doc("Invoice", {"team": self.team.name, "status": "Draft"})
self.assertEqual(invoice.total, 0)
+
+ def test_sites_with_free_hosting(self):
+ self.team.create_upcoming_invoice()
+
+ site1 = create_test_site(team=self.team.name)
+ site1.free = 1
+ site1.save()
+ create_test_site(team=self.team.name)
+
+ # test: site marked as free
+ free_sites = sites_with_free_hosting()
+ self.assertEqual(len(free_sites), 1)
+
+ self.team.free_account = True
+ self.team.save()
+
+ # test: site owned by free account
+ free_sites = sites_with_free_hosting()
+ self.assertEqual(len(free_sites), 2)
diff --git a/press/press/doctype/support_access/__init__.py b/press/press/doctype/support_access/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/support_access/support_access.js b/press/press/doctype/support_access/support_access.js
new file mode 100644
index 00000000000..9c604832dc6
--- /dev/null
+++ b/press/press/doctype/support_access/support_access.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Support Access", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/support_access/support_access.json b/press/press/doctype/support_access/support_access.json
new file mode 100644
index 00000000000..f3f2f765754
--- /dev/null
+++ b/press/press/doctype/support_access/support_access.json
@@ -0,0 +1,213 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "autoname": "hash",
+ "creation": "2025-09-25 15:21:55.641624",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "access_expired",
+ "status",
+ "requested_by",
+ "requested_team",
+ "target_team",
+ "access_allowed_till",
+ "column_break_vrkb",
+ "allowed_for",
+ "reason",
+ "site_permissions_section",
+ "login_as_administrator",
+ "site_domains",
+ "site_release_group",
+ "bench_permissions_section",
+ "bench_ssh",
+ "section_break_lwpm",
+ "resources"
+ ],
+ "fields": [
+ {
+ "default": "0",
+ "fieldname": "access_expired",
+ "fieldtype": "Check",
+ "is_virtual": 1,
+ "label": "Access Expired"
+ },
+ {
+ "fieldname": "requested_by",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Requested By",
+ "options": "User",
+ "read_only": 1
+ },
+ {
+ "default": "72",
+ "description": "in hours",
+ "fieldname": "allowed_for",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Allowed for",
+ "options": "3\n6\n12\n24\n72",
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "reason",
+ "fieldtype": "Small Text",
+ "label": "Reason"
+ },
+ {
+ "fieldname": "column_break_vrkb",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "access_allowed_till",
+ "fieldtype": "Datetime",
+ "label": "Access allowed till",
+ "read_only": 1
+ },
+ {
+ "default": "Pending",
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Status",
+ "options": "Pending\nAccepted\nRejected\nForfeited\nRevoked"
+ },
+ {
+ "fieldname": "section_break_lwpm",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "resources",
+ "fieldtype": "Table",
+ "label": "Resources",
+ "options": "Support Access Resource",
+ "reqd": 1
+ },
+ {
+ "fieldname": "requested_team",
+ "fieldtype": "Link",
+ "in_standard_filter": 1,
+ "label": "Requested Team",
+ "options": "Team",
+ "read_only": 1
+ },
+ {
+ "fieldname": "target_team",
+ "fieldtype": "Link",
+ "in_standard_filter": 1,
+ "label": "Target Team",
+ "options": "Team",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "login_as_administrator",
+ "fieldtype": "Check",
+ "label": "Login as Administrator",
+ "set_only_once": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "site_domains",
+ "fieldtype": "Check",
+ "label": "Domains",
+ "set_only_once": 1
+ },
+ {
+ "fieldname": "site_permissions_section",
+ "fieldtype": "Section Break",
+ "label": "Site Permissions"
+ },
+ {
+ "fieldname": "bench_permissions_section",
+ "fieldtype": "Section Break",
+ "label": "Bench Permissions"
+ },
+ {
+ "default": "0",
+ "fieldname": "bench_ssh",
+ "fieldtype": "Check",
+ "label": "SSH",
+ "set_only_once": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "site_release_group",
+ "fieldtype": "Check",
+ "label": "Release Group",
+ "set_only_once": 1
+ }
+ ],
+ "grid_page_length": 50,
+ "links": [],
+ "modified": "2025-11-14 21:55:56.574233",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Support Access",
+ "naming_rule": "Random",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Support Agent",
+ "share": 1
+ },
+ {
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": [
+ {
+ "color": "Gray",
+ "title": "Pending"
+ },
+ {
+ "color": "Green",
+ "title": "Accepted"
+ },
+ {
+ "color": "Red",
+ "title": "Rejected"
+ }
+ ]
+}
diff --git a/press/press/doctype/support_access/support_access.py b/press/press/doctype/support_access/support_access.py
new file mode 100644
index 00000000000..261fea0cb8e
--- /dev/null
+++ b/press/press/doctype/support_access/support_access.py
@@ -0,0 +1,294 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+
+import frappe
+import frappe.utils
+from frappe.model.document import Document
+from frappe.query_builder import Criterion, JoinType
+from frappe.query_builder.functions import Count
+
+from press.utils import get_current_team
+
+
+class SupportAccess(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.support_access_resource.support_access_resource import SupportAccessResource
+
+ access_allowed_till: DF.Datetime | None
+ allowed_for: DF.Literal["3", "6", "12", "24", "72"]
+ bench_ssh: DF.Check
+ login_as_administrator: DF.Check
+ reason: DF.SmallText | None
+ requested_by: DF.Link | None
+ requested_team: DF.Link | None
+ resources: DF.Table[SupportAccessResource]
+ site_domains: DF.Check
+ site_release_group: DF.Check
+ status: DF.Literal["Pending", "Accepted", "Rejected", "Forfeited", "Revoked"]
+ target_team: DF.Link | None
+ # end: auto-generated types
+
+ dashboard_fields = (
+ "access_allowed_till",
+ "allowed_for",
+ "login_as_administrator",
+ "reason",
+ "requested_by",
+ "requested_team",
+ "resources",
+ "site_domains",
+ "site_release_group",
+ "status",
+ "target_team",
+ "bench_ssh",
+ )
+
+ def get_list_query(query, filters: dict | None, **args):
+ filters = filters or {}
+ team = get_current_team()
+ Access = frappe.qb.DocType("Support Access")
+ AccessResource = frappe.qb.DocType("Support Access Resource")
+ query = (
+ query.join(AccessResource, JoinType.left)
+ .on(AccessResource.parent == Access.name)
+ .select(Count(AccessResource.name).as_("resource_count"))
+ .groupby(Access.name)
+ .select(AccessResource.document_name.as_("resource_name"))
+ )
+ conditions = []
+ match filters.get("source"):
+ case "Received":
+ conditions.append(Access.target_team == team)
+ case "Sent":
+ conditions.append(Access.requested_by == frappe.session.user)
+ conditions.append(Access.requested_team == team)
+ return query.where(Criterion.any(conditions)).run(as_dict=True)
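+ # Example (illustrative): a caller would pass filters={"source": "Received"}
+ # to list requests targeting the current team, or "Sent" for requests this
+ # team raised; each row also carries a resource_count from the joined child
+ # table.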
+
+ @property
+ def access_expired(self):
+ return bool(
+ self.access_allowed_till
+ and frappe.utils.get_datetime(self.access_allowed_till) < frappe.utils.now_datetime()
+ )
+
+ def before_validate(self):
+ self.requested_by = self.requested_by or frappe.session.user
+ self.requested_team = self.requested_team or get_current_team()
+ self.set_expiry()
+ self.resolve_sites()
+ self.add_release_group()
+
+ def add_release_group(self):
+ """
+ Add release group and bench as resources if `site_release_group` is checked.
+ """
+
+ if not self.site_release_group:
+ return
+
+ # Only add the release group and bench for new requests,
+ # never on subsequent updates.
+ if not self.is_new():
+ return
+
+ site = None
+ for resource in self.resources:
+ if resource.document_type == "Site":
+ site = resource.document_name
+ break
+ if not site:
+ return
+
+ site = frappe.get_doc("Site", site)
+ release_group = frappe.get_doc("Release Group", site.group)
+
+ # Ensure release group and site belong to the same team.
+ if site.team != release_group.team:
+ return
+
+ # Add release group as a resource.
+ self.append(
+ "resources",
+ {
+ "document_type": "Release Group",
+ "document_name": release_group.name,
+ },
+ )
+
+ # Add bench as a resource.
+ self.append(
+ "resources",
+ {
+ "document_type": "Bench",
+ "document_name": site.bench,
+ },
+ )
+
+ def set_expiry(self):
+ doc_before = self.get_doc_before_save()
+ hours = frappe.utils.cint(self.allowed_for)
+ if hours and doc_before and doc_before.status != self.status and self.status == "Accepted":
+ self.access_allowed_till = frappe.utils.add_to_date(frappe.utils.now_datetime(), hours=hours)
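+ # Worked example (illustrative): a request accepted at 2025-09-25 10:00 with
+ # allowed_for = "24" gets access_allowed_till = 2025-09-26 10:00, after which
+ # the virtual access_expired flag turns on.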
+
+ def resolve_sites(self):
+ for resource in self.resources:
+ if resource.document_type == "Site":
+ resource.document_name = self.resolve_site_name(resource.document_name)
+
+ def resolve_site_name(self, site) -> str:
+ try:
+ domain = frappe.get_doc("Site Domain", site)
+ return domain.site
+ except frappe.DoesNotExistError:
+ return site
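+ # Example (hypothetical names): if "shop.example.com" is a Site Domain
+ # pointing at site "acme-shop.frappe.cloud", a resource entered as the custom
+ # domain is stored under the site name; unknown names pass through unchanged.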
+
+ def validate(self):
+ self.validate_status_change()
+ self.validate_expiry()
+ self.validate_target_team()
+
+ @property
+ def target_statuses(self) -> list[str]:
+ """
+ Returns the possible target statuses for the current user.
+ """
+ current_team = get_current_team()
+ if self.target_team == current_team:
+ return ["Accepted", "Rejected", "Revoked"]
+ if self.requested_team == current_team:
+ return ["Pending", "Forfeited"]
+ return []
+
+ def is_valid_status_transition(self, status_from: str, status_to: str) -> bool:
+ """
+ Checks if status can be changed from `status_from` to `status_to`.
+ """
+ return status_to in {
+ "Pending": ["Accepted", "Rejected"],
+ "Accepted": ["Revoked", "Forfeited"],
+ "Rejected": [],
+ "Forfeited": [],
+ "Revoked": [],
+ }.get(status_from, [])
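+ # Illustrative examples of the table above (not part of this patch):
+ # >>> doc.is_valid_status_transition("Pending", "Accepted")
+ # True
+ # >>> doc.is_valid_status_transition("Rejected", "Accepted")
+ # False  # Rejected, Forfeited and Revoked are terminal states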
+
+ def validate_status_change(self):
+ status_changed = self.has_value_changed("status")
+ if not status_changed:
+ return
+ doc_before = self.get_doc_before_save()
+ if not doc_before:
+ return
+ status_before = doc_before.status
+ status_after = self.status
+ if not self.is_valid_status_transition(status_before, status_after):
+ frappe.throw(f"Cannot change status from {status_before} to {status_after}")
+ if status_after not in self.target_statuses:
+ frappe.throw("You are not allowed to set this status")
+
+ def validate_expiry(self):
+ if self.access_expired:
+ frappe.throw("Access expiry must be in the future")
+ if self.status == "Pending" and self.access_allowed_till:
+ frappe.throw("Pending requests cannot have access expiry")
+
+ def validate_target_team(self):
+ teams = set()
+ for resource in self.resources:
+ team = frappe.get_value(resource.document_type, resource.document_name, "team")
+ teams.add(team)
+ if len(teams) != 1:
+ frappe.throw("Resources must belong to the same team")
+ self.target_team = teams.pop()
+
+ def after_insert(self):
+ self.notify_on_request()
+
+ def on_update(self):
+ self.notify_on_status_change()
+
+ def notify_on_status_change(self):
+ if not self.has_value_changed("status"):
+ return
+
+ title = f"Access Request {self.status}"
+ message = f"Your request for support access has been {self.status.lower()}."
+ recipient = self.requested_by
+
+ if self.status == "Forfeited":
+ message = "Support access has been forfieted."
+ recipient = frappe.get_value("Team", self.target_team, "user")
+
+ frappe.sendmail(
+ subject=title,
+ message=message,
+ recipients=recipient,
+ template="access_request_update",
+ args={
+ "status": self.status,
+ "resources": self.resources,
+ },
+ )
+
+ frappe.get_doc(
+ {
+ "doctype": "Press Notification",
+ "team": self.requested_team,
+ "type": "Support Access",
+ "document_type": "Support Access",
+ "document_name": self.name,
+ "title": title,
+ "message": message,
+ }
+ ).insert()
+
+ frappe.publish_realtime(
+ "press_notification",
+ doctype="Press Notification",
+ message={
+ "team": self.requested_team,
+ },
+ )
+
+ def notify_on_request(self):
+ title = "New Access Request"
+ message = f"{self.requested_by} has requested support access for one of your resources."
+ team_email = frappe.get_value("Team", self.target_team, "user")
+
+ frappe.sendmail(
+ subject=title,
+ message=message,
+ recipients=team_email,
+ template="access_request",
+ args={
+ "reason": self.reason,
+ "resources": self.resources,
+ },
+ )
+
+ frappe.get_doc(
+ {
+ "doctype": "Press Notification",
+ "team": self.target_team,
+ "type": "Support Access",
+ "document_type": "Support Access",
+ "document_name": self.name,
+ "title": title,
+ "message": message,
+ }
+ ).insert()
+
+ frappe.publish_realtime(
+ "press_notification",
+ doctype="Press Notification",
+ message={
+ "team": self.target_team,
+ },
+ )
diff --git a/press/press/doctype/support_access/test_support_access.py b/press/press/doctype/support_access/test_support_access.py
new file mode 100644
index 00000000000..7463511213d
--- /dev/null
+++ b/press/press/doctype/support_access/test_support_access.py
@@ -0,0 +1,56 @@
+# Copyright (c) 2025, Frappe and Contributors
+# See license.txt
+
+from typing import TYPE_CHECKING
+
+import frappe
+from frappe.tests import IntegrationTestCase, UnitTestCase
+
+if TYPE_CHECKING:
+ from press.press.doctype.support_access.support_access import SupportAccess
+
+# On IntegrationTestCase, the doctype test records and all
+# link-field test record dependencies are recursively loaded
+# Use these module variables to add/remove to/from that list
+EXTRA_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
+IGNORE_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
+
+
+class UnitTestSupportAccess(UnitTestCase):
+ """
+ Unit tests for SupportAccess.
+ Use this class for testing individual functions and methods.
+ """
+
+ def test_valid_status_transition(self):
+ combinations = [
+ ["Pending", "Accepted", True],
+ ["Pending", "Rejected", True],
+ ["Accepted", "Revoked", True],
+ ["Accepted", "Forfeited", True],
+ ["Rejected", "Accepted", False],
+ ["Forfeited", "Accepted", False],
+ ["Revoked", "Accepted", False],
+ ["Pending", "Forfeited", False],
+ ["Pending", "Revoked", False],
+ ["Accepted", "Rejected", False],
+ ["Rejected", "Forfeited", False],
+ ["Rejected", "Revoked", False],
+ ["Forfeited", "Revoked", False],
+ ["Revoked", "Forfeited", False],
+ ]
+
+ support_access: SupportAccess = frappe.new_doc("Support Access")
+ for status_from, status_to, expected in combinations:
+ with self.subTest(status_from=status_from, status_to=status_to, expected=expected):
+ is_valid = support_access.is_valid_status_transition(status_from, status_to)
+ self.assertEqual(is_valid, expected)
+
+
+class IntegrationTestSupportAccess(IntegrationTestCase):
+ """
+ Integration tests for SupportAccess.
+ Use this class for testing interactions between multiple components.
+ """
+
+ pass
diff --git a/press/press/doctype/support_access_resource/__init__.py b/press/press/doctype/support_access_resource/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/support_access_resource/support_access_resource.json b/press/press/doctype/support_access_resource/support_access_resource.json
new file mode 100644
index 00000000000..99d32429bfe
--- /dev/null
+++ b/press/press/doctype/support_access_resource/support_access_resource.json
@@ -0,0 +1,42 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-10-07 16:52:18.845514",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "document_type",
+ "document_name"
+ ],
+ "fields": [
+ {
+ "fieldname": "document_type",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Document Type",
+ "options": "DocType"
+ },
+ {
+ "fieldname": "document_name",
+ "fieldtype": "Dynamic Link",
+ "in_list_view": 1,
+ "label": "Document Name",
+ "options": "document_type"
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2025-10-07 16:52:18.845514",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Support Access Resource",
+ "owner": "Administrator",
+ "permissions": [],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/press/doctype/support_access_resource/support_access_resource.py b/press/press/doctype/support_access_resource/support_access_resource.py
new file mode 100644
index 00000000000..e5f9a77e40a
--- /dev/null
+++ b/press/press/doctype/support_access_resource/support_access_resource.py
@@ -0,0 +1,24 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class SupportAccessResource(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ document_name: DF.DynamicLink | None
+ document_type: DF.Link | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/team/patches/remove_invalid_email_addresses.py b/press/press/doctype/team/patches/remove_invalid_email_addresses.py
new file mode 100644
index 00000000000..5fb6763abda
--- /dev/null
+++ b/press/press/doctype/team/patches/remove_invalid_email_addresses.py
@@ -0,0 +1,21 @@
+# Copyright (c) 2024, Frappe Technologies Pvt. Ltd. and Contributors
+# For license information, please see license.txt
+
+import frappe
+from frappe.utils import update_progress_bar, validate_email_address
+
+
+def execute():
+ emails = frappe.get_all(
+ "Communication Email",
+ {"parentfield": "communication_emails", "parenttype": "Team", "value": ("is", "set")},
+ ["name", "value"],
+ )
+
+ total_emails = len(emails)
+ for index, email in enumerate(emails):
+ update_progress_bar("Updating emails", index, total_emails)
+ if not validate_email_address(email.value):
+ frappe.db.set_value(
+ "Communication Email", email.name, "value", "", update_modified=False
+ )
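+ # Example (illustrative): frappe's validate_email_address returns a falsy
+ # value for a string like "not-an-email", so such rows are blanked, while
+ # update_modified=False preserves each document's modified timestamp.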
diff --git a/press/press/doctype/team/suspend_sites.py b/press/press/doctype/team/suspend_sites.py
index a878e580646..716e1100c8e 100644
--- a/press/press/doctype/team/suspend_sites.py
+++ b/press/press/doctype/team/suspend_sites.py
@@ -8,83 +8,103 @@
Defaulters are identified based on the following conditions:
- Is not a free account
-- Is not an ERPNext Partner
-- Card not added
-- Does not have enough credit balance
+- Is not a Legacy Partner account with payment mode set to Partner Credits
+- Has at least one unpaid invoice
+- Has an active site
-The `execute` method is the main method which is run by the scheduler daily.
+The `execute` method is the main entry point and is run by the scheduler every day of the month.
"""
-
import frappe
-from frappe.utils.data import flt
+from frappe.utils import add_days, get_first_day, get_last_day, getdate
+
+from press.utils import log_error
def execute():
- teams_with_total_usage = get_teams_with_total_usage()
+ today = getdate()
+ first_day_of_month = get_first_day(today)
+ ninth_day_of_month = add_days(first_day_of_month, 8)
- for d in teams_with_total_usage:
- total_usage = d.total_usage
- team = frappe.get_doc("Team", d.team)
+ if today >= first_day_of_month and today <= ninth_day_of_month:
+ return
- if team.free_account or not total_usage or team.get_balance() > 0:
- continue
+ teams_with_unpaid_invoices = get_teams_with_unpaid_invoices()
- total_usage_limit = get_total_free_usage_limit(team)
+ for d in teams_with_unpaid_invoices[:30]:
+ team = frappe.get_doc("Team", d.team)
- # if total usage has crossed the allotted free credits, suspend their sites
- if total_usage > total_usage_limit:
- suspend_sites_and_send_email(team)
+ # suspend the defaulting team's sites and notify them
+ suspend_sites_and_send_email(team)
def suspend_sites_and_send_email(team):
- sites = team.suspend_sites(reason="Card not added and free credits exhausted")
+ try:
+ sites = team.suspend_sites(reason="Unpaid Invoices")
+ frappe.db.commit()
+ except Exception:
+ log_error(
+ f"Error while suspending sites for team {team.name}",
+ traceback=frappe.get_traceback(),
+ )
+ frappe.db.rollback()
+ return
# send email
if sites:
email = team.user
- account_update_link = frappe.utils.get_url("/dashboard/welcome")
frappe.sendmail(
recipients=email,
subject="Your sites have been suspended on Frappe Cloud",
- template="payment_failed",
+ template="suspended_sites",
args={
"subject": "Your sites have been suspended on Frappe Cloud",
- "account_update_link": account_update_link,
- "card_not_added": True,
"sites": sites,
- "team": team,
},
)
-def get_teams_with_total_usage():
- """Find out teams which don't have a card, not a free account, not an erpnext partner with their total usage"""
- return frappe.db.sql(
- """
- SELECT
- SUM(i.total) as total_usage,
- i.team
- FROM
- `tabInvoice` i
- LEFT JOIN `tabTeam` t ON t.name = i.team
- WHERE
- i.docstatus < 2
- AND ifnull(t.default_payment_method, '') = ''
- AND t.free_account = 0
- AND t.erpnext_partner = 0
- GROUP BY
- i.team
- """,
- as_dict=True,
- )
-
-
-def get_total_free_usage_limit(team):
- """Returns the total free credits allocated to the team"""
- if not team.free_credits_allocated:
- return 0
-
- settings = frappe.get_cached_doc("Press Settings", "Press Settings")
- return flt(
- settings.free_credits_inr if team.currency == "INR" else settings.free_credits_usd
+def get_teams_with_unpaid_invoices():
+ """Find out teams which has active sites and unpaid invoices and not a free account"""
+ today = getdate()
+ # last day of previous month
+ last_day = get_last_day(frappe.utils.add_months(today, -1))
+
+ plan = frappe.qb.DocType("Site Plan")
+ query = (
+ frappe.qb.from_(plan)
+ .select(plan.name)
+ .where((plan.enabled == 1) & ((plan.is_frappe_plan == 1) | (plan.is_trial_plan == 1)))
+ ).run(as_dict=True)
+ ignorable_plans = [d.name for d in query]
+
+ invoice = frappe.qb.DocType("Invoice")
+ team = frappe.qb.DocType("Team")
+ site = frappe.qb.DocType("Site")
+
+ query = (
+ frappe.qb.from_(invoice)
+ .inner_join(team)
+ .on(invoice.team == team.name)
+ .inner_join(site)
+ .on(site.team == team.name)
+ .where(
+ (site.status).isin(["Active", "Inactive"])
+ & (team.enabled == 1)
+ & (team.free_account == 0)
+ & (team.extend_payment_due_suspension == 0)
+ & (invoice.status == "Unpaid")
+ & (invoice.docstatus < 2)
+ & (invoice.type == "Subscription")
+ & (site.free == 0)
+ & (invoice.period_end <= last_day)
+ )
+ .select(invoice.team)
+ .distinct()
)
+ if ignorable_plans:
+ query = query.where((site.plan).notin(ignorable_plans))
+ first_day = get_first_day(today)
+ two_weeks = add_days(first_day, 14) # 15th day of the month
+ if today < two_weeks:
+ query = query.where(team.erpnext_partner == 0)
+
+ return query.run(as_dict=True)
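+ # Worked example (restating the code): combined with the grace window in
+ # execute(), a run on the 10th returns only non-partner defaulters; from the
+ # 15th (first_day + 14) onward, ERPNext partner teams are included too.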
diff --git a/press/press/doctype/team/team.json b/press/press/doctype/team/team.json
index 433c47d0d30..ef9cc127fa6 100644
--- a/press/press/doctype/team/team.json
+++ b/press/press/doctype/team/team.json
@@ -11,56 +11,107 @@
"team_title",
"user",
"account_request",
+ "parent_team",
+ "column_break_wejg",
"is_developer",
+ "is_saas_user",
+ "is_code_server_user",
"free_account",
"via_erpnext",
- "column_break_6",
- "erpnext_partner",
- "partner_email",
- "parent_team",
+ "enforce_2fa",
+ "extend_payment_due_suspension",
+ "apply_npo_discount",
+ "skip_onboarding",
+ "team_members_tab",
"section_break_6",
"team_members",
"section_break_tdm9",
"child_team_members",
- "section_break_9",
- "send_notifications",
- "column_break_9",
- "notify_email",
- "last_used_team",
+ "billing_tab",
"subscription_details_section",
"stripe_customer_id",
"country",
"currency",
"payment_mode",
+ "billing_team",
"default_payment_method",
"billing_name",
"billing_address",
"free_credits_allocated",
"column_break_12",
"address_html",
- "custom_apps_section",
- "github_access_token",
+ "section_break_jzok",
+ "receive_budget_alerts",
+ "monthly_alert_threshold",
+ "column_break_hunh",
+ "notification_tab",
+ "section_break_9",
+ "send_notifications",
+ "column_break_9",
+ "last_used_team",
+ "section_break_28",
+ "communication_infos",
+ "partner_tab",
+ "partner_details_section",
+ "erpnext_partner",
+ "company_name",
+ "partner_email",
+ "partner_manager",
+ "column_break_ooyo",
+ "partner_status",
+ "partner_tier",
+ "website_info_section",
+ "company_logo",
+ "website_link",
+ "introduction",
+ "column_break_akmx",
+ "customers",
+ "partnership_date_section",
+ "start_date",
+ "column_break_egkq",
+ "end_date",
+ "partner_section",
+ "partner_referral_code",
+ "partnership_date",
+ "column_break_ppov",
+ "frappe_partnership_date",
+ "partner_commission",
+ "feature_flags_tab",
"feature_flags_section",
"referrer_id",
"ssh_access_enabled",
"skip_backups",
+ "enable_inplace_updates",
"column_break_31",
"database_access_enabled",
+ "enable_performance_tuning",
"razorpay_enabled",
"servers_enabled",
+ "code_servers_enabled",
+ "hybrid_servers_enabled",
"self_hosted_servers_enabled",
- "section_break_28",
- "communication_emails",
+ "security_portal_enabled",
+ "benches_enabled",
+ "mpesa_enabled",
+ "hetzner_internal_user",
+ "allow_unified_servers",
"discounts_section",
"discounts",
- "is_us_eu"
+ "is_us_eu",
+ "custom_apps_tab",
+ "custom_apps_section",
+ "github_access_token",
+ "column_break_uyxo",
+ "mpesa_tax_id",
+ "mpesa_phone_number"
],
"fields": [
{
"fieldname": "user",
"fieldtype": "Link",
"label": "User",
- "options": "User"
+ "options": "User",
+ "search_index": 1
},
{
"fieldname": "team_members",
@@ -87,7 +138,8 @@
"fieldname": "stripe_customer_id",
"fieldtype": "Data",
"label": "Stripe Customer ID",
- "read_only": 1
+ "read_only": 1,
+ "search_index": 1
},
{
"fieldname": "country",
@@ -100,7 +152,8 @@
"fieldname": "currency",
"fieldtype": "Link",
"label": "Currency",
- "options": "Currency"
+ "options": "Currency",
+ "read_only": 1
},
{
"fieldname": "default_payment_method",
@@ -109,22 +162,14 @@
"label": "Default Payment Method",
"options": "Stripe Payment Method"
},
- {
- "default": "0",
- "description": "If checked, this user can Transfer credits from ERPNext.com",
- "fieldname": "erpnext_partner",
- "fieldtype": "Check",
- "in_standard_filter": 1,
- "label": "ERPNext Partner",
- "read_only": 1
- },
{
"default": "0",
"description": "If checked, usage data will not be sent to Stripe and they won't be charged",
"fieldname": "free_account",
"fieldtype": "Check",
"in_standard_filter": 1,
- "label": "Free Account"
+ "label": "Free Account",
+ "search_index": 1
},
{
"fieldname": "address_html",
@@ -159,8 +204,7 @@
},
{
"fieldname": "custom_apps_section",
- "fieldtype": "Section Break",
- "label": "Custom Apps"
+ "fieldtype": "Section Break"
},
{
"fieldname": "billing_name",
@@ -187,16 +231,7 @@
"fieldname": "payment_mode",
"fieldtype": "Select",
"label": "Payment Mode",
- "options": "\nCard\nPrepaid Credits\nPartner Credits"
- },
- {
- "depends_on": "eval:doc.send_notifications;",
- "fetch_from": "user.email",
- "fetch_if_empty": 1,
- "fieldname": "notify_email",
- "fieldtype": "Data",
- "label": "Notify Email",
- "mandatory_depends_on": "eval:doc.send_notifications;"
+ "options": "\nCard\nPrepaid Credits\nPaid By Partner"
},
{
"fieldname": "column_break_9",
@@ -218,12 +253,6 @@
"label": "Referrer ID",
"read_only": 1
},
- {
- "fieldname": "communication_emails",
- "fieldtype": "Table",
- "label": "Communication Emails",
- "options": "Communication Email"
- },
{
"fieldname": "section_break_28",
"fieldtype": "Section Break"
@@ -244,6 +273,7 @@
"collapsible": 1,
"fieldname": "discounts_section",
"fieldtype": "Section Break",
+ "hidden": 1,
"label": "Discounts"
},
{
@@ -253,11 +283,6 @@
"options": "Invoice Discount"
},
{
- "fieldname": "column_break_6",
- "fieldtype": "Column Break"
- },
- {
- "depends_on": "eval: doc.erpnext_partner",
"fieldname": "partner_email",
"fieldtype": "Data",
"label": "Partner Email"
@@ -301,7 +326,8 @@
"fieldname": "parent_team",
"fieldtype": "Link",
"label": "Parent Team",
- "options": "Team"
+ "options": "Team",
+ "search_index": 1
},
{
"fieldname": "section_break_tdm9",
@@ -316,6 +342,7 @@
{
"fieldname": "team_title",
"fieldtype": "Data",
+ "hidden": 1,
"label": "Team Title"
},
{
@@ -329,8 +356,312 @@
"fieldtype": "Link",
"label": "Account Request",
"options": "Account Request"
+ },
+ {
+ "default": "0",
+ "description": "SaaS user sees a simplified version of the dashboard",
+ "fieldname": "is_saas_user",
+ "fieldtype": "Check",
+ "label": "Is SaaS User"
+ },
+ {
+ "default": "0",
+ "fieldname": "security_portal_enabled",
+ "fieldtype": "Check",
+ "label": "Security Portal Enabled"
+ },
+ {
+ "default": "0",
+ "fieldname": "benches_enabled",
+ "fieldtype": "Check",
+ "label": "Benches Enabled"
+ },
+ {
+ "default": "0",
+ "fieldname": "code_servers_enabled",
+ "fieldtype": "Check",
+ "label": "Code Servers Enabled"
+ },
+ {
+ "depends_on": "eval: !doc.erpnext_partner && doc.payment_mode == 'Paid By Partner'",
+ "fieldname": "billing_team",
+ "fieldtype": "Link",
+ "label": "Billing Team",
+ "mandatory_depends_on": "eval: doc.payment_mode == 'Paid By Partner'",
+ "options": "Team"
+ },
+ {
+ "fieldname": "partner_referral_code",
+ "fieldtype": "Data",
+ "label": "Partner Referral Code",
+ "read_only": 1
+ },
+ {
+ "fieldname": "partner_section",
+ "fieldtype": "Section Break"
+ },
+ {
+ "depends_on": "eval:!doc.erpnext_partner && doc.partner_email",
+ "fieldname": "partnership_date",
+ "fieldtype": "Date",
+ "label": "Customer Partnership Date"
+ },
+ {
+ "fieldname": "column_break_ppov",
+ "fieldtype": "Column Break"
+ },
+ {
+ "depends_on": "eval:doc.erpnext_partner",
+ "description": "Fetched from frappe.io",
+ "fieldname": "frappe_partnership_date",
+ "fieldtype": "Date",
+ "label": "Frappe Partnership Date",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "description": "If checked, code server is enabled on created benches.",
+ "fieldname": "is_code_server_user",
+ "fieldtype": "Check",
+ "label": "Is Code Server User"
+ },
+ {
+ "fieldname": "column_break_wejg",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "false",
+ "fieldname": "enable_performance_tuning",
+ "fieldtype": "Check",
+ "label": "Enable Performance Tuning"
+ },
+ {
+ "default": "0",
+ "description": "Enforces 2FA to all members",
+ "fieldname": "enforce_2fa",
+ "fieldtype": "Check",
+ "label": "Enforce 2FA"
+ },
+ {
+ "default": "0",
+ "description": "Allows bypassing build to update a Bench (if conditions are met)",
+ "fieldname": "enable_inplace_updates",
+ "fieldtype": "Check",
+ "label": "Enable In Place Updates"
+ },
+ {
+ "default": "0",
+ "fieldname": "mpesa_enabled",
+ "fieldtype": "Check",
+ "label": "Mpesa Enabled"
+ },
+ {
+ "fieldname": "column_break_uyxo",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "mpesa_tax_id",
+ "fieldtype": "Data",
+ "label": "Mpesa Tax Id"
+ },
+ {
+ "fieldname": "mpesa_phone_number",
+ "fieldtype": "Data",
+ "label": "Mpesa Phone Number"
+ },
+ {
+ "fieldname": "partner_commission",
+ "fieldtype": "Percent",
+ "label": "Partner Commission"
+ },
+ {
+ "fieldname": "team_members_tab",
+ "fieldtype": "Tab Break",
+ "label": "Team Members"
+ },
+ {
+ "fieldname": "notification_tab",
+ "fieldtype": "Tab Break",
+ "label": "Notification"
+ },
+ {
+ "fieldname": "billing_tab",
+ "fieldtype": "Tab Break",
+ "label": "Billing"
+ },
+ {
+ "fieldname": "partner_tab",
+ "fieldtype": "Tab Break",
+ "label": "Partner"
+ },
+ {
+ "fieldname": "feature_flags_tab",
+ "fieldtype": "Tab Break",
+ "label": "Feature Flags"
+ },
+ {
+ "fieldname": "partner_details_section",
+ "fieldtype": "Section Break",
+ "label": "Partner Details"
+ },
+ {
+ "fieldname": "column_break_ooyo",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "partner_tier",
+ "fieldtype": "Link",
+ "label": "Partner Tier",
+ "options": "Partner Tier"
+ },
+ {
+ "fieldname": "custom_apps_tab",
+ "fieldtype": "Tab Break",
+ "label": "Custom Apps"
+ },
+ {
+ "default": "0",
+ "fieldname": "erpnext_partner",
+ "fieldtype": "Check",
+ "in_standard_filter": 1,
+ "label": "ERPNext Partner",
+ "read_only": 1
+ },
+ {
+ "depends_on": "eval: doc.erpnext_partner == 1",
+ "fieldname": "website_info_section",
+ "fieldtype": "Section Break",
+ "label": "Website Info"
+ },
+ {
+ "fieldname": "company_logo",
+ "fieldtype": "Attach",
+ "label": "Company Logo"
+ },
+ {
+ "fieldname": "website_link",
+ "fieldtype": "Data",
+ "label": "Website Link"
+ },
+ {
+ "fieldname": "introduction",
+ "fieldtype": "Small Text",
+ "label": "Introduction"
+ },
+ {
+ "fieldname": "column_break_akmx",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "customers",
+ "fieldtype": "Small Text",
+ "label": "Customers"
+ },
+ {
+ "default": "0",
+ "fieldname": "extend_payment_due_suspension",
+ "fieldtype": "Check",
+ "label": "Extend Payment Due Suspension",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "description": "If checked, Non Profit Organisation discount will be applied in Invoice",
+ "fieldname": "apply_npo_discount",
+ "fieldtype": "Check",
+ "label": "Apply NPO Discount"
+ },
+ {
+ "fieldname": "partner_status",
+ "fieldtype": "Select",
+ "label": "Partner Status",
+ "options": "Active\nInactive"
+ },
+ {
+ "default": "0",
+ "description": "If checked, members will not be forced to go through the onboarding",
+ "fieldname": "skip_onboarding",
+ "fieldtype": "Check",
+ "label": "Skip Onboarding"
+ },
+ {
+ "fieldname": "communication_infos",
+ "fieldtype": "Table",
+ "label": "Communication Infos",
+ "options": "Communication Info"
+ },
+ {
+ "fieldname": "section_break_jzok",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "column_break_hunh",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "receive_budget_alerts",
+ "fieldtype": "Check",
+ "label": "Receive Budget Alerts"
+ },
+ {
+ "depends_on": "receive_budget_alerts",
+ "description": "Email alert when monthly spend exceeds limit",
+ "fieldname": "monthly_alert_threshold",
+ "fieldtype": "Currency",
+ "label": "Monthly Alert Limit",
+ "mandatory_depends_on": "receive_budget_alerts"
+ },
+ {
+ "fieldname": "partner_manager",
+ "fieldtype": "Link",
+ "label": "Partner Manager",
+ "options": "User"
+ },
+ {
+ "fieldname": "partnership_date_section",
+ "fieldtype": "Section Break",
+ "label": "Partnership Date"
+ },
+ {
+ "fieldname": "start_date",
+ "fieldtype": "Date",
+ "label": "Start Date"
+ },
+ {
+ "fieldname": "column_break_egkq",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "end_date",
+ "fieldtype": "Date",
+ "label": "End Date"
+ },
+ {
+ "fieldname": "company_name",
+ "fieldtype": "Data",
+ "label": "Company Name"
+ },
+ {
+ "default": "0",
+ "fieldname": "hybrid_servers_enabled",
+ "fieldtype": "Check",
+ "label": "Hybrid Servers Enabled"
+ },
+ {
+ "default": "0",
+ "fieldname": "hetzner_internal_user",
+ "fieldtype": "Check",
+ "label": "Hetzner Internal User"
+ },
+ {
+ "default": "0",
+ "fieldname": "allow_unified_servers",
+ "fieldtype": "Check",
+ "label": "Allow Unified Servers"
}
],
+ "grid_page_length": 50,
"links": [
{
"group": "General",
@@ -367,6 +698,11 @@
"link_doctype": "Balance Transaction",
"link_fieldname": "team"
},
+ {
+ "group": "Billing",
+ "link_doctype": "Stripe Webhook Log",
+ "link_fieldname": "team"
+ },
{
"group": "Marketplace",
"link_doctype": "Marketplace App",
@@ -376,9 +712,19 @@
"group": "Marketplace",
"link_doctype": "Marketplace Publisher Profile",
"link_fieldname": "team"
+ },
+ {
+ "group": "Billing",
+ "link_doctype": "Stripe Payment Method",
+ "link_fieldname": "team"
+ },
+ {
+ "group": "Billing",
+ "link_doctype": "Payment Due Extension",
+ "link_fieldname": "team"
}
],
- "modified": "2023-07-18 11:19:01.393975",
+ "modified": "2026-01-13 20:23:37.626353",
"modified_by": "Administrator",
"module": "Press",
"name": "Team",
@@ -402,9 +748,19 @@
"read": 1,
"role": "Press Admin",
"write": 1
+ },
+ {
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "report": 1,
+ "role": "Press Marketplace Manager",
+ "select": 1,
+ "share": 1
}
],
"quick_entry": 1,
+ "row_format": "Dynamic",
"show_title_field_in_link": 1,
"sort_field": "modified",
"sort_order": "DESC",
diff --git a/press/press/doctype/team/team.py b/press/press/doctype/team/team.py
index 73a64762a8d..cc27ce1730a 100644
--- a/press/press/doctype/team/team.py
+++ b/press/press/doctype/team/team.py
@@ -1,81 +1,280 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
import os
-import frappe
+from hashlib import blake2b
+from typing import TYPE_CHECKING
+import frappe
from frappe import _
+from frappe.contacts.address_and_contact import load_address_and_contact
from frappe.core.utils import find
-from typing import List
-from hashlib import blake2b
-from press.utils import log_error
-from frappe.utils import get_fullname
-from frappe.utils import get_url_to_form
-from press.telegram_utils import Telegram
from frappe.model.document import Document
+from frappe.query_builder.functions import Count
+from frappe.rate_limiter import rate_limit
+from frappe.utils import get_fullname, get_last_day, get_url_to_form, getdate, random_string
+
+from press.api.client import dashboard_whitelist
from press.exceptions import FrappeioServerNotSet
-from frappe.contacts.address_and_contact import load_address_and_contact
-from press.press.doctype.account_request.account_request import AccountRequest
-from press.marketplace.doctype.marketplace_app_subscription.marketplace_app_subscription import (
- process_prepaid_marketplace_payment,
-)
+from press.press.doctype.communication_info.communication_info import get_communication_info
+from press.press.doctype.telegram_message.telegram_message import TelegramMessage
+from press.utils import get_valid_teams_for_user, has_role, log_error
from press.utils.billing import (
- get_erpnext_com_connection,
get_frappe_io_connection,
get_stripe,
+ is_frappe_auth_disabled,
process_micro_debit_test_charge,
)
from press.utils.telemetry import capture
+if TYPE_CHECKING:
+ from press.press.doctype.account_request.account_request import AccountRequest
+
class Team(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.child_team_member.child_team_member import ChildTeamMember
+ from press.press.doctype.communication_info.communication_info import CommunicationInfo
+ from press.press.doctype.invoice_discount.invoice_discount import InvoiceDiscount
+ from press.press.doctype.team_member.team_member import TeamMember
+
+ account_request: DF.Link | None
+ allow_unified_servers: DF.Check
+ apply_npo_discount: DF.Check
+ benches_enabled: DF.Check
+ billing_address: DF.Link | None
+ billing_name: DF.Data | None
+ billing_team: DF.Link | None
+ child_team_members: DF.Table[ChildTeamMember]
+ code_servers_enabled: DF.Check
+ communication_infos: DF.Table[CommunicationInfo]
+ company_logo: DF.Attach | None
+ company_name: DF.Data | None
+ country: DF.Link | None
+ currency: DF.Link | None
+ customers: DF.SmallText | None
+ database_access_enabled: DF.Check
+ default_payment_method: DF.Link | None
+ discounts: DF.Table[InvoiceDiscount]
+ enable_inplace_updates: DF.Check
+ enable_performance_tuning: DF.Check
+ enabled: DF.Check
+ end_date: DF.Date | None
+ enforce_2fa: DF.Check
+ erpnext_partner: DF.Check
+ extend_payment_due_suspension: DF.Check
+ frappe_partnership_date: DF.Date | None
+ free_account: DF.Check
+ free_credits_allocated: DF.Check
+ github_access_token: DF.Data | None
+ hetzner_internal_user: DF.Check
+ hybrid_servers_enabled: DF.Check
+ introduction: DF.SmallText | None
+ is_code_server_user: DF.Check
+ is_developer: DF.Check
+ is_saas_user: DF.Check
+ is_us_eu: DF.Check
+ last_used_team: DF.Link | None
+ monthly_alert_threshold: DF.Currency
+ mpesa_enabled: DF.Check
+ mpesa_phone_number: DF.Data | None
+ mpesa_tax_id: DF.Data | None
+ parent_team: DF.Link | None
+ partner_commission: DF.Percent
+ partner_email: DF.Data | None
+ partner_manager: DF.Link | None
+ partner_referral_code: DF.Data | None
+ partner_status: DF.Literal["Active", "Inactive"]
+ partner_tier: DF.Link | None
+ partnership_date: DF.Date | None
+ payment_mode: DF.Literal["", "Card", "Prepaid Credits", "Paid By Partner"]
+ razorpay_enabled: DF.Check
+ receive_budget_alerts: DF.Check
+ referrer_id: DF.Data | None
+ security_portal_enabled: DF.Check
+ self_hosted_servers_enabled: DF.Check
+ send_notifications: DF.Check
+ servers_enabled: DF.Check
+ skip_backups: DF.Check
+ skip_onboarding: DF.Check
+ ssh_access_enabled: DF.Check
+ start_date: DF.Date | None
+ stripe_customer_id: DF.Data | None
+ team_members: DF.Table[TeamMember]
+ team_title: DF.Data | None
+ user: DF.Link | None
+ via_erpnext: DF.Check
+ website_link: DF.Data | None
+ # end: auto-generated types
+
+ dashboard_fields = (
+ "enabled",
+ "team_title",
+ "user",
+ "partner_email",
+ "erpnext_partner",
+ "enforce_2fa",
+ "billing_team",
+ "team_members",
+ "child_team_members",
+ "country",
+ "currency",
+ "payment_mode",
+ "default_payment_method",
+ "skip_backups",
+ "is_saas_user",
+ "billing_name",
+ "referrer_id",
+ "partner_referral_code",
+ "parent_team",
+ "is_developer",
+ "enable_performance_tuning",
+ "enable_inplace_updates",
+ "servers_enabled",
+ "benches_enabled",
+ "mpesa_tax_id",
+ "mpesa_phone_number",
+ "mpesa_enabled",
+ "razorpay_enabled",
+ "account_request",
+ "partner_status",
+ "receive_budget_alerts",
+ "monthly_alert_threshold",
+ "company_name",
+ "hybrid_servers_enabled",
+ )
+
+ def get_doc(self, doc):
+ if (
+ not frappe.local.system_user()
+ and self.user != frappe.session.user
+ and frappe.session.user not in self.get_user_list()
+ ):
+ frappe.throw("You are not allowed to access this document")
+
+ user = frappe.db.get_value(
+ "User",
+ frappe.session.user,
+ ["name", "first_name", "last_name", "user_image", "user_type", "email", "api_key"],
+ as_dict=True,
+ )
+ user.is_2fa_enabled = frappe.db.get_value("User 2FA", {"user": user.name}, "enabled")
+ doc.user_info = user
+ doc.balance = self.get_balance()
+ doc.is_desk_user = user.user_type == "System User"
+ doc.is_support_agent = has_role("Press Support Agent")
+ doc.can_request_access = has_role("Press Support Agent")
+ doc.valid_teams = get_valid_teams_for_user(frappe.session.user)
+ doc.onboarding = self.get_onboarding()
+ doc.billing_info = self.billing_info()
+ doc.billing_details = self.billing_details()
+ doc.trial_sites = self.get_trial_sites()
+ doc.pending_site_request = self.get_pending_saas_site_request()
+ doc.payment_method = frappe.db.get_value(
+ "Stripe Payment Method",
+ {"team": self.name, "name": self.default_payment_method},
+ [
+ "name",
+ "last_4",
+ "name_on_card",
+ "expiry_month",
+ "expiry_year",
+ "brand",
+ "stripe_mandate_id",
+ ],
+ as_dict=True,
+ )
+ doc.communication_infos = self.get_communication_infos()
+ doc.receive_budget_alerts = self.receive_budget_alerts
+ doc.monthly_alert_threshold = self.monthly_alert_threshold
+ doc.is_binlog_indexer_enabled = not frappe.db.get_single_value(
+ "Press Settings", "disable_binlog_indexer_service", cache=True
+ )
+
def onload(self):
load_address_and_contact(self)
+ @frappe.whitelist()
+ def get_home_data(self):
+ return {
+ "sites": frappe.db.get_all(
+ "Site",
+ {"team": self.name, "status": ["!=", "Archived"]},
+ ["name", "host_name", "status"],
+ ),
+ }
+
def validate(self):
self.validate_duplicate_members()
self.set_team_currency()
self.set_default_user()
self.set_billing_name()
self.set_partner_email()
+ self.unset_saas_team_type_if_required()
+ self.validate_disable()
+ self.validate_billing_team()
def before_insert(self):
- if not self.notify_email:
- self.notify_email = self.user
+ self.currency = "INR" if self.country == "India" else "USD"
if not self.referrer_id:
self.set_referrer_id()
- self.set_partner_payment_mode()
-
def set_referrer_id(self):
h = blake2b(digest_size=4)
h.update(self.user.encode())
self.referrer_id = h.hexdigest()
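# Worked example (hypothetical email): blake2b with digest_size=4 produces a
# 4-byte digest, so hexdigest() is a stable 8-character hex string; every team
# created for "jane@example.com" gets the same referrer_id.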
- def set_partner_payment_mode(self):
- if self.erpnext_partner:
- self.payment_mode = "Partner Credits"
-
def set_partner_email(self):
if self.erpnext_partner and not self.partner_email:
self.partner_email = self.user
+ def validate_disable(self):
+ if self.has_value_changed("enabled") and self.enabled == 0 and has_unsettled_invoices(self.name):
+ frappe.throw(
+ "Cannot disable team with Draft or Unpaid invoices. Please finalize and settle the pending invoices first"
+ )
+
+ def validate_billing_team(self):
+ if self.payment_mode != "Paid By Partner":
+ return
+
+ if self.payment_mode == "Paid By Partner" and not self.billing_team:
+ frappe.throw("Billing Team is mandatory for Paid By Partner payment mode")
+
+ if self.payment_mode == "Paid By Partner" and has_unsettled_invoices(self.name):
+ frappe.throw(
+ "Cannot set payment mode to Paid By Partner. Please finalize and settle the pending invoices first"
+ )
+
def delete(self, force=False, workflow=False):
+ if not (force or workflow):
+ frappe.throw(
+ f"You are only deleting the Team Document for {self.name}. To continue to"
+ " do so, pass force=True with this call. Else, pass workflow=True to raise"
+ " a Team Deletion Request to trigger complete team deletion process."
+ )
+
if force:
return super().delete()
if workflow:
- return frappe.get_doc(
- {"doctype": "Team Deletion Request", "team": self.name}
- ).insert()
+ return frappe.get_doc({"doctype": "Team Deletion Request", "team": self.name}).insert()
frappe.throw(
f"You are only deleting the Team Document for {self.name}. To continue to"
" do so, pass force=True with this call. Else, pass workflow=True to raise"
" a Team Deletion Request to trigger complete team deletion process."
)
+ return None
def disable_account(self):
self.suspend_sites("Account disabled")
@@ -95,14 +294,14 @@ def create_new(
account_request: AccountRequest,
first_name: str,
last_name: str,
- password: str = None,
- country: str = None,
+ password: str | None = None,
+ country: str | None = None,
is_us_eu: bool = False,
via_erpnext: bool = False,
user_exists: bool = False,
):
"""Create new team along with user (user created first)."""
- team = frappe.get_doc(
+ team: "Team" = frappe.get_doc(
{
"doctype": "Team",
"user": account_request.email,
@@ -123,16 +322,18 @@ def create_new(
user.append_roles(account_request.role)
user.save(ignore_permissions=True)
+ if frappe.db.exists("Team", {"user": user.name}):
+ frappe.throw("You have already an account with same email. Please login using the same email.")
+
team.team_title = "Parent Team"
team.insert(ignore_permissions=True, ignore_links=True)
team.append("team_members", {"user": user.name})
- if not account_request.invited_by_parent_team:
- team.append("communication_emails", {"type": "invoices", "value": user.name})
- team.append(
- "communication_emails", {"type": "marketplace_notifications", "value": user.name}
- )
- else:
+ if account_request.invited_by_parent_team:
team.parent_team = account_request.invited_by
+
+ if account_request.product_trial:
+ team.is_saas_user = 1
+
team.save(ignore_permissions=True)
team.create_stripe_customer()
@@ -140,13 +341,8 @@ def create_new(
if account_request.referrer_id:
team.create_referral_bonus(account_request.referrer_id)
- if not team.via_erpnext:
- if not account_request.invited_by_parent_team:
- team.create_upcoming_invoice()
- # TODO: Partner account moved to PRM
- if team.has_partner_account_on_erpnext_com():
- team.enable_erpnext_partner_privileges()
-
+ if not team.via_erpnext and not account_request.invited_by_parent_team:
+ team.create_upcoming_invoice()
return team
@staticmethod
@@ -163,7 +359,14 @@ def create_user(first_name=None, last_name=None, email=None, password=None, role
return user
def create_user_for_member(
- self, first_name=None, last_name=None, email=None, password=None, role=None
+ self,
+ first_name=None,
+ last_name=None,
+ email=None,
+ password=None,
+ role=None,
+ press_roles=None,
+ skip_validations=False,
):
user = frappe.db.get_value("User", email, ["name"], as_dict=True)
if not user:
@@ -172,10 +375,31 @@ def create_user_for_member(
self.append("team_members", {"user": user.name})
self.save(ignore_permissions=True)
+ for role in press_roles or []:
+ frappe.get_doc("Press Role", role.press_role).add_user(
+ user.name,
+ skip_validations=skip_validations,
+ )
+
+ @dashboard_whitelist()
def remove_team_member(self, member):
member_to_remove = find(self.team_members, lambda x: x.user == member)
if member_to_remove:
self.remove(member_to_remove)
+
+ PressRole = frappe.qb.DocType("Press Role")
+ PressRoleUser = frappe.qb.DocType("Press Role User")
+ roles = (
+ frappe.qb.from_(PressRole)
+ .join(PressRoleUser)
+ .on((PressRoleUser.parent == PressRole.name) & (PressRoleUser.user == member))
+ .where(PressRole.team == self.name)
+ .select(PressRole.name)
+ .run(as_dict=True, pluck="name")
+ )
+
+ for role in roles:
+ frappe.get_doc("Press Role", role).remove_user(member)
else:
frappe.throw(f"Team member {frappe.bold(member)} does not exists")
@@ -185,6 +409,10 @@ def set_billing_name(self):
if not self.billing_name:
self.billing_name = frappe.utils.get_fullname(self.user)
+ def unset_saas_team_type_if_required(self):
+ if (self.servers_enabled or self.benches_enabled) and self.is_saas_user:
+ self.is_saas_user = 0
+
def set_default_user(self):
if not self.user and self.team_members:
self.user = self.team_members[0].user
@@ -213,17 +441,18 @@ def validate_duplicate_members(self):
frappe.DuplicateEntryError,
)
- def validate_payment_mode(self):
+ def validate_payment_mode(self): # noqa: C901
if not self.payment_mode and self.get_balance() > 0:
self.payment_mode = "Prepaid Credits"
if self.has_value_changed("payment_mode"):
- if self.payment_mode == "Card":
- if frappe.db.count("Stripe Payment Method", {"team": self.name}) == 0:
- frappe.throw("No card added")
- if self.payment_mode == "Prepaid Credits":
- if self.get_balance() <= 0:
- frappe.throw("Account does not have sufficient balance")
+ if (
+ self.payment_mode == "Card"
+ and frappe.db.count("Stripe Payment Method", {"team": self.name}) == 0
+ ):
+ frappe.throw("No card added")
+ if self.payment_mode == "Prepaid Credits" and self.get_balance() <= 0:
+ frappe.throw("Account does not have sufficient balance")
if not self.is_new() and not self.default_payment_method:
# if default payment method is unset
@@ -236,13 +465,31 @@ def validate_payment_mode(self):
doc.is_default = 0
doc.save()
+ # Telemetry: Payment Mode Changed Event (only for teams that came through FC Signup, not via invite)
+ if self.has_value_changed("payment_mode") and self.payment_mode and self.account_request:
+ old_doc = self.get_doc_before_save()
+ # Validate that the team has no payment method set previously
+ if (not old_doc) or (not old_doc.payment_mode):
+ ar: "AccountRequest" = frappe.get_doc("Account Request", self.account_request)
+ # Only capture if it's not a saas signup or invited by parent team
+ if not (ar.is_saas_signup() or ar.invited_by_parent_team):
+ capture("added_card_or_prepaid_credits", "fc_signup", self.user)
+
def on_update(self):
+ if not self.enabled:
+ return
+
self.validate_payment_mode()
self.update_draft_invoice_payment_mode()
+ self.check_budget_alert_threshold()
- if not self.is_new() and self.billing_name and not frappe.conf.allow_tests:
- if self.has_value_changed("billing_name"):
- self.update_billing_details_on_frappeio()
+ if (
+ not self.is_new()
+ and self.billing_name
+ and not frappe.conf.allow_tests
+ and self.has_value_changed("billing_name")
+ ):
+ self.update_billing_details_on_frappeio()
def update_draft_invoice_payment_mode(self):
if self.has_value_changed("payment_mode"):
@@ -253,6 +500,15 @@ def update_draft_invoice_payment_mode(self):
for invoice in draft_invoices:
frappe.db.set_value("Invoice", invoice, "payment_mode", self.payment_mode)
+ def check_budget_alert_threshold(self):
+ if self.receive_budget_alerts and self.has_value_changed("monthly_alert_threshold"):
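+ # reset the sent flag on this month's draft invoices so alerts are re-evaluated against the new threshold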
+ frappe.db.set_value(
+ "Invoice",
+ {"team": self.name, "docstatus": 0, "due_date": get_last_day(getdate())},
+ "budget_alert_sent",
+ 0,
+ )
+
@frappe.whitelist()
def impersonate(self, member, reason):
user = frappe.db.get_value("Team Member", member, "user")
@@ -272,33 +528,38 @@ def impersonate(self, member, reason):
@frappe.whitelist()
def enable_erpnext_partner_privileges(self):
self.erpnext_partner = 1
- self.partner_email = self.user
- self.payment_mode = "Partner Credits"
+ if not self.partner_email:
+ self.partner_email = self.user
+ self.frappe_partnership_date = self.get_partnership_start_date()
+ self.servers_enabled = 1
+ self.partner_status = "Active"
self.save(ignore_permissions=True)
+ frappe.get_doc("User", self.user).add_roles("Partner")
+ self.create_partner_referral_code()
@frappe.whitelist()
def disable_erpnext_partner_privileges(self):
- self.erpnext_partner = 0
+ self.partner_status = "Inactive"
self.save(ignore_permissions=True)
- # TODO: Maybe check if the partner had enough credits
- # for settlement and if not, change payment mode
+ frappe.get_doc("User", self.user).remove_roles("Partner")
- def allocate_free_credits(self):
- if self.via_erpnext:
- # dont allocate free credits for signups via erpnext
- # since they get a 14 day free trial site
- return
+ def create_partner_referral_code(self):
+ if not self.partner_referral_code:
+ self.partner_referral_code = random_string(10).upper()
+ self.save(ignore_permissions=True)
- if not self.free_credits_allocated:
- # allocate free credits on signup
- credits_field = "free_credits_inr" if self.currency == "INR" else "free_credits_usd"
- credit_amount = frappe.db.get_single_value("Press Settings", credits_field)
- if not credit_amount:
- return
- self.allocate_credit_amount(credit_amount, source="Free Credits")
- self.free_credits_allocated = 1
- self.save()
- self.reload()
+ def get_partnership_start_date(self):
+ if frappe.flags.in_test:
+ return frappe.utils.getdate()
+
+ if is_frappe_auth_disabled():
+ return frappe.utils.getdate()
+
+ client = get_frappe_io_connection()
+ data = client.get_value("Partner", "start_date", {"email": self.partner_email})
+ if not data:
+ frappe.throw("Partner not found on frappe.io")
+ return frappe.utils.getdate(data.get("start_date"))
def create_referral_bonus(self, referrer_id):
# Get team name with this referrer id
@@ -315,13 +576,20 @@ def is_defaulter(self):
return False
try:
- last_invoice = frappe.get_last_doc(
- "Invoice", filters={"docstatus": 0, "team": self.name}
+ unpaid_invoices = frappe.get_all(
+ "Invoice",
+ {
+ "status": "Unpaid",
+ "team": self.name,
+ "docstatus": ("<", 2),
+ "type": "Subscription",
+ },
+ pluck="name",
)
except frappe.DoesNotExistError:
return False
- return last_invoice.status == "Unpaid"
+ return unpaid_invoices
def create_stripe_customer(self):
if not self.stripe_customer_id:
@@ -330,10 +598,35 @@ def create_stripe_customer(self):
self.stripe_customer_id = customer.id
self.save()
+ @dashboard_whitelist()
+ def get_communication_infos(self):
+ return (
+ [{"channel": c.channel, "type": c.type, "value": c.value} for c in self.communication_infos]
+ if hasattr(self, "communication_infos")
+ else []
+ )
+
+ @dashboard_whitelist()
+ def update_communication_infos(self, values: list[dict]):
+ from press.press.doctype.communication_info.communication_info import (
+ update_communication_infos as update_infos,
+ )
+
+ update_infos("Team", self.name, values)
+
+ @frappe.whitelist()
def update_billing_details(self, billing_details):
if self.billing_address:
address_doc = frappe.get_doc("Address", self.billing_address)
+ if (address_doc.country != billing_details.country) and (
+ address_doc.country == "India" or billing_details.country == "India"
+ ):
+ frappe.throw("Cannot change country of billing address")
else:
+ if self.account_request:
+ ar: "AccountRequest" = frappe.get_doc("Account Request", self.account_request)
+ if not (ar.is_saas_signup() or ar.invited_by_parent_team):
+ capture("added_billing_address", "fc_signup", self.user)
address_doc = frappe.new_doc("Address")
address_doc.address_title = billing_details.billing_name or self.billing_name
address_doc.append(
@@ -346,7 +639,7 @@ def update_billing_details(self, billing_details):
"address_line1": billing_details.address,
"city": billing_details.city,
"state": billing_details.state,
- "pincode": billing_details.postal_code,
+ "pincode": billing_details.get("postal_code", "").strip().replace(" ", ""),
"country": billing_details.country,
"gstin": billing_details.gstin,
}
@@ -364,21 +657,24 @@ def update_billing_details(self, billing_details):
self.update_billing_details_on_draft_invoices()
def update_billing_details_on_draft_invoices(self):
- draft_invoices = frappe.get_all(
- "Invoice", {"team": self.name, "docstatus": 0}, pluck="name"
- )
+ draft_invoices = frappe.get_all("Invoice", {"team": self.name, "docstatus": 0}, pluck="name")
for draft_invoice in draft_invoices:
# Invoice.customer_name set by Invoice.validate()
frappe.get_doc("Invoice", draft_invoice).save()
def update_billing_details_on_frappeio(self):
+ if frappe.flags.in_install:
+ return
+
+ if is_frappe_auth_disabled():
+ return
+
try:
frappeio_client = get_frappe_io_connection()
except FrappeioServerNotSet as e:
if frappe.conf.developer_mode or os.environ.get("CI"):
return
- else:
- raise e
+ raise e
previous_version = self.get_doc_before_save()
@@ -391,13 +687,9 @@ def update_billing_details_on_frappeio(self):
if previous_billing_name and previous_billing_name != self.billing_name:
try:
frappeio_client.rename_doc("Customer", previous_billing_name, self.billing_name)
- frappe.msgprint(
- f"Renamed customer from {previous_billing_name} to {self.billing_name}"
- )
+ frappe.msgprint(f"Renamed customer from {previous_billing_name} to {self.billing_name}")
except Exception:
- log_error(
- "Failed to rename customer on frappe.io", traceback=frappe.get_traceback()
- )
+ log_error("Failed to rename customer on frappe.io", traceback=frappe.get_traceback())
def update_billing_details_on_stripe(self, address=None):
stripe = get_stripe()
@@ -416,22 +708,38 @@ def update_billing_details_on_stripe(self, address=None):
},
)
- def create_payment_method(self, payment_method_id, set_default=False):
+ def create_payment_method(
+ self,
+ payment_method_id,
+ setup_intent_id,
+ mandate_id,
+ mandate_reference,
+ set_default=False,
+ verified_with_micro_charge=False,
+ ):
stripe = get_stripe()
payment_method = stripe.PaymentMethod.retrieve(payment_method_id)
- doc = frappe.get_doc(
- {
- "doctype": "Stripe Payment Method",
- "stripe_payment_method_id": payment_method["id"],
- "last_4": payment_method["card"]["last4"],
- "name_on_card": payment_method["billing_details"]["name"],
- "expiry_month": payment_method["card"]["exp_month"],
- "expiry_year": payment_method["card"]["exp_year"],
- "team": self.name,
- }
- )
- doc.insert()
+ try:
+ doc = frappe.get_doc(
+ {
+ "doctype": "Stripe Payment Method",
+ "stripe_payment_method_id": payment_method["id"],
+ "last_4": payment_method["card"]["last4"],
+ "name_on_card": payment_method["billing_details"]["name"],
+ "expiry_month": payment_method["card"]["exp_month"],
+ "expiry_year": payment_method["card"]["exp_year"],
+ "brand": payment_method["card"]["brand"] or "",
+ "team": self.name,
+ "stripe_setup_intent_id": setup_intent_id,
+ "stripe_mandate_id": mandate_id if mandate_id else None,
+ "stripe_mandate_reference": mandate_reference if mandate_reference else None,
+ "is_verified_with_micro_charge": verified_with_micro_charge,
+ }
+ )
+ doc.insert()
+ except Exception:
+ frappe.log_error("Failed to create new Stripe Payment Method")
+ # re-raise so the code below never operates on an unsaved payment method
+ raise
# unsuspend sites on payment method added
self.unsuspend_sites(reason="Payment method added")
@@ -439,10 +747,7 @@ def create_payment_method(self, payment_method_id, set_default=False):
doc.set_default()
self.reload()
- # allocate credits if not already allocated
- self.allocate_free_credits()
- # Telemetry: Added card
- capture("added_card_or_prepaid_credits", "fc_signup", self.account_request)
+ self.remove_subscription_config_in_trial_sites()
return doc
@@ -456,6 +761,7 @@ def get_payment_methods(self):
"name_on_card",
"expiry_month",
"expiry_year",
+ "brand",
"is_default",
"creation",
],
@@ -492,48 +798,128 @@ def get_past_invoices(self):
invoice.formatted_total = frappe.utils.fmt_money(invoice.total, 2, invoice.currency)
invoice.stripe_link_expired = False
if invoice.status == "Unpaid":
+ invoice.formatted_amount_due = frappe.utils.fmt_money(invoice.amount_due, 2, invoice.currency)
days_diff = frappe.utils.date_diff(frappe.utils.now(), invoice.due_date)
if days_diff > 30:
invoice.stripe_link_expired = True
return invoices
- def allocate_credit_amount(self, amount, source, remark=None):
+ def allocate_credit_amount(self, amount, source, remark=None, type="Adjustment"):
doc = frappe.get_doc(
doctype="Balance Transaction",
team=self.name,
- type="Adjustment",
+ type=type,
source=source,
amount=amount,
description=remark,
)
doc.insert(ignore_permissions=True)
doc.submit()
- # change payment mode to prepaid credits if default is card or not set
- self.payment_mode = (
- "Prepaid Credits" if self.payment_mode != "Partner Credits" else self.payment_mode
- )
- self.save()
+
+ self.reload()
+ if not self.payment_mode:
+ self.validate_payment_mode()
+ self.save(ignore_permissions=True)
return doc
def get_available_credits(self):
def get_stripe_balance():
return self.get_stripe_balance()
- return frappe.cache().hget(
- "customer_available_credits", self.name, generator=get_stripe_balance
- )
+ return frappe.cache().hget("customer_available_credits", self.name, generator=get_stripe_balance)
def get_stripe_balance(self):
stripe = get_stripe()
customer_object = stripe.Customer.retrieve(self.stripe_customer_id)
- balance = (customer_object["balance"] * -1) / 100
- return balance
+ return (customer_object["balance"] * -1) / 100
+
+ def is_team_owner(self) -> bool:
+ """
+ Checks if the current user is the owner of the team.
+ """
+ return frappe.db.get_value("Team", self.name, "user") == frappe.session.user
+
+ def is_admin_user(self) -> bool:
+ """
+ Checks if the current user has admin access in the team via roles.
+ """
+ PressRole = frappe.qb.DocType("Press Role")
+ PressRoleUser = frappe.qb.DocType("Press Role User")
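+ # count admin-access Press Role memberships held by the session user in this team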
+ return (
+ frappe.qb.from_(PressRoleUser)
+ .left_join(PressRole)
+ .on(PressRole.name == PressRoleUser.parent)
+ .select(Count(PressRoleUser.name).as_("count"))
+ .where(PressRole.team == self.name)
+ .where(PressRoleUser.user == frappe.session.user)
+ .where(PressRole.admin_access == 1)
+ .run(as_dict=1)
+ .pop()
+ .get("count", 0)
+ > 0
+ )
+
+ @dashboard_whitelist()
+ def get_team_members(self):
+ return get_team_members(self.name)
+
+ @dashboard_whitelist()
+ @rate_limit(limit=10, seconds=60 * 60)
+ def invite_team_member(self, email, roles=None):
+ from frappe.utils.user import is_system_user
+
+ PressRole = frappe.qb.DocType("Press Role")
+ PressRoleUser = frappe.qb.DocType("Press Role User")
+
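+ # build (but don't run yet) a query for admin-access roles held by the session user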
+ has_admin_access = (
+ frappe.qb.from_(PressRole)
+ .select(PressRole.name)
+ .join(PressRoleUser)
+ .on((PressRole.name == PressRoleUser.parent) & (PressRoleUser.user == frappe.session.user))
+ .where(PressRole.team == self.name)
+ .where(PressRole.admin_access == 1)
+ )
+
+ if not is_system_user() and frappe.session.user != self.user and not has_admin_access.run():
+ frappe.throw(_("Only team owner or admins can invite team members"))
+
+ frappe.utils.validate_email_address(email, True)
+
+ if frappe.db.exists("Team Member", {"user": email, "parent": self.name, "parenttype": "Team"}):
+ frappe.throw(_("Team member already exists"))
+
+ if frappe.db.exists(
+ "Account Request",
+ {
+ "email": email,
+ "team": self.name,
+ "invited_by": ("is", "set"),
+ "request_key": ("is", "set"),
+ },
+ ):
+ frappe.throw("User has already been invited recently. Please try again later.")
+
+ account_request = frappe.get_doc(
+ {
+ "doctype": "Account Request",
+ "team": self.name,
+ "email": email,
+ "role": "Press Member",
+ "invited_by": self.user,
+ "send_email": True,
+ }
+ )
+
+ for role in roles or []:
+ account_request.append("press_roles", {"press_role": role})
+
+ account_request.insert()
@frappe.whitelist()
def get_balance(self):
- res = frappe.db.get_all(
+ res = frappe.get_all(
"Balance Transaction",
- filters={"team": self.name, "docstatus": 1},
+ filters={"team": self.name, "docstatus": 1, "type": ("!=", "Partnership Fee")},
order_by="creation desc",
limit=1,
pluck="ending_balance",
@@ -542,153 +928,276 @@ def get_balance(self):
return 0
return res[0]
- @frappe.whitelist()
- def get_available_partner_credits(self):
- client = get_frappe_io_connection()
- response = client.session.post(
- f"{client.url}/api/method/partner_relationship_management.api.get_partner_credit_balance",
- data={"email": self.partner_email},
- headers=client.headers,
- )
-
- if response.ok:
- res = response.json()
- message = res.get("message")
-
- if message.get("credit_balance") is not None:
- return message.get("credit_balance")
- else:
- error_message = message.get("error_message")
- log_error(
- "Partner Credit Fetch Error",
- team=self.name,
- email=self.partner_email,
- error_message=error_message,
- )
- frappe.throw(error_message)
-
- else:
- log_error(
- "Problem fetching partner credit balance from frappe.io",
- team=self.name,
- email=self.partner_email,
- response=response.text,
- )
- frappe.throw("Problem fetching partner credit balance.")
-
- def is_partner_and_has_enough_credits(self):
- return self.erpnext_partner and self.get_balance() > 0
-
- def has_partner_account_on_erpnext_com(self):
- if frappe.conf.developer_mode:
- return False
- erpnext_com = get_erpnext_com_connection()
- res = erpnext_com.get_value(
- "ERPNext Partner", "name", filters={"email": self.user, "status": "Approved"}
- )
- return res["name"] if res else None
-
- def can_create_site(self):
+ def can_create_site(self): # noqa: C901
why = ""
allow = (True, "")
- if self.free_account or self.parent_team:
+ if not self.enabled:
+ why = "You cannot create a new site because your account is disabled"
+ return (False, why)
+
+ if self.free_account or self.parent_team or self.billing_team:
return allow
- if self.payment_mode == "Partner Credits":
- if self.get_available_partner_credits() > 0:
+ if self.is_saas_user and not self.payment_mode:
+ if not frappe.db.get_all("Site", {"team": self.name}, limit=1):
return allow
- else:
- why = "Cannot create site due to insufficient partner credits"
+ why = "You have already created trial site in the past"
+
+ # allow user to create their first site without payment method
+ if not frappe.db.get_all("Site", {"team": self.name}, limit=1):
+ return allow
+
+ if not self.payment_mode:
+ why = "You cannot create a new site because your account doesn't have a valid payment method."
+ return (False, why)
if self.payment_mode == "Prepaid Credits":
- if self.get_balance() > 0:
+ # allow if the balance is greater than 0 or the team has more than 2 paid invoices
+ if (
+ self.get_balance() > 0
+ or frappe.db.count(
+ "Invoice",
+ {
+ "team": self.name,
+ "status": "Paid",
+ "amount_paid": ("!=", 0),
+ },
+ )
+ > 2
+ ):
return allow
- else:
- why = "Cannot create site due to insufficient balance"
+ why = "Cannot create site due to insufficient balance"
if self.payment_mode == "Card":
if self.default_payment_method:
return allow
- else:
- why = "Cannot create site without adding a card"
+ why = "Cannot create site without adding a card"
return (False, why)
def can_install_paid_apps(self):
- if self.free_account or self.payment_mode == "Partner Credits":
+ if self.free_account or self.billing_team or self.payment_mode:
return True
return bool(
- frappe.db.exists(
- "Invoice", {"team": self.name, "amount_paid": (">", 0), "status": "Paid"}
- )
+ frappe.db.exists("Invoice", {"team": self.name, "amount_paid": (">", 0), "status": "Paid"})
)
- def get_onboarding(self):
- if self.payment_mode == "Partner Credits":
- billing_setup = True
- else:
- billing_setup = bool(
- self.payment_mode in ["Card", "Prepaid Credits"]
- and (self.default_payment_method or self.get_balance() > 0)
- and self.billing_address
- )
+ def billing_info(self):
+ micro_debit_charge_field = (
+ "micro_debit_charge_usd" if self.currency == "USD" else "micro_debit_charge_inr"
+ )
+ amount = frappe.db.get_single_value("Press Settings", micro_debit_charge_field)
+
+ return {
+ "gst_percentage": frappe.db.get_single_value("Press Settings", "gst_percentage"),
+ "micro_debit_charge_amount": amount,
+ "balance": self.get_balance(),
+ "verified_micro_charge": bool(
+ frappe.db.exists(
+ "Stripe Payment Method", {"team": self.name, "is_verified_with_micro_charge": 1}
+ )
+ ),
+ "has_paid_before": bool(
+ frappe.db.exists("Invoice", {"team": self.name, "amount_paid": (">", 0), "status": "Paid"})
+ ),
+ "has_unpaid_invoices": bool(
+ frappe.db.exists("Invoice", {"team": self.name, "status": "Unpaid", "type": "Subscription"})
+ ),
+ }
+ def billing_details(self, timezone=None):
+ billing_details = frappe._dict()
+ if self.billing_address:
+ billing_details = frappe.get_doc("Address", self.billing_address).as_dict()
+ billing_details.billing_name = self.billing_name
+
+ if not billing_details.country and timezone:
+ from press.utils.country_timezone import get_country_from_timezone
+
+ billing_details.country = get_country_from_timezone(timezone)
+
+ return billing_details
+
+ def get_partner_level(self):
+ # fetch partner level from frappe.io
+ if frappe.flags.in_install:
+ return None
+
+ if is_frappe_auth_disabled():
+ return None
+
+ client = get_frappe_io_connection()
+ response = client.session.get(
+ f"{client.url}/api/method/get_partner_level",
+ headers=client.headers,
+ params={"email": self.partner_email},
+ )
+
+ if response.ok:
+ res = response.json()
+ partner_level = res.get("message")
+ certificate_count = res.get("certificates")
+ if partner_level:
+ return [partner_level, certificate_count]
+ return None
+
+ self.add_comment(text="Failed to fetch partner level" + " " + response.text)
+ return None
+
+ def is_payment_mode_set(self):
+ if self.payment_mode in ("Prepaid Credits", "Paid By Partner") or (
+ self.payment_mode == "Card" and self.default_payment_method and self.billing_address
+ ):
+ return True
+ return False
+
+ def get_onboarding(self):
site_created = frappe.db.count("Site", {"team": self.name}) > 0
+ saas_site_request = self.get_pending_saas_site_request()
+ is_payment_mode_set = self.is_payment_mode_set()
+ if not is_payment_mode_set and self.parent_team:
+ parent_team = frappe.get_cached_doc("Team", self.parent_team)
+ is_payment_mode_set = parent_team.is_payment_mode_set()
+
+ complete = bool(
+ self.skip_onboarding
+ or is_payment_mode_set
+ or frappe.db.get_value("User", self.user, "user_type") == "System User"
+ )
+
+ return frappe._dict(
+ {
+ "site_created": site_created,
+ "is_saas_user": bool(self.via_erpnext or self.is_saas_user),
+ "saas_site_request": saas_site_request,
+ "complete": complete,
+ "is_payment_mode_set": is_payment_mode_set,
+ }
+ )
- if self.via_erpnext:
- erpnext_domain = frappe.db.get_single_value("Press Settings", "erpnext_domain")
- erpnext_site = frappe.db.get_value(
- "Site",
- {"domain": erpnext_domain, "team": self.name, "status": ("!=", "Archived")},
- ["name", "plan"],
- as_dict=1,
- )
+ def get_route_on_login(self):
+ if self.payment_mode or self.skip_onboarding:
+ return "/sites"
- if erpnext_site is None:
- # Case: They have archived their ERPNext trial site
- # and created a frappe.cloud site now
- erpnext_site_plan_set = True
- else:
- erpnext_site_plan_set = erpnext_site.plan != "ERPNext Trial"
- else:
- erpnext_site = None
- erpnext_site_plan_set = True
+ if self.is_saas_user:
+ pending_site_request = self.get_pending_saas_site_request()
+ if pending_site_request:
+ return f"/create-site/{pending_site_request.product_trial}/setup?account_request={pending_site_request.account_request}"
- return {
- "account_created": True,
- "billing_setup": billing_setup,
- "erpnext_site": erpnext_site,
- "erpnext_site_plan_set": erpnext_site_plan_set,
- "site_created": site_created,
- "complete": billing_setup and site_created and erpnext_site_plan_set,
- }
+ return "/welcome"
+
+ def get_pending_saas_site_request(self):
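+ # a request that has already produced a site is no longer pending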
+ if frappe.db.exists("Product Trial Request", {"team": self.name, "status": "Site Created"}):
+ return None
+
+ return frappe.db.get_value(
+ "Product Trial Request",
+ {
+ "team": self.name,
+ "status": ("in", ["Pending", "Wait for Site", "Completing Setup Wizard", "Error"]),
+ },
+ ["name", "product_trial", "product_trial.title", "status", "account_request"],
+ order_by="creation desc",
+ as_dict=True,
+ )
+
+ def get_trial_sites(self):
+ return frappe.db.get_all(
+ "Site",
+ {
+ "team": self.name,
+ "is_standby": False,
+ "trial_end_date": ("is", "set"),
+ "status": ("!=", "Archived"),
+ },
+ ["name", "trial_end_date", "standby_for_product.title as product_title", "host_name"],
+ order_by="`tabSite`.`modified` desc",
+ )
@frappe.whitelist()
def suspend_sites(self, reason=None):
+ from press.press.doctype.site.site import Site
+
sites_to_suspend = self.get_sites_to_suspend()
for site in sites_to_suspend:
- frappe.get_doc("Site", site).suspend(reason)
+ try:
+ Site("Site", site).suspend(reason)
+ except Exception:
+ log_error("Failed to Suspend Sites", traceback=frappe.get_traceback())
return sites_to_suspend
def get_sites_to_suspend(self):
+ plan = frappe.qb.DocType("Site Plan")
+ plans = (
+ frappe.qb.from_(plan)
+ .select(plan.name)
+ .where((plan.enabled == 1) & ((plan.is_frappe_plan == 1) | (plan.is_trial_plan == 1)))
+ ).run(as_dict=True)
+ frappe_plans = [d.name for d in plans]
+
return frappe.db.get_all(
"Site",
- {"team": self.name, "status": ("in", ("Active", "Inactive")), "free": 0},
+ {
+ "team": self.name,
+ "status": ("not in", ("Archived", "Suspended")),
+ "free": 0,
+ "plan": ("not in", frappe_plans),
+ },
pluck="name",
)
+ def reallocate_workers_if_needed(
+ self, workloads_before: list[tuple[str, float, str]], workloads_after: list[tuple[str, float, str]]
+ ):
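+ # each workload tuple is presumably (bench, workload, server); a rise of 8+ (~100 USD of plan value) triggers worker auto-scaling on that server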
+ for before, after in zip(workloads_before, workloads_after, strict=False):
+ if after[1] - before[1] >= 8: # 100 USD equivalent
+ frappe.enqueue_doc(
+ "Server",
+ before[2],
+ method="auto_scale_workers",
+ job_id=f"auto_scale_workers:{before[2]}",
+ deduplicate=True,
+ enqueue_after_commit=True,
+ )
+
@frappe.whitelist()
def unsuspend_sites(self, reason=None):
+ from press.press.doctype.bench.bench import Bench
+ from press.press.doctype.site.site import Site
+
suspended_sites = [
d.name for d in frappe.db.get_all("Site", {"team": self.name, "status": "Suspended"})
]
+ workloads_before = list(Bench.get_workloads(suspended_sites))
for site in suspended_sites:
- frappe.get_doc("Site", site).unsuspend(reason)
+ Site("Site", site).unsuspend(reason)
+ workloads_after = list(Bench.get_workloads(suspended_sites))
+ self.reallocate_workers_if_needed(workloads_before, workloads_after)
+
return suspended_sites
- def get_upcoming_invoice(self):
+ def remove_subscription_config_in_trial_sites(self):
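+ # once the team pays, mark its trial sites as subscribed in their site config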
+ for site in frappe.db.get_all(
+ "Site",
+ {"team": self.name, "status": ("!=", "Archived"), "trial_end_date": ("is", "set")},
+ pluck="name",
+ ):
+ try:
+ frappe.get_doc("Site", site).update_site_config(
+ {
+ "subscription": {"status": "Subscribed"},
+ }
+ )
+ except Exception:
+ log_error("Failed to remove subscription config in trial sites")
+
+ def get_upcoming_invoice(self, for_update=False):
# get the current period's invoice
today = frappe.utils.today()
result = frappe.db.get_all(
@@ -705,7 +1214,8 @@ def get_upcoming_invoice(self):
pluck="name",
)
if result:
- return frappe.get_doc("Invoice", result[0])
+ return frappe.get_doc("Invoice", result[0], for_update=for_update)
+ return None
def create_upcoming_invoice(self):
today = frappe.utils.today()
@@ -713,36 +1223,20 @@ def create_upcoming_invoice(self):
doctype="Invoice", team=self.name, period_start=today, type="Subscription"
).insert()
- def notify_with_email(self, recipients: List[str], **kwargs):
- if not self.send_notifications:
- return
- if not recipients:
- recipients = [self.notify_email]
-
- frappe.sendmail(recipients=recipients, **kwargs)
-
@frappe.whitelist()
def send_telegram_alert_for_failed_payment(self, invoice):
- telegram = Telegram()
team_url = get_url_to_form("Team", self.name)
invoice_url = get_url_to_form("Invoice", invoice)
- telegram.send(
- f"Failed Invoice Payment [{invoice}]({invoice_url}) of"
- f" Partner: [{self.name}]({team_url})"
- )
+ message = f"Failed Invoice Payment [{invoice}]({invoice_url}) of Partner: [{self.name}]({team_url})"
+ TelegramMessage.enqueue(message=message)
@frappe.whitelist()
def send_email_for_failed_payment(self, invoice, sites=None):
invoice = frappe.get_doc("Invoice", invoice)
- email = (
- frappe.db.get_value(
- "Communication Email", {"parent": self.user, "type": "invoices"}, ["value"]
- )
- or self.user
- )
+ email = get_communication_info("Email", "Billing", "Team", self.name)
payment_method = self.default_payment_method
last_4 = frappe.db.get_value("Stripe Payment Method", payment_method, "last_4")
- account_update_link = frappe.utils.get_url("/dashboard/welcome")
+ account_update_link = frappe.utils.get_url("/dashboard")
subject = "Invoice Payment Failed for Frappe Cloud Subscription"
frappe.sendmail(
@@ -773,11 +1267,18 @@ def get_team_members(team):
if member_emails:
users = frappe.db.sql(
"""
- select u.name, u.first_name, u.last_name, GROUP_CONCAT(r.`role`) as roles
+ select
+ u.name,
+ u.first_name,
+ u.last_name,
+ u.full_name,
+ u.user_image,
+ u.name as email,
+ GROUP_CONCAT(r.`role`) as roles
from `tabUser` u
left join `tabHas Role` r
on (r.parent = u.name)
- where ifnull(u.name, '') in %s
+ where u.name in %s
group by u.name
""",
[member_emails],
@@ -797,9 +1298,7 @@ def get_child_team_members(team):
if frappe.get_value("Team", team, "parent_team"):
return []
- child_team_members = [
- d.name for d in frappe.db.get_all("Team", {"parent_team": team}, ["name"])
- ]
+ child_team_members = [d.name for d in frappe.db.get_all("Team", {"parent_team": team}, ["name"])]
child_teams = []
if child_team_members:
@@ -807,7 +1306,7 @@ def get_child_team_members(team):
"""
select t.name, t.team_title, t.parent_team, t.user
from `tabTeam` t
- where ifnull(t.name, '') in %s
+ where t.name in %s
and t.enabled = 1
""",
[child_team_members],
@@ -820,11 +1319,11 @@ def get_child_team_members(team):
def get_default_team(user):
if frappe.db.exists("Team", user):
return user
+ return None
def process_stripe_webhook(doc, method):
"""This method runs after a Stripe Webhook Log is created"""
- from datetime import datetime
if doc.event_type not in ["payment_intent.succeeded"]:
return
@@ -838,33 +1337,54 @@ def process_stripe_webhook(doc, method):
metadata = payment_intent.get("metadata")
payment_for = metadata.get("payment_for")
- if payment_for and payment_for == "prepaid_marketplace":
- process_prepaid_marketplace_payment(event)
- return
-
if payment_for and payment_for == "micro_debit_test_charge":
process_micro_debit_test_charge(event)
return
+ if payment_for and payment_for == "partnership_fee":
+ process_partnership_fee(payment_intent)
+ return
+
+ handle_payment_intent_succeeded(payment_intent)
+
+
+def handle_payment_intent_succeeded(payment_intent): # noqa: C901
+ from datetime import datetime
+
+ if isinstance(payment_intent, str):
+ stripe = get_stripe()
+ payment_intent = stripe.PaymentIntent.retrieve(payment_intent)
+
+ metadata = payment_intent.get("metadata")
+ if frappe.db.exists("Invoice", {"stripe_payment_intent_id": payment_intent["id"], "status": "Paid"}):
+ # skip if credits were already allocated for this payment intent
+ return
+
+ if not frappe.db.exists("Team", {"stripe_customer_id": payment_intent["customer"]}):
+ # might be a checkout session payment; keep the Stripe Webhook Log for reference
+ # TODO: handle checkout session payment
+ return
team: Team = frappe.get_doc("Team", {"stripe_customer_id": payment_intent["customer"]})
- amount = payment_intent["amount"] / 100
+ amount_with_tax = payment_intent["amount"] / 100
+ gst = float(metadata.get("gst", 0))
+ amount = amount_with_tax - gst
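+ # allocate credits net of GST; the tax component is carried on the invoice below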
balance_transaction = team.allocate_credit_amount(
amount, source="Prepaid Credits", remark=payment_intent["id"]
)
- # Give them free credits too (only first time)
- team.allocate_free_credits()
-
- # Telemetry: Added prepaid credits
- capture("added_card_or_prepaid_credits", "fc_signup", team.account_request)
+ team.remove_subscription_config_in_trial_sites()
invoice = frappe.get_doc(
doctype="Invoice",
team=team.name,
type="Prepaid Credits",
status="Paid",
due_date=datetime.fromtimestamp(payment_intent["created"]),
- amount_paid=amount,
+ total=amount,
amount_due=amount,
+ gst=gst or 0,
+ amount_due_with_tax=amount_with_tax,
+ amount_paid=amount_with_tax,
stripe_payment_intent_id=payment_intent["id"],
)
invoice.append(
@@ -879,13 +1399,35 @@ def process_stripe_webhook(doc, method):
)
invoice.insert()
invoice.reload()
- # there should only be one charge object
- charge = payment_intent["charges"]["data"][0]["id"]
- # update transaction amount, fee and exchange rate
- invoice.update_transaction_details(charge)
- invoice.submit()
- enqueue_finalize_unpaid_for_team(team.name)
+ if not team.payment_mode:
+ frappe.db.set_value("Team", team.name, "payment_mode", "Prepaid Credits")
+ if team.account_request:
+ ar: "AccountRequest" = frappe.get_doc("Account Request", team.account_request)
+ if not (ar.is_saas_signup() or ar.invited_by_parent_team):
+ capture("added_card_or_prepaid_credits", "fc_signup", team.user)
+
+ # latest stripe API sets charge id in latest_charge
+ charge = payment_intent.get("latest_charge")
+ if not charge:
+ # older stripe API sets charge id in charges.data
+ charges = payment_intent.get("charges", {}).get("data", [])
+ charge = charges[0]["id"] if charges else None
+ if charge:
+ # update transaction amount, fee and exchange rate
+ invoice.update_transaction_details(charge)
+ invoice.submit()
+
+ _enqueue_finalize_unpaid_invoices_for_team(team.name)
+
+
+def _enqueue_finalize_unpaid_invoices_for_team(team: str):
+ # Enqueue a background job that finalizes the team's unpaid invoices
+ frappe.enqueue(
+ "press.press.doctype.team.team.enqueue_finalize_unpaid_for_team",
+ team=team,
+ enqueue_after_commit=True,
+ )
def enqueue_finalize_unpaid_for_team(team: str):
@@ -896,12 +1438,68 @@ def enqueue_finalize_unpaid_for_team(team: str):
pluck="name",
)
- # Enqueue a background job to call finalize_draft_invoice
+ # finalize each unpaid invoice inline; this function already runs in a background job
for invoice in invoices:
- frappe.enqueue(
- "press.press.doctype.invoice.invoice.finalize_draft_invoice",
- invoice=invoice,
- )
+ doc = frappe.get_doc("Invoice", invoice)
+ doc.finalize_invoice()
+
+
+def process_partnership_fee(payment_intent):
+ from datetime import datetime
+
+ if isinstance(payment_intent, str):
+ stripe = get_stripe()
+ payment_intent = stripe.PaymentIntent.retrieve(payment_intent)
+
+ metadata = payment_intent.get("metadata")
+ if frappe.db.exists("Invoice", {"stripe_payment_intent_id": payment_intent["id"], "status": "Paid"}):
+ # ignore creating duplicate partnership fee invoice
+ return
+
+ team = frappe.get_doc("Team", {"stripe_customer_id": payment_intent["customer"]})
+ amount_with_tax = payment_intent["amount"] / 100
+ gst = float(metadata.get("gst", 0))
+ amount = amount_with_tax - gst
+ balance_transaction = team.allocate_credit_amount(
+ amount, source="Prepaid Credits", remark=payment_intent["id"], type="Partnership Fee"
+ )
+
+ invoice = frappe.get_doc(
+ doctype="Invoice",
+ team=team.name,
+ type="Partnership Fees",
+ status="Paid",
+ due_date=datetime.fromtimestamp(payment_intent["created"]),
+ total=amount,
+ amount_due=amount,
+ gst=gst or 0,
+ amount_due_with_tax=amount_with_tax,
+ amount_paid=amount_with_tax,
+ stripe_payment_intent_id=payment_intent["id"],
+ )
+ invoice.append(
+ "items",
+ {
+ "description": "Partnership Fee",
+ "document_type": "Balance Transaction",
+ "document_name": balance_transaction.name,
+ "quantity": 1,
+ "rate": amount,
+ },
+ )
+ invoice.insert()
+ invoice.reload()
+
+ # latest stripe API sets charge id in latest_charge
+ charge = payment_intent.get("latest_charge")
+ if not charge:
+ # older stripe API sets charge id in charges.data
+ charges = payment_intent.get("charges", {}).get("data", [])
+ charge = charges[0]["id"] if charges else None
+ if charge:
+ # update transaction amount, fee and exchange rate
+ invoice.update_transaction_details(charge)
+ invoice.submit()
def get_permission_query_conditions(user):
@@ -930,9 +1528,7 @@ def has_permission(doc, ptype, user):
return True
team = get_current_team(True)
- child_team_members = [
- d.name for d in frappe.db.get_all("Team", {"parent_team": team.name}, ["name"])
- ]
+ child_team_members = [d.name for d in frappe.db.get_all("Team", {"parent_team": team.name}, ["name"])]
if doc.name == team.name or doc.name in child_team_members:
return True
@@ -944,6 +1540,9 @@ def validate_site_creation(doc, method):
return
if not doc.team:
return
+ # allow product signups
+ if doc.standby_for_product:
+ return
# validate site creation for team
team = frappe.get_doc("Team", doc.team)
@@ -952,6 +1551,27 @@ def validate_site_creation(doc, method):
frappe.throw(why)
+def has_unsettled_invoices(team):
+ if not frappe.db.exists(
+ "Invoice", {"team": team, "status": ("in", ("Unpaid", "Draft")), "type": "Subscription"}
+ ):
+ return False
+
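+ # dues below a small minimum (5 USD / 450 INR) are not treated as unsettled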
+ currency = frappe.db.get_value("Team", team, "currency")
+ minimum_amount = 5
+ if currency == "INR":
+ minimum_amount = 450
+
+ data = frappe.get_all(
+ "Invoice",
+ {"team": team, "status": ("in", ("Unpaid", "Draft")), "type": "Subscription"},
+ ["sum(amount_due) as amount_due"],
+ )[0]
+ if data.amount_due <= minimum_amount:
+ return False
+ return True
+
+
def is_us_eu():
"""Is the customer from U.S. or European Union"""
from press.utils import get_current_team
@@ -993,3 +1613,96 @@ def is_us_eu():
"Mexico",
]
return frappe.db.get_value("Team", get_current_team(), "country") in countrygroup
+
+
+def check_budget_alerts():
+ """
+ Daily background job to check if teams have exceeded their monthly budget alert limits.
+ Sends email notifications for invoices that have crossed the set limit.
+ """
+ teams_with_budget_alert_enabled = frappe.get_all(
+ "Team",
+ filters={"receive_budget_alerts": 1, "monthly_alert_threshold": (">", 0), "enabled": 1},
+ fields=["name", "monthly_alert_threshold", "currency", "user"],
+ )
+
+ if not teams_with_budget_alert_enabled:
+ return
+
+ team_names = [team["name"] for team in teams_with_budget_alert_enabled]
+ team_dict = {team["name"]: team for team in teams_with_budget_alert_enabled}
+
+ current_month_end = get_last_day(getdate())
+
+ # Fetch current month invoices for all teams, filter out invoices that have already sent alerts
+ current_invoices = frappe.get_all(
+ "Invoice",
+ filters={
+ "team": ("in", team_names),
+ "due_date": current_month_end,
+ "status": "Draft",
+ "budget_alert_sent": 0,
+ },
+ fields=[
+ "name",
+ "team",
+ "total",
+ "period_start",
+ "period_end",
+ ],
+ order_by="creation desc",
+ )
+
+ invoices_to_update = [] # To keep track of invoices that need budget_alert_sent field update
+ for invoice in current_invoices:
+ team_name = invoice["team"]
+ monthly_limit = team_dict[team_name]["monthly_alert_threshold"]
+ if invoice["total"] > monthly_limit:
+ email_sent = send_budget_alert_email(team_dict[team_name], invoice)
+ if email_sent:
+ invoices_to_update.append(invoice["name"])
+
+ if invoices_to_update:
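+ # flag all alerted invoices as sent in a single bulk update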
+ Invoice = frappe.qb.DocType("Invoice")
+ (
+ frappe.qb.update(Invoice)
+ .set(Invoice.budget_alert_sent, 1)
+ .where(Invoice.name.isin(invoices_to_update))
+ ).run()
+
+
+def send_budget_alert_email(team_info, invoice):
+ """
+ Args:
+ team_info (dict)
+ invoice (dict): Invoice that exceeded the budget alert threshold
+ """
+ try:
+ team_user = team_info["user"]
+ currency = "₹" if team_info["currency"] == "INR" else "$"
+
+ invoice_amount = f"{currency}{invoice['total']}"
+ alert_threshold = f"{currency}{team_info['monthly_alert_threshold']}"
+ excess_amount = f"{currency}{round(invoice['total'] - team_info['monthly_alert_threshold'], 2)}"
+
+ subject = f"Frappe Cloud Budget Alert for {team_user}"
+
+ frappe.sendmail(
+ recipients=team_user,
+ subject=subject,
+ template="budget_alert",
+ args={
+ "team_user": team_user,
+ "invoice_amount": invoice_amount,
+ "alert_threshold": alert_threshold,
+ "excess_amount": excess_amount,
+ "period_start": invoice["period_start"],
+ "period_end": invoice["period_end"],
+ },
+ reference_doctype="Invoice",
+ reference_name=invoice["name"],
+ )
+ return True
+ except Exception as e:
+ frappe.log_error(f"Failed to send budget alert email: {team_info['user']}", {e})
+ return False
diff --git a/press/press/doctype/team/team_invoice.py b/press/press/doctype/team/team_invoice.py
index 25876c51838..b7802d2c767 100644
--- a/press/press/doctype/team/team_invoice.py
+++ b/press/press/doctype/team/team_invoice.py
@@ -3,9 +3,10 @@
import frappe
-from press.utils import log_error
from frappe.utils import getdate
+from press.utils import log_error
+
class TeamInvoice:
def __init__(self, team, month, year):
diff --git a/press/press/doctype/team/test_team.py b/press/press/doctype/team/test_team.py
index a579cb5916d..7286137be02 100644
--- a/press/press/doctype/team/test_team.py
+++ b/press/press/doctype/team/test_team.py
@@ -1,13 +1,12 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
+from __future__ import annotations
-
-import unittest
from unittest.mock import Mock, patch
import frappe
from frappe.tests.ui_test_helpers import create_test_user
+from frappe.tests.utils import FrappeTestCase
from press.press.doctype.account_request.test_account_request import (
create_test_account_request,
@@ -15,7 +14,9 @@
from press.press.doctype.team.team import Team
-def create_test_press_admin_team(email: str = None) -> Team:
+def create_test_press_admin_team(
+ email: str | None = None, skip_onboarding: bool | None = None, free_account: bool | None = None
+) -> Team:
"""Create test press admin user."""
if not email:
email = frappe.mock("email")
@@ -23,45 +24,53 @@ def create_test_press_admin_team(email: str = None) -> Team:
user = frappe.get_doc("User", {"email": email})
user.remove_roles(*frappe.get_all("Role", pluck="name"))
user.add_roles("Press Admin")
- return create_test_team(email)
+ return create_test_team(email, skip_onboarding=skip_onboarding, free_account=free_account)
@patch.object(Team, "update_billing_details_on_frappeio", new=Mock())
@patch.object(Team, "create_stripe_customer", new=Mock())
-def create_test_team(email: str = None, country="India") -> Team:
+def create_test_team(
+ email: str | None = None,
+ country="India",
+ free_account: bool | None = None,
+ skip_onboarding: bool | None = None,
+) -> Team:
"""Create test team doc."""
if not email:
email = frappe.mock("email")
create_test_user(email) # ignores if user already exists
user = frappe.get_value("User", {"email": email}, "name")
team = frappe.get_doc(
- {"doctype": "Team", "user": user, "enabled": 1, "country": country}
+ {
+ "doctype": "Team",
+ "user": user,
+ "enabled": 1,
+ "country": country,
+ "free_account": free_account,
+ "skip_onboarding": skip_onboarding,
+ }
).insert(ignore_if_duplicate=True)
team.reload()
+ # Create a fake account request
+ create_test_account_request(frappe.mock("name"), email=email)
return team
-class TestTeam(unittest.TestCase):
+class TestTeam(FrappeTestCase):
def tearDown(self):
frappe.db.rollback()
def test_create_new_method_works(self):
account_request = create_test_account_request("testsubdomain")
team_count_before = frappe.db.count("Team")
- with patch.object(Team, "create_stripe_customer"), patch.object(
- Team, "has_partner_account_on_erpnext_com"
- ):
- Team.create_new(
- account_request, "first name", "last name", "test@email.com", country="India"
- )
+ with patch.object(Team, "create_stripe_customer"):
+ Team.create_new(account_request, "first name", "last name", "test@email.com", country="India")
team_count_after = frappe.db.count("Team")
self.assertGreater(team_count_after, team_count_before)
def test_new_team_has_correct_billing_name(self):
account_request = create_test_account_request("testsubdomain")
- with patch.object(Team, "create_stripe_customer"), patch.object(
- Team, "has_partner_account_on_erpnext_com"
- ):
+ with patch.object(Team, "create_stripe_customer"):
team = Team.create_new(
account_request, "first name", "last name", "test@email.com", country="India"
)
@@ -74,6 +83,17 @@ def test_create_user_for_member_adds_team_member(self):
team = create_test_team()
email = "testuser@frappe.cloud"
team.create_user_for_member("test", "user", "testuser@frappe.cloud")
- self.assertTrue(
- team.has_member(email)
- ) # kinda dumb because we assume has_member method is correct
+ self.assertTrue(team.has_member(email)) # kinda dumb because we assume has_member method is correct
+
+ def test_new_team_has_correct_currency_set(self):
+ account_request1 = create_test_account_request("testsubdomain")
+ with patch.object(Team, "create_stripe_customer"):
+ team1 = Team.create_new(account_request1, "Jon", "Doe", "test@gmail.com", country="India")
+ self.assertEqual(team1.currency, "INR")
+
+ account_request2 = create_test_account_request("testsubdomain2")
+ with patch.object(Team, "create_stripe_customer"):
+ team2 = Team.create_new(
+ account_request2, "John", "Meyer", "jonmeyer@gmail.com", country="Pakistan"
+ )
+ self.assertEqual(team2.currency, "USD")
diff --git a/press/press/doctype/team_change/__init__.py b/press/press/doctype/team_change/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/team_change/team_change.js b/press/press/doctype/team_change/team_change.js
new file mode 100644
index 00000000000..ae8fa020428
--- /dev/null
+++ b/press/press/doctype/team_change/team_change.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2024, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Team Change", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/team_change/team_change.json b/press/press/doctype/team_change/team_change.json
new file mode 100644
index 00000000000..9af570dbcf6
--- /dev/null
+++ b/press/press/doctype/team_change/team_change.json
@@ -0,0 +1,118 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2024-01-16 11:13:22.843011",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "document_type",
+ "document_name",
+ "from_team",
+ "to_team",
+ "transfer_completed",
+ "reason",
+ "key"
+ ],
+ "fields": [
+ {
+ "fieldname": "document_type",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Document Type",
+ "options": "DocType",
+ "reqd": 1
+ },
+ {
+ "fieldname": "from_team",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "From Team",
+ "options": "Team",
+ "reqd": 1
+ },
+ {
+ "fieldname": "to_team",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "To Team",
+ "options": "Team",
+ "reqd": 1
+ },
+ {
+ "fieldname": "document_name",
+ "fieldtype": "Dynamic Link",
+ "in_list_view": 1,
+ "label": "Document Name",
+ "options": "document_type",
+ "reqd": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "transfer_completed",
+ "fieldtype": "Check",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Transfer Completed"
+ },
+ {
+ "fieldname": "reason",
+ "fieldtype": "Long Text",
+ "label": "Reason"
+ },
+ {
+ "fieldname": "key",
+ "fieldtype": "Data",
+ "label": "Key",
+ "read_only": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2025-02-20 13:46:49.448661",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Team Change",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/team_change/team_change.py b/press/press/doctype/team_change/team_change.py
new file mode 100644
index 00000000000..8326e4a72d8
--- /dev/null
+++ b/press/press/doctype/team_change/team_change.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+from __future__ import annotations
+
+import frappe
+from frappe.model.document import Document
+
+
+class TeamChange(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ document_name: DF.DynamicLink
+ document_type: DF.Link
+ from_team: DF.Link
+ key: DF.Data | None
+ reason: DF.LongText | None
+ to_team: DF.Link
+ transfer_completed: DF.Check
+ # end: auto-generated types
+
+ def validate(self):
+ team = frappe.get_doc(self.document_type, self.document_name).team
+ if team != self.from_team:
+ frappe.throw(f"The owner of {self.document_type} is not {self.from_team}")
+
+ def on_update(self):
+ if self.document_type == "Site" and self.transfer_completed:
+ frappe.db.set_value("Site", self.document_name, "team", self.to_team)
+
+ frappe.db.set_value(
+ "Subscription",
+ {"document_name": self.document_name},
+ "team",
+ self.to_team,
+ )
+
+ frappe.db.set_value("Site Domain", {"site": self.document_name}, "team", self.to_team)
+ tls_certificates = frappe.get_all(
+ "Site Domain",
+ filters={"site": self.document_name},
+ fields=["tls_certificate"],
+ pluck="tls_certificate",
+ )
+ frappe.db.set_value("TLS Certificate", {"name": ["in", tls_certificates]}, "team", self.to_team)
+
+ frappe.db.set_value("Site Backup", {"site": self.document_name}, "team", self.to_team)
+
+ frappe.db.set_value("Site Database User", {"site": self.document_name}, "team", self.to_team)
+
+ if self.document_type == "Release Group" and self.transfer_completed:
+ frappe.db.set_value("Release Group", self.document_name, "team", self.to_team)
diff --git a/press/press/doctype/team_change/test_team_change.py b/press/press/doctype/team_change/test_team_change.py
new file mode 100644
index 00000000000..705a19603d0
--- /dev/null
+++ b/press/press/doctype/team_change/test_team_change.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2024, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class TestTeamChange(FrappeTestCase):
+ pass
diff --git a/press/press/doctype/team_deletion_request/team_deletion_request.py b/press/press/doctype/team_deletion_request/team_deletion_request.py
index a8e27220f55..8c2a4a5a6dc 100644
--- a/press/press/doctype/team_deletion_request/team_deletion_request.py
+++ b/press/press/doctype/team_deletion_request/team_deletion_request.py
@@ -1,13 +1,12 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2021, Frappe and contributors
# For license information, please see license.txt
import frappe
+from frappe.core.utils import find
from frappe.utils.verified_command import get_signed_params
from frappe.website.doctype.personal_data_deletion_request.personal_data_deletion_request import (
PersonalDataDeletionRequest,
)
-from frappe.core.utils import find
def handle_exception(self):
@@ -18,6 +17,31 @@ def handle_exception(self):
class TeamDeletionRequest(PersonalDataDeletionRequest):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+ from frappe.website.doctype.personal_data_deletion_step.personal_data_deletion_step import (
+ PersonalDataDeletionStep,
+ )
+
+ from press.press.doctype.team_member_deletion_request.team_member_deletion_request import (
+ TeamMemberDeletionRequest,
+ )
+
+ data_anonymized: DF.Check
+ deletion_steps: DF.Table[PersonalDataDeletionStep]
+ frappeio_data_deleted: DF.Check
+ status: DF.Literal["Pending Verification", "Deletion Verified", "Processing Deletion", "Deleted"]
+ stripe_data_deleted: DF.Check
+ team: DF.Link
+ team_disabled: DF.Check
+ users_anonymized: DF.Table[TeamMemberDeletionRequest]
+ # end: auto-generated types
+
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.email = frappe.db.get_value("Team", self.team, "user")
@@ -55,28 +79,18 @@ def team_doc(self):
return frappe.get_cached_doc("Team", self.team)
def rename_team_on_data_deletion(self):
- if (
- self.status == "Deleted"
- and self.name != self.team
- and frappe.db.exists("Team", self.team)
- ):
+ if self.status == "Deleted" and self.name != self.team and frappe.db.exists("Team", self.team):
frappe.rename_doc("Team", self.team, self.name)
def validate_team_owner(self):
- if (
- self.team_doc.user == frappe.session.user or "System Manager" in frappe.get_roles()
- ):
+ if self.team_doc.user == frappe.session.user or "System Manager" in frappe.get_roles():
return
- frappe.throw(
- "You need to be a Team owner to request account deletion", exc=frappe.PermissionError
- )
+ frappe.throw("You need to be a Team owner to request account deletion", exc=frappe.PermissionError)
def validate_duplicate_request(self):
if frappe.db.exists(self.doctype, {"team": self.team}):
- frappe.throw(
- f"{self.doctype} for {self.team} already exists!", exc=frappe.DuplicateEntryError
- )
+ frappe.throw(f"{self.doctype} for {self.team} already exists!", exc=frappe.DuplicateEntryError)
def delete_team_data(self):
self.db_set("status", "Processing Deletion")
@@ -142,7 +156,11 @@ def delete_stripe_customer(self):
@handle_exc
def delete_data_on_frappeio(self):
"""Anonymize data on frappe.io"""
- from press.utils.billing import get_frappe_io_connection
+ from press.utils.billing import get_frappe_io_connection, is_frappe_auth_disabled
+
+ if is_frappe_auth_disabled():
+ self.db_set("frappeio_data_deleted", True, commit=True)
+ return
client = get_frappe_io_connection()
response = client.session.delete(
@@ -165,14 +183,10 @@ def numerate_email(x, i):
members_only_in_this_team = [
user
for user in team_members
- if not frappe.db.exists(
- "Team Member", {"user": user, "parent": ("!=", self.team_doc.name)}
- )
+ if not frappe.db.exists("Team Member", {"user": user, "parent": ("!=", self.team_doc.name)})
]
- renamed_dict = {
- x: numerate_email(self.name, i) for i, x in enumerate(members_only_in_this_team)
- }
+ renamed_dict = {x: numerate_email(self.name, i) for i, x in enumerate(members_only_in_this_team)}
for now, then in renamed_dict.items():
self.append(
diff --git a/press/press/doctype/team_deletion_request/test_team_deletion_request.py b/press/press/doctype/team_deletion_request/test_team_deletion_request.py
index 45b68535120..f8f79e4e61e 100644
--- a/press/press/doctype/team_deletion_request/test_team_deletion_request.py
+++ b/press/press/doctype/team_deletion_request/test_team_deletion_request.py
@@ -1,30 +1,31 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2021, Frappe and Contributors
# See license.txt
-import unittest
-
import frappe
-import requests
+from frappe.tests.utils import FrappeTestCase
+
from press.press.doctype.team.test_team import create_test_team
from press.press.doctype.team_deletion_request.team_deletion_request import (
TeamDeletionRequest,
)
+from press.utils.test import request_locally_with_host_rewrite
+
+class TestTeamDeletionRequest(FrappeTestCase):
+ def setUp(self) -> None:
+ super().setUp()
+ self.team = create_test_team()
-class TestTeamDeletionRequest(unittest.TestCase):
- @classmethod
- def setUpClass(cls) -> None:
- cls.team = create_test_team()
- return super().setUpClass()
+ def tearDown(self) -> None:
+ super().tearDown()
+ frappe.db.delete("Team", {"name": self.team.name})
+ frappe.db.truncate("Team Deletion Request")
@property
def team_deletion_request(self):
if not getattr(self, "_tdr", None):
try:
- self._tdr = frappe.get_last_doc(
- "Team Deletion Request", filters={"team": self.team.name}
- )
+ self._tdr = frappe.get_last_doc("Team Deletion Request", filters={"team": self.team.name})
except frappe.DoesNotExistError:
self._tdr = self.team.delete(workflow=True)
return self._tdr
@@ -39,13 +40,11 @@ def test_team_doc_deletion(self):
def test_url_for_verification(self):
deletion_url = self.team_deletion_request.generate_url_for_confirmation()
self.assertTrue(
- deletion_url.startswith(
- frappe.utils.get_url("/api/method/press.api.account.delete_team")
- )
+ deletion_url.startswith(frappe.utils.get_url("/api/method/press.api.account.delete_team"))
)
def test_team_deletion_api(self):
# TODO: Test if the API flow actually sets the status
deletion_url = self.team_deletion_request.generate_url_for_confirmation()
- res = requests.get(deletion_url, allow_redirects=True)
+ res = request_locally_with_host_rewrite(deletion_url, allow_redirects=True)
self.assertTrue(res.ok)
diff --git a/press/press/doctype/team_member/team_member.py b/press/press/doctype/team_member/team_member.py
index 8c40819dafb..47d74dd899f 100644
--- a/press/press/doctype/team_member/team_member.py
+++ b/press/press/doctype/team_member/team_member.py
@@ -8,4 +8,18 @@
class TeamMember(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ user: DF.Link
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/team_member_deletion_request/team_member_deletion_request.py b/press/press/doctype/team_member_deletion_request/team_member_deletion_request.py
index a2f89f5a19c..b437db6f518 100644
--- a/press/press/doctype/team_member_deletion_request/team_member_deletion_request.py
+++ b/press/press/doctype/team_member_deletion_request/team_member_deletion_request.py
@@ -8,4 +8,20 @@
class TeamMemberDeletionRequest(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ anon_team_member: DF.Data | None
+ deletion_status: DF.Literal["Pending", "Deleted"]
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ team_member: DF.Link | None
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/team_member_deletion_request/test_team_member_deletion_request.py b/press/press/doctype/team_member_deletion_request/test_team_member_deletion_request.py
index ac4662f8125..b4c5323a454 100644
--- a/press/press/doctype/team_member_deletion_request/test_team_member_deletion_request.py
+++ b/press/press/doctype/team_member_deletion_request/test_team_member_deletion_request.py
@@ -1,11 +1,10 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2021, Frappe and Contributors
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestTeamMemberDeletionRequest(unittest.TestCase):
+class TestTeamMemberDeletionRequest(FrappeTestCase):
pass
diff --git a/press/press/doctype/team_member_impersonation/team_member_impersonation.py b/press/press/doctype/team_member_impersonation/team_member_impersonation.py
index 3701ff9c0d8..a5700fcd995 100644
--- a/press/press/doctype/team_member_impersonation/team_member_impersonation.py
+++ b/press/press/doctype/team_member_impersonation/team_member_impersonation.py
@@ -8,4 +8,19 @@
class TeamMemberImpersonation(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ impersonator: DF.Link
+ member: DF.Link
+ reason: DF.TextEditor
+ team: DF.Link
+ user: DF.Link
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/team_member_impersonation/test_team_member_impersonation.py b/press/press/doctype/team_member_impersonation/test_team_member_impersonation.py
index 9d254b1b03b..6e012a2c56f 100644
--- a/press/press/doctype/team_member_impersonation/test_team_member_impersonation.py
+++ b/press/press/doctype/team_member_impersonation/test_team_member_impersonation.py
@@ -1,11 +1,10 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestTeamMemberImpersonation(unittest.TestCase):
+class TestTeamMemberImpersonation(FrappeTestCase):
pass
diff --git a/press/press/doctype/team_onboarding/team_onboarding.py b/press/press/doctype/team_onboarding/team_onboarding.py
index 2701a249d0d..7c538db2f05 100644
--- a/press/press/doctype/team_onboarding/team_onboarding.py
+++ b/press/press/doctype/team_onboarding/team_onboarding.py
@@ -8,4 +8,19 @@
class TeamOnboarding(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ status: DF.Literal["Pending", "Skipped", "Completed", "Not Applicable"]
+ step_name: DF.Data
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/telegram_group/telegram_group.json b/press/press/doctype/telegram_group/telegram_group.json
index a3a2a7adaf2..a4711a77705 100644
--- a/press/press/doctype/telegram_group/telegram_group.json
+++ b/press/press/doctype/telegram_group/telegram_group.json
@@ -9,6 +9,7 @@
"engine": "InnoDB",
"field_order": [
"chat_id",
+ "token",
"topics"
],
"fields": [
@@ -24,11 +25,17 @@
"fieldtype": "Table",
"label": "Topics",
"options": "Telegram Group Topic"
+ },
+ {
+ "fieldname": "token",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Token"
}
],
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2023-05-11 17:40:05.778395",
+ "modified": "2023-12-14 12:49:59.132352",
"modified_by": "Administrator",
"module": "Press",
"name": "Telegram Group",
diff --git a/press/press/doctype/telegram_group/telegram_group.py b/press/press/doctype/telegram_group/telegram_group.py
index a8bf6520f67..188d5486009 100644
--- a/press/press/doctype/telegram_group/telegram_group.py
+++ b/press/press/doctype/telegram_group/telegram_group.py
@@ -6,4 +6,21 @@
class TelegramGroup(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.telegram_group_topic.telegram_group_topic import (
+ TelegramGroupTopic,
+ )
+
+ chat_id: DF.Data
+ token: DF.Data | None
+ topics: DF.Table[TelegramGroupTopic]
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/telegram_group_topic/telegram_group_topic.py b/press/press/doctype/telegram_group_topic/telegram_group_topic.py
index 19bff1c6fda..5c64ca3eb1a 100644
--- a/press/press/doctype/telegram_group_topic/telegram_group_topic.py
+++ b/press/press/doctype/telegram_group_topic/telegram_group_topic.py
@@ -6,4 +6,19 @@
class TelegramGroupTopic(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ topic: DF.Data
+ topic_id: DF.Data
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/telegram_message/__init__.py b/press/press/doctype/telegram_message/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/telegram_message/telegram_message.js b/press/press/doctype/telegram_message/telegram_message.js
new file mode 100644
index 00000000000..ca9dc9cd6ed
--- /dev/null
+++ b/press/press/doctype/telegram_message/telegram_message.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2024, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Telegram Message", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/telegram_message/telegram_message.json b/press/press/doctype/telegram_message/telegram_message.json
new file mode 100644
index 00000000000..66abf412c3d
--- /dev/null
+++ b/press/press/doctype/telegram_message/telegram_message.json
@@ -0,0 +1,114 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2024-05-21 16:45:23.323529",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "priority",
+ "status",
+ "column_break_pube",
+ "topic",
+ "group",
+ "section_break_ujme",
+ "message",
+ "section_break_njxf",
+ "error",
+ "retry_count"
+ ],
+ "fields": [
+ {
+ "default": "Queued",
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Status",
+ "options": "Queued\nSent\nError",
+ "read_only": 1,
+ "reqd": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "priority",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Priority",
+ "options": "High\nMedium\nLow",
+ "read_only": 1,
+ "reqd": 1
+ },
+ {
+ "fieldname": "topic",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Topic",
+ "read_only": 1
+ },
+ {
+ "fieldname": "group",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Group",
+ "read_only": 1
+ },
+ {
+ "fieldname": "message",
+ "fieldtype": "Code",
+ "label": "Message",
+ "read_only": 1,
+ "reqd": 1
+ },
+ {
+ "fieldname": "column_break_pube",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "section_break_ujme",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "section_break_njxf",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "error",
+ "fieldtype": "Code",
+ "label": "Error",
+ "read_only": 1
+ },
+ {
+ "fieldname": "retry_count",
+ "fieldtype": "Int",
+ "label": "Retry Count",
+ "read_only": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2024-05-22 15:25:47.007102",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Telegram Message",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/telegram_message/telegram_message.py b/press/press/doctype/telegram_message/telegram_message.py
new file mode 100644
index 00000000000..cecd3f474dc
--- /dev/null
+++ b/press/press/doctype/telegram_message/telegram_message.py
@@ -0,0 +1,123 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+import traceback
+
+import frappe
+from frappe.model.document import Document
+from telegram.error import NetworkError, RetryAfter
+
+from press.telegram_utils import Telegram
+from frappe.query_builder import Interval
+from frappe.query_builder.functions import Now
+
+
+class TelegramMessage(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ error: DF.Code | None
+ group: DF.Data | None
+ message: DF.Code
+ priority: DF.Literal["High", "Medium", "Low"]
+ retry_count: DF.Int
+ status: DF.Literal["Queued", "Sent", "Error"]
+ topic: DF.Data | None
+ # end: auto-generated types
+
+ def send(self):
+ try:
+ telegram = Telegram(self.topic, self.group)
+ if not self.group:
+ self.group = telegram.group
+ if not self.topic:
+ self.topic = telegram.topic
+ telegram.send(self.message, reraise=True)
+ self.status = "Sent"
+ except RetryAfter:
+ # Raise an exception that will be caught by the scheduler
+ # Try again after some time
+ raise
+ except NetworkError:
+ # Try again. Not more than 5 times
+ self.retry_count += 1
+ self.error = traceback.format_exc()
+ if self.retry_count >= 5:
+ self.status = "Error"
+ raise
+ except Exception:
+			# It's unlikely that this error will be resolved by retrying
+ # Fail immediately
+ self.error = traceback.format_exc()
+ self.status = "Error"
+ raise
+ finally:
+ self.save()
+
+ @staticmethod
+ def enqueue(
+ message: str,
+ topic: str | None = None,
+ group: str | None = None,
+ priority: str = "Medium",
+ ):
+ """Enqueue message for sending"""
+ return frappe.get_doc(
+ {
+ "doctype": "Telegram Message",
+ "message": message,
+ "priority": priority,
+ "topic": topic,
+ "group": group,
+ }
+ ).insert(ignore_permissions=True)
+
+ @staticmethod
+ def get_one() -> "TelegramMessage | None":
+ first = frappe.get_all(
+ "Telegram Message",
+ filters={"status": "Queued"},
+ order_by="FIELD(priority, 'High', 'Medium', 'Low'), creation ASC",
+ limit=1,
+ pluck="name",
+ )
+ if first:
+ return frappe.get_doc("Telegram Message", first[0])
+
+ @staticmethod
+ def send_one() -> None:
+ message = TelegramMessage.get_one()
+ if message:
+ return message.send()
+
+ @staticmethod
+ def clear_old_logs(days=30):
+ table = frappe.qb.DocType("Telegram Message")
+ frappe.db.delete(table, filters=(table.modified < (Now() - Interval(days=days))))
+ frappe.db.commit()
+
+
+def send_telegram_message():
+ """Send one queued telegram message"""
+
+	# Go through the queue until one of these things happens
+ # 1. There are no more queued messages
+ # 2. We successfully send a message
+ # 3. Telegram asks us to stop (RetryAfter)
+ # 4. We encounter an error that is not recoverable by retrying
+	#    (after 5 retry attempts the message is dropped from the queue)
+ while message := TelegramMessage.get_one():
+ try:
+ message.send()
+ return
+ except RetryAfter:
+ # Retry in the next invocation
+ return
+ except Exception:
+ # Try next message
+ pass
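
Note: `get_one` above delegates priority ordering to MySQL's `FIELD()` function: `FIELD(priority, 'High', 'Medium', 'Low')` returns the 1-based position of the value in that list, so an ascending sort yields High before Medium before Low, with `creation ASC` breaking ties. A minimal sketch of the same ordering in plain Python (plain dicts stand in for Frappe documents):

```python
# Sketch of "ORDER BY FIELD(priority, 'High', 'Medium', 'Low'), creation ASC".
PRIORITY_RANK = {"High": 1, "Medium": 2, "Low": 3}

def next_queued(messages: list[dict]) -> dict | None:
    queued = [m for m in messages if m["status"] == "Queued"]
    if not queued:
        return None
    # Lowest rank wins; ties fall back to the earliest creation timestamp.
    return min(queued, key=lambda m: (PRIORITY_RANK[m["priority"]], m["creation"]))

messages = [
    {"status": "Queued", "priority": "Low", "creation": 1},
    {"status": "Queued", "priority": "High", "creation": 3},
    {"status": "Queued", "priority": "High", "creation": 2},
    {"status": "Sent", "priority": "High", "creation": 0},
]
assert next_queued(messages)["creation"] == 2  # oldest queued High message
```
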
diff --git a/press/press/doctype/telegram_message/test_telegram_message.py b/press/press/doctype/telegram_message/test_telegram_message.py
new file mode 100644
index 00000000000..3a974f6875f
--- /dev/null
+++ b/press/press/doctype/telegram_message/test_telegram_message.py
@@ -0,0 +1,133 @@
+# Copyright (c) 2024, Frappe and Contributors
+# See license.txt
+
+from unittest.mock import Mock, patch
+
+import frappe
+from frappe.tests.utils import FrappeTestCase
+from telegram.error import RetryAfter, TimedOut
+
+from press.press.doctype.telegram_message.telegram_message import (
+ TelegramMessage,
+ send_telegram_message,
+)
+from press.telegram_utils import Telegram
+
+
+@patch.object(Telegram, "send")
+class TestTelegramMessage(FrappeTestCase):
+ def tearDown(self):
+ frappe.db.rollback()
+
+ def test_enqueue_creates_telegram_message(self, mock_send: Mock):
+ """Test if enqueue method creates Telegram Message"""
+
+ before = frappe.db.count("Telegram Message")
+ TelegramMessage.enqueue(message="Test Message")
+ after = frappe.db.count("Telegram Message")
+ self.assertEqual(after, before + 1)
+
+ def test_enqueue_creates_telegram_message_with_queued_status(self, mock_send: Mock):
+ """Test if enqueue method creates Telegram Message with Queued status"""
+ message = TelegramMessage.enqueue(message="Test Message")
+ self.assertEqual(message.status, "Queued")
+
+ def test_send_calls_telegram_send(self, mock_send: Mock):
+ """Test if send method calls Telegram send method"""
+ TelegramMessage.enqueue(message="Test Message")
+ send_telegram_message()
+ mock_send.assert_called_once()
+
+ def test_successful_send_call_sets_sent_status(self, mock_send: Mock):
+ """Test if successful send call sets status to Sent"""
+ first = TelegramMessage.enqueue(message="Test Message")
+ send_telegram_message()
+ first.reload()
+ self.assertEqual(first.status, "Sent")
+
+ def test_failed_send_call_sets_error_status(self, mock_send: Mock):
+ """Test if failed send call sets status to Error"""
+ mock_send.side_effect = Exception()
+ first = TelegramMessage.enqueue(message="Test Message")
+ self.assertRaises(Exception, TelegramMessage.send_one)
+ first.reload()
+ self.assertEqual(first.status, "Error")
+ self.assertIn("Exception", first.error)
+
+ def test_sends_messages_in_priority_order(self, mock_send: Mock):
+ """Test if messages are sent in priority order"""
+ high = TelegramMessage.enqueue(message="Test Message", priority="High")
+ medium = TelegramMessage.enqueue(message="Test Message", priority="Medium")
+ low = TelegramMessage.enqueue(message="Test Message", priority="Low")
+
+ self.assertEqual(TelegramMessage.get_one(), high)
+ send_telegram_message()
+ self.assertEqual(TelegramMessage.get_one(), medium)
+ send_telegram_message()
+ self.assertEqual(TelegramMessage.get_one(), low)
+ send_telegram_message()
+
+ low = TelegramMessage.enqueue(message="Test Message", priority="Low")
+ medium = TelegramMessage.enqueue(message="Test Message", priority="Medium")
+ high = TelegramMessage.enqueue(message="Test Message", priority="High")
+
+ self.assertEqual(TelegramMessage.get_one(), high)
+ send_telegram_message()
+ self.assertEqual(TelegramMessage.get_one(), medium)
+ send_telegram_message()
+ self.assertEqual(TelegramMessage.get_one(), low)
+ send_telegram_message()
+
+ def test_sends_messages_in_creation_order(self, mock_send: Mock):
+ """Test if messages are sent in creation order"""
+ first = TelegramMessage.enqueue(message="Test Message")
+ second = TelegramMessage.enqueue(message="Test Message")
+
+ self.assertEqual(TelegramMessage.get_one(), first)
+ send_telegram_message()
+ self.assertEqual(TelegramMessage.get_one(), second)
+ send_telegram_message()
+
+ def test_failed_send_network_error_increases_retry(self, mock_send: Mock):
+ """Test if failed send call because of network issues increases retry count"""
+ mock_send.side_effect = TimedOut()
+ first = TelegramMessage.enqueue(message="Test Message")
+ self.assertRaises(TimedOut, TelegramMessage.send_one)
+ first.reload()
+ self.assertEqual(first.status, "Queued")
+ self.assertEqual(first.retry_count, 1)
+
+	def test_failed_send_after_max_retries_sets_error_status(self, mock_send: Mock):
+		"""Test if failed send call after max retries sets status to Error"""
+ mock_send.side_effect = TimedOut()
+ first = TelegramMessage.enqueue(message="Test Message")
+ first.retry_count = 4
+ first.save()
+ self.assertRaises(TimedOut, TelegramMessage.send_one)
+ first.reload()
+ self.assertEqual(first.status, "Error")
+
+ def test_failed_send_retry_after_doesnt_change_anything(self, mock_send: Mock):
+ """Test if failed send call because of rate limits doesn't change status"""
+ mock_send.side_effect = RetryAfter(10)
+ first = TelegramMessage.enqueue(message="Test Message")
+ self.assertRaises(RetryAfter, TelegramMessage.send_one)
+ first.reload()
+ self.assertEqual(first.status, "Queued")
+
+ def test_send_message_returns_on_empty_queue(self, mock_send: Mock):
+ """Test if send_telegram_message returns on empty queue"""
+ first = TelegramMessage.enqueue(message="Test Message")
+ first.status = "Sent"
+ first.save()
+ send_telegram_message()
+ mock_send.assert_not_called()
+
+ def test_send_message_does_not_raise_on_failure(self, mock_send: Mock):
+ """Test if send_telegram_message does not raise on failure"""
+ mock_send.side_effect = Exception()
+ first = TelegramMessage.enqueue(message="Test Message")
+ send_telegram_message()
+ first.reload()
+ self.assertEqual(first.status, "Error")
+ self.assertIn("Exception", first.error)
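
Note: decorating the whole test class with `@patch.object(Telegram, "send")` applies the patch to every test method and hands each one the same mock as an extra argument. A tiny self-contained sketch of class-level patching (the `Greeter` class is hypothetical):

```python
from unittest import TestCase, main
from unittest.mock import Mock, patch


class Greeter:
    def greet(self) -> str:
        return "hello"


@patch.object(Greeter, "greet")  # applied to every test method in the class
class TestGreeter(TestCase):
    def test_greet_called(self, mock_greet: Mock):
        # Each test receives the active mock as its last positional argument.
        mock_greet.return_value = "hi"
        self.assertEqual(Greeter().greet(), "hi")


if __name__ == "__main__":
    main()
```
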
diff --git a/press/press/doctype/tls_certificate/test_tls_certificate.py b/press/press/doctype/tls_certificate/test_tls_certificate.py
index 508b2e98136..98dc0b8e314 100644
--- a/press/press/doctype/tls_certificate/test_tls_certificate.py
+++ b/press/press/doctype/tls_certificate/test_tls_certificate.py
@@ -1,30 +1,34 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
+from typing import Literal
+from unittest.mock import Mock, patch
+
+import frappe
+from frappe.tests.utils import FrappeTestCase
+
+from press.press.doctype.agent_job.agent_job import AgentJob
+from press.press.doctype.proxy_server.proxy_server import ProxyServer
+from press.press.doctype.proxy_server.test_proxy_server import create_test_proxy_server
from press.press.doctype.root_domain.test_root_domain import create_test_root_domain
from press.press.doctype.tls_certificate.tls_certificate import (
BaseCA,
LetsEncrypt,
TLSCertificate,
)
-from press.press.doctype.proxy_server.test_proxy_server import create_test_proxy_server
-from press.press.doctype.proxy_server.proxy_server import ProxyServer
-from unittest.mock import Mock, patch
-from press.press.doctype.agent_job.agent_job import AgentJob
-
-import frappe
-import unittest
@patch.object(TLSCertificate, "obtain_certificate", new=Mock())
-def create_test_tls_certificate(domain: str, wildcard: bool = False) -> TLSCertificate:
+def create_test_tls_certificate(
+ domain: str, wildcard: bool = False, provider: Literal["Let's Encrypt", "Other"] = "Let's Encrypt"
+) -> TLSCertificate:
certificate = frappe.get_doc(
{
"doctype": "TLS Certificate",
"domain": domain,
"rsa_key_size": 2048,
"wildcard": wildcard,
+ "provider": provider,
}
).insert(ignore_if_duplicate=True)
certificate.reload()
@@ -43,7 +47,7 @@ def fake_extract(self):
@patch.object(LetsEncrypt, "_obtain", new=Mock())
@patch.object(BaseCA, "_extract", new=fake_extract)
@patch.object(TLSCertificate, "_extract_certificate_details", new=Mock())
-class TestTLSCertificate(unittest.TestCase):
+class TestTLSCertificate(FrappeTestCase):
def tearDown(self):
frappe.db.rollback()
@@ -56,38 +60,43 @@ def test_renewal_of_secondary_wildcard_domains_updates_server(self):
cert = create_test_tls_certificate(erpnext_domain.name, wildcard=True)
- with patch.object(LetsEncrypt, "__init__", new=none_init), patch.object(
- ProxyServer, "setup_wildcard_hosts"
- ) as mock_setup_wildcard_hosts:
+ with (
+ patch.object(LetsEncrypt, "__init__", new=none_init),
+ patch.object(ProxyServer, "setup_wildcard_hosts") as mock_setup_wildcard_hosts,
+ ):
cert._obtain_certificate()
mock_setup_wildcard_hosts.assert_called_once()
def test_renewal_of_primary_wildcard_domains_doesnt_call_setup_wildcard_domains(self):
erpnext_domain = create_test_root_domain("erpnext.xyz")
fc_domain = create_test_root_domain("fc.dev")
- create_test_proxy_server(
- "n1", domains=[{"domain": fc_domain.name}, {"domain": erpnext_domain.name}]
- )
+ create_test_proxy_server("n1", domains=[{"domain": fc_domain.name}, {"domain": erpnext_domain.name}])
cert = create_test_tls_certificate(fc_domain.name, wildcard=True)
cert.reload() # already created with proxy server
- with patch.object(LetsEncrypt, "__init__", new=none_init), patch.object(
- TLSCertificate, "trigger_server_tls_setup_callback", new=Mock()
- ), patch.object(
- ProxyServer, "setup_wildcard_hosts"
- ) as mock_setup_wildcard_hosts:
+ with (
+ patch.object(LetsEncrypt, "__init__", new=none_init),
+ patch.object(TLSCertificate, "trigger_server_tls_setup_callback", new=Mock()),
+ patch.object(ProxyServer, "setup_wildcard_hosts") as mock_setup_wildcard_hosts,
+ ):
cert._obtain_certificate()
mock_setup_wildcard_hosts.assert_not_called()
def test_renewal_of_primary_domain_calls_update_tls_certificates(self):
- cert = create_test_tls_certificate("fc.dev", wildcard=True)
- create_test_proxy_server("n1")
- with patch.object(LetsEncrypt, "__init__", new=none_init), patch.object(
- TLSCertificate, "trigger_server_tls_setup_callback"
- ) as mock_trigger_server_tls_setup, patch.object(
- ProxyServer, "setup_wildcard_hosts", new=Mock()
+		# Use a different domain to avoid any chance of
+		# reusing the same non-wildcard domain in tests,
+		# because create_test_tls_certificate skips creation if the certificate already exists
+ create_test_root_domain("fc2.dev")
+ cert = create_test_tls_certificate("fc2.dev", wildcard=True)
+ create_test_proxy_server("n2", domain="fc2.dev")
+ with (
+ patch.object(LetsEncrypt, "__init__", new=none_init),
+ patch.object(
+ TLSCertificate, "trigger_server_tls_setup_callback"
+ ) as mock_trigger_server_tls_setup,
+ patch.object(ProxyServer, "setup_wildcard_hosts", new=Mock()),
):
cert._obtain_certificate()
mock_trigger_server_tls_setup.assert_called()
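
Note: the rewritten assertions also modernize the `with` statements: multiple `patch.object` context managers are grouped in parentheses, one per line, a syntax officially supported from Python 3.10 onward. A small sketch of the construct (the `managed` helper is hypothetical):

```python
from contextlib import contextmanager


@contextmanager
def managed(name: str):
    print(f"enter {name}")
    try:
        yield name
    finally:
        print(f"exit {name}")


# Parenthesized form: equivalent to nesting the managers in order,
# but each one gets its own line and a trailing comma is allowed.
with (
    managed("a") as a,
    managed("b") as b,
):
    print(a, b)  # enter a / enter b / a b / exit b / exit a
```
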
diff --git a/press/press/doctype/tls_certificate/tls_certificate.js b/press/press/doctype/tls_certificate/tls_certificate.js
index 658ab77047a..29a070f843b 100644
--- a/press/press/doctype/tls_certificate/tls_certificate.js
+++ b/press/press/doctype/tls_certificate/tls_certificate.js
@@ -3,21 +3,103 @@
frappe.ui.form.on('TLS Certificate', {
refresh: function (frm) {
- frm.add_custom_button(__('Obtain Certificate'), () => {
- frm.call({
- method: 'obtain_certificate',
- doc: frm.doc,
- callback: (result) => frm.refresh(),
- });
- });
if (frm.doc.wildcard) {
- frm.add_custom_button(__('Trigger Callback'), () => {
+ frm.add_custom_button(__('Trigger Server Setup Callback'), () => {
frm.call({
method: 'trigger_server_tls_setup_callback',
doc: frm.doc,
callback: (result) => frm.refresh(),
});
});
+ } else {
+ frm.add_custom_button(__('Trigger Site Domain Callback'), () => {
+ frm.call({
+ method: 'trigger_site_domain_callback',
+ doc: frm.doc,
+ callback: (result) => frm.refresh(),
+ });
+ });
}
+
+ frm.trigger('show_obtain_certificate');
+ frm.trigger('toggle_read_only');
+ frm.trigger('toggle_hidden');
+ frm.trigger('toggle_copy_private_key');
+ },
+
+ provider: function (frm) {
+ frm.trigger('show_obtain_certificate');
+ frm.trigger('toggle_read_only');
+ frm.trigger('toggle_hidden');
+ frm.trigger('toggle_copy_private_key');
+ },
+
+ wildcard: function (frm) {
+ frm.trigger('toggle_read_only');
+ frm.trigger('toggle_hidden');
+ frm.trigger('toggle_copy_private_key');
+ },
+
+ toggle_copy_private_key: function (frm) {
+ if (!frm.doc.wildcard) {
+ frm.add_custom_button('Copy Private Key', () => {
+ frappe.confirm(
+				`Are you sure you want to copy the private
+					key? You should ONLY do this for custom
+					domains, and you must notify the user of
+					their responsibility in handling the
+					private key.`,
+ () => frappe.utils.copy_to_clipboard(frm.doc.private_key),
+ );
+ });
+ } else {
+ if (frm.doc.provider == "Let's Encrypt") {
+ frm.remove_custom_button('Copy Private Key');
+ }
+ }
+ },
+
+ show_obtain_certificate: function (frm) {
+ if (frm.doc.provider == "Let's Encrypt") {
+ frm.add_custom_button(__('Obtain Certificate'), () => {
+ frm.call({
+ method: 'obtain_certificate',
+ doc: frm.doc,
+ callback: (result) => frm.refresh(),
+ });
+ });
+ } else {
+ frm.remove_custom_button(__('Obtain Certificate'));
+ }
+ },
+
+ toggle_read_only: function (frm) {
+ let fields = [
+ 'certificate',
+ 'private_key',
+ 'intermediate_chain',
+ 'full_chain',
+ 'issued_on',
+ 'expires_on',
+ 'team',
+ ];
+ fields.forEach(function (field) {
+ frm.set_df_property(
+ field,
+ 'read_only',
+ frm.doc.provider == "Let's Encrypt",
+ );
+ frm.refresh_field(field);
+ });
+ },
+
+ toggle_hidden: function (frm) {
+ frm.set_df_property(
+ 'private_key',
+ 'hidden',
+ frm.doc.provider == "Let's Encrypt",
+ );
+ frm.refresh_field('private_key');
},
});
diff --git a/press/press/doctype/tls_certificate/tls_certificate.json b/press/press/doctype/tls_certificate/tls_certificate.json
index d167e8fd121..9e9823ff810 100644
--- a/press/press/doctype/tls_certificate/tls_certificate.json
+++ b/press/press/doctype/tls_certificate/tls_certificate.json
@@ -11,6 +11,7 @@
"column_break_3",
"rsa_key_size",
"wildcard",
+ "provider",
"section_break_6",
"issued_on",
"column_break_8",
@@ -18,9 +19,12 @@
"section_break_10",
"decoded_certificate",
"certificate",
- "full_chain",
"intermediate_chain",
- "private_key"
+ "full_chain",
+ "private_key",
+ "section_break_cvcg",
+ "error",
+ "retry_count"
],
"fields": [
{
@@ -42,6 +46,7 @@
"set_only_once": 1
},
{
+ "description": "Output of openssl req -noout -text -in request.csr\n",
"fieldname": "decoded_certificate",
"fieldtype": "Code",
"label": "Decoded Certificate",
@@ -71,8 +76,8 @@
"fieldtype": "Select",
"label": "RSA Key Size",
"options": "2048\n3072\n4096",
- "reqd": 1,
- "set_only_once": 1
+ "read_only_depends_on": "eval: doc.wildcard && doc.provider === 'Other'",
+ "reqd": 1
},
{
"fieldname": "issued_on",
@@ -97,28 +102,37 @@
"hide_border": 1
},
{
+   "description": "Only the domain\u2019s certificate, without the chain",
"fieldname": "certificate",
"fieldtype": "Code",
"label": "Certificate",
- "read_only": 1
+ "read_only": 1,
+ "read_only_depends_on": "eval: doc.provider === \"Let's Encrypt\""
},
{
+   "description": "Certificate + Intermediate + Trust chain for non-Let's Encrypt certificates",
"fieldname": "full_chain",
"fieldtype": "Code",
"label": "Full Chain",
- "read_only": 1
+ "read_only": 1,
+   "read_only_depends_on": "eval: doc.provider === \"Let's Encrypt\""
},
{
+   "description": "Chain certificate to establish trust (Intermediate + Trust chain for non-Let's Encrypt certificates)",
"fieldname": "intermediate_chain",
"fieldtype": "Code",
"label": "Intermediate Chain",
- "read_only": 1
+ "read_only": 1,
+ "read_only_depends_on": "eval: doc.provider === \"Let's Encrypt\""
},
{
+ "depends_on": "eval: doc.provider !== \"Let's Encrypt\"",
+ "description": "Output of openssl genpkey -algorithm RSA -out private.key -pkeyopt rsa_keygen_bits:4096\n",
"fieldname": "private_key",
"fieldtype": "Code",
"label": "Private Key",
- "read_only": 1
+ "read_only": 1,
+ "read_only_depends_on": "eval: doc.provider === \"Let's Encrypt\""
},
{
"fieldname": "team",
@@ -126,11 +140,38 @@
"label": "Team",
"options": "Team",
"read_only": 1
+ },
+ {
+ "fieldname": "section_break_cvcg",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "error",
+ "fieldtype": "Code",
+ "label": "Error",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "depends_on": "eval: doc.retry_count",
+ "fieldname": "retry_count",
+ "fieldtype": "Int",
+ "label": "Retry Count",
+ "read_only": 1
+ },
+ {
+ "default": "Let's Encrypt",
+ "fieldname": "provider",
+ "fieldtype": "Select",
+ "label": "Provider",
+ "options": "Let's Encrypt\nOther",
+ "reqd": 1
}
],
+ "grid_page_length": 50,
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2021-02-16 11:17:49.280290",
+ "modified": "2025-04-07 14:24:48.585403",
"modified_by": "Administrator",
"module": "Press",
"name": "TLS Certificate",
@@ -163,7 +204,9 @@
"write": 1
}
],
+ "row_format": "Dynamic",
"sort_field": "modified",
"sort_order": "DESC",
+ "states": [],
"track_changes": 1
}
\ No newline at end of file
diff --git a/press/press/doctype/tls_certificate/tls_certificate.py b/press/press/doctype/tls_certificate/tls_certificate.py
index 309c944e40d..a6f76af7c31 100644
--- a/press/press/doctype/tls_certificate/tls_certificate.py
+++ b/press/press/doctype/tls_certificate/tls_certificate.py
@@ -1,24 +1,64 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
-
+from __future__ import annotations
import os
+import re
import shlex
import subprocess
+import time
+from contextlib import suppress
from datetime import datetime
+from typing import TYPE_CHECKING
import frappe
import OpenSSL
from frappe.model.document import Document
+from frappe.query_builder.functions import Date
-from press.api.site import check_dns_cname_a
+from press.exceptions import (
+ DNSValidationError,
+ TLSRetryLimitExceeded,
+)
from press.overrides import get_permission_query_conditions_for_doctype
+from press.press.doctype.communication_info.communication_info import get_communication_info
from press.runner import Ansible
from press.utils import get_current_team, log_error
+from press.utils.dns import check_dns_cname_a
+
+if TYPE_CHECKING:
+ from press.press.doctype.ansible_play.ansible_play import AnsiblePlay
+
+AUTO_RETRY_LIMIT = 5
+MANUAL_RETRY_LIMIT = 8
class TLSCertificate(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ certificate: DF.Code | None
+ decoded_certificate: DF.Code | None
+ domain: DF.Data
+ error: DF.Code | None
+ expires_on: DF.Datetime | None
+ full_chain: DF.Code | None
+ intermediate_chain: DF.Code | None
+ issued_on: DF.Datetime | None
+ private_key: DF.Code | None
+ provider: DF.Literal["Let's Encrypt", "Other"]
+ retry_count: DF.Int
+ rsa_key_size: DF.Literal["2048", "3072", "4096"]
+ status: DF.Literal["Pending", "Active", "Expired", "Revoked", "Failure"]
+ team: DF.Link | None
+ wildcard: DF.Check
+ # end: auto-generated types
+
def autoname(self):
if self.wildcard:
self.name = f"*.{self.domain}"
@@ -28,22 +68,56 @@ def autoname(self):
def after_insert(self):
self.obtain_certificate()
+ def validate(self):
+ if self.provider == "Other":
+ if not self.team:
+ frappe.throw("Team is mandatory for custom TLS certificates.")
+
+ self.configure_full_chain()
+ self.validate_key_length()
+ self.validate_key_certificate_association()
+ self._extract_certificate_details()
+
+ def on_update(self):
+ if self.is_new():
+ return
+
+ if self.has_value_changed("rsa_key_size"):
+ self.obtain_certificate()
+
@frappe.whitelist()
def obtain_certificate(self):
- user, session_data, team, = (
+ if self.provider != "Let's Encrypt":
+ return
+
+ if self.retry_count >= MANUAL_RETRY_LIMIT:
+ frappe.throw("Retry limit exceeded. Please check the error and try again.", TLSRetryLimitExceeded)
+ (
+ user,
+ session_data,
+ team,
+ ) = (
frappe.session.user,
frappe.session.data,
get_current_team(),
)
+
frappe.set_user(frappe.get_value("Team", team, "user"))
frappe.enqueue_doc(
- self.doctype, self.name, "_obtain_certificate", enqueue_after_commit=True
+ self.doctype,
+ self.name,
+ "_obtain_certificate",
+ enqueue_after_commit=True,
+ job_id=f"obtain_certificate:{self.name}",
+ deduplicate=True,
)
frappe.set_user(user)
frappe.session.data = session_data
@frappe.whitelist()
def _obtain_certificate(self):
+ if self.provider != "Let's Encrypt":
+ return
try:
settings = frappe.get_doc("Press Settings", "Press Settings")
ca = LetsEncrypt(settings)
@@ -52,12 +126,34 @@ def _obtain_certificate(self):
self.full_chain,
self.intermediate_chain,
self.private_key,
- ) = ca.obtain(
- domain=self.domain, rsa_key_size=self.rsa_key_size, wildcard=self.wildcard
- )
+ ) = ca.obtain(domain=self.domain, rsa_key_size=self.rsa_key_size, wildcard=self.wildcard)
self._extract_certificate_details()
self.status = "Active"
- except Exception:
+ self.retry_count = 0
+ self.error = None
+ except Exception as e:
+ # If certbot is already running, retry after 5 seconds
+ # TODO: Move this to a queue
+ if hasattr(e, "output") and e.output:
+ out = e.output.decode()
+ if "Another instance of Certbot is already running" in out:
+ time.sleep(5)
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "_obtain_certificate",
+ job_id=f"obtain_certificate:{self.name}",
+ deduplicate=True,
+ )
+ return
+ if re.search(r"Detail: .*: Invalid response", out):
+					self.error = "Suggestion: You may have updated your DNS records recently. Please wait for the changes to propagate and try fetching the certificate again after some time."
+ self.error += "\n" + out
+ else:
+ self.error = out
+ else:
+ self.error = repr(e)
+ self.retry_count += 1
self.status = "Failure"
log_error("TLS Certificate Exception", certificate=self.name)
self.save()
@@ -76,9 +172,7 @@ def _update_secondary_wildcard_domains(self):
proxies_containing_domain = frappe.get_all(
"Proxy Server Domain", {"domain": self.domain}, pluck="parent"
)
- proxies_using_domain = frappe.get_all(
- "Proxy Server", {"domain": self.domain}, pluck="name"
- )
+ proxies_using_domain = frappe.get_all("Proxy Server", {"domain": self.domain}, pluck="name")
proxies_containing_domain = set(proxies_containing_domain) - set(proxies_using_domain)
for proxy_name in proxies_containing_domain:
proxy = frappe.get_doc("Proxy Server", proxy_name)
@@ -99,22 +193,40 @@ def trigger_server_tls_setup_callback(self):
for server_doctype in server_doctypes:
servers = frappe.get_all(
- server_doctype, {"status": "Active", "name": ("like", f"%.{self.domain}")}
+ server_doctype,
+ filters={
+ "status": ("not in", ["Archived", "Installing"]),
+ "name": ("like", f"%.{self.domain}"),
+ },
+ fields=["name", "status"],
)
for server in servers:
- server_doc = frappe.get_doc(server_doctype, server)
- update_server_tls_certifcate(server_doc, self)
+ if server.status == "Active":
+ frappe.enqueue(
+ "press.press.doctype.tls_certificate.tls_certificate.update_server_tls_certifcate",
+ server=frappe.get_doc(server_doctype, server.name),
+ certificate=self,
+ enqueue_after_commit=True,
+ )
+ else:
+ # If server is not active, mark the tls_certificate_renewal_failed field as True
+ frappe.db.set_value(
+ server_doctype,
+ server.name,
+ "tls_certificate_renewal_failed",
+ 1,
+ update_modified=False,
+ )
+ @frappe.whitelist()
def trigger_site_domain_callback(self):
domain = frappe.db.get_value("Site Domain", {"tls_certificate": self.name}, "name")
if domain:
frappe.get_doc("Site Domain", domain).process_tls_certificate_update()
def trigger_self_hosted_server_callback(self):
- try:
+ with suppress(Exception):
frappe.get_doc("Self Hosted Server", self.name).process_tls_cert_update()
- except Exception:
- pass
def _extract_certificate_details(self):
x509 = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, self.certificate)
@@ -124,33 +236,156 @@ def _extract_certificate_details(self):
self.issued_on = datetime.strptime(x509.get_notBefore().decode(), "%Y%m%d%H%M%SZ")
self.expires_on = datetime.strptime(x509.get_notAfter().decode(), "%Y%m%d%H%M%SZ")
+ def configure_full_chain(self):
+ if not self.full_chain:
+ self.full_chain = f"{self.certificate}\n{self.intermediate_chain}"
-get_permission_query_conditions = get_permission_query_conditions_for_doctype(
- "TLS Certificate"
-)
+ def _get_private_key_object(self):
+ try:
+ return OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM, self.private_key)
+ except OpenSSL.crypto.Error as e:
+ log_error("TLS Private Key Exception", certificate=self.name)
+ raise e
+
+ def _get_certificate_object(self):
+ try:
+ return OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, self.full_chain)
+ except OpenSSL.crypto.Error as e:
+ log_error("Custom TLS Certificate Exception", certificate=self.name)
+ raise e
+
+ def validate_key_length(self):
+ private_key = self._get_private_key_object()
+
+ if private_key.bits() != int(self.rsa_key_size):
+ frappe.throw(
+ f"Private key length does not match the selected RSA key size. Expected {self.rsa_key_size} bits, got {private_key.bits()} bits."
+ )
+
+ def validate_key_certificate_association(self):
+ context = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_METHOD)
+ context.use_privatekey(self._get_private_key_object())
+ context.use_certificate(self._get_certificate_object())
+
+ try:
+ context.check_privatekey()
+ self.status = "Active"
+ self.retry_count = 0
+ self.error = None
+ except OpenSSL.SSL.Error as e:
+ self.error = repr(e)
+ log_error("TLS Key Certificate Association Exception", certificate=self.name)
+ frappe.throw("Private Key and Certificate do not match")
+ finally:
+ if self.error:
+ self.status = "Failure"
+
+
+get_permission_query_conditions = get_permission_query_conditions_for_doctype("TLS Certificate")
+
+
+class PendingCertificate(frappe._dict):
+ name: str
+ domain: str
+ wildcard: bool
+ retry_count: int
+
+
+def should_renew(site: str | None, certificate: PendingCertificate) -> bool:
+ if certificate.wildcard:
+ return True
+ if not site:
+ return False
+ if frappe.db.get_value("Site", site, "status") != "Active":
+ return False
+ dns_response = check_dns_cname_a(site, certificate.domain, ignore_proxying=True)
+ if dns_response["matched"]:
+ return True
+ raise DNSValidationError(
+ f"DNS check failed. {dns_response.get('answer')}",
+ )
+
+
+def rollback_and_fail_tls(certificate: PendingCertificate, e: Exception):
+ frappe.db.rollback()
+ frappe.db.set_value(
+ "TLS Certificate",
+ certificate.name,
+ {
+ "status": "Failure",
+ "error": str(e),
+ "retry_count": certificate.retry_count + 1,
+ },
+ )
def renew_tls_certificates():
+ tls_renewal_queue_size = frappe.db.get_single_value("Press Settings", "tls_renewal_queue_size")
pending = frappe.get_all(
"TLS Certificate",
- fields=["name", "domain", "wildcard"],
- filters={"status": "Active", "expires_on": ("<", frappe.utils.add_days(None, 25))},
+ fields=["name", "domain", "wildcard", "retry_count"],
+ filters={
+ "status": ("in", ("Active", "Failure")),
+ "expires_on": ("<", frappe.utils.add_days(None, 25)),
+ "retry_count": ("<", AUTO_RETRY_LIMIT),
+ "provider": "Let's Encrypt",
+ },
+ ignore_ifnull=True,
+ order_by="expires_on ASC, status DESC", # Oldest first, then prefer failures.
)
+ renewals_attempted = 0
for certificate in pending:
- site = frappe.db.get_value(
- "Site Domain", {"tls_certificate": certificate.name, "status": "Active"}, "site"
- )
- if site:
- site_status = frappe.db.get_value("Site", site, "status")
- if (
- site_status == "Active" and check_dns_cname_a(site, certificate.domain)["matched"]
- ):
- certificate_doc = frappe.get_doc("TLS Certificate", certificate.name)
- certificate_doc._obtain_certificate()
- frappe.db.commit()
- if certificate.wildcard:
- certificate_doc = frappe.get_doc("TLS Certificate", certificate.name)
+ if tls_renewal_queue_size and (renewals_attempted >= tls_renewal_queue_size):
+ break
+
+ site = frappe.db.get_value("Site Domain", {"tls_certificate": certificate.name}, "site")
+
+ try:
+ if not should_renew(site, certificate):
+ continue
+ renewals_attempted += 1
+ certificate_doc = TLSCertificate("TLS Certificate", certificate.name)
certificate_doc._obtain_certificate()
+ frappe.db.commit()
+ except DNSValidationError as e:
+ rollback_and_fail_tls(certificate, e) # has to come first as it has frappe.db.rollback()
+ frappe.db.set_value(
+ "Site Domain",
+ {"tls_certificate": certificate.name},
+ {"status": "Broken", "dns_response": str(e)},
+ )
+ frappe.db.commit()
+ except Exception as e:
+ rollback_and_fail_tls(certificate, e)
+ log_error("TLS Renewal Exception", certificate=certificate, site=site)
+ frappe.db.commit()
+
+
+def notify_custom_tls_renewal():
+ seven_days = frappe.utils.add_days(None, 7).date()
+ fifteen_days = frappe.utils.add_days(None, 15).date()
+
+ tls_cert = frappe.qb.DocType("TLS Certificate")
+
+ # Notify team members 15 days and 7 days before expiry
+
+ query = (
+ frappe.qb.from_(tls_cert)
+ .select(tls_cert.name, tls_cert.domain, tls_cert.team, tls_cert.expires_on)
+ .where(tls_cert.status.isin(["Active", "Failure"]))
+ .where((Date(tls_cert.expires_on) == seven_days) | (Date(tls_cert.expires_on) == fifteen_days))
+ .where(tls_cert.provider == "Other")
+ )
+
+ pending = query.run(as_dict=True)
+
+ for certificate in pending:
+ if certificate.team:
+ frappe.sendmail(
+ recipients=get_communication_info("Email", "Site Activity", "Team", certificate.team),
+ subject=f"TLS Certificate Renewal Required: {certificate.name}",
+ message=f"TLS Certificate {certificate.name} is due for renewal on {certificate.expires_on}. Please renew the certificate to avoid service disruption.",
+ )
def update_server_tls_certifcate(server, certificate):
@@ -160,6 +395,8 @@ def update_server_tls_certifcate(server, certificate):
proxysql_admin_password = server.get_password("proxysql_admin_password")
ansible = Ansible(
playbook="tls.yml",
+ user=server.get("ssh_user") or "root",
+ port=server.get("ssh_port") or 22,
server=server,
variables={
"certificate_private_key": certificate.private_key,
@@ -169,11 +406,56 @@ def update_server_tls_certifcate(server, certificate):
"proxysql_admin_password": proxysql_admin_password,
},
)
- ansible.run()
+ play: "AnsiblePlay" = ansible.run()
+ frappe.db.set_value(
+ server.doctype,
+ server.name,
+ "tls_certificate_renewal_failed",
+ play.status != "Success",
+ # to avoid causing TimestampMismatchError in other important tasks
+ update_modified=False,
+ )
except Exception:
log_error("TLS Setup Exception", server=server.as_dict())
+def retrigger_failed_wildcard_tls_callbacks():
+ server_doctypes = [
+ "Proxy Server",
+ "Server",
+ "Database Server",
+ "Log Server",
+ "Monitor Server",
+ "Registry Server",
+ "Analytics Server",
+ "Trace Server",
+ ]
+ for server_doctype in server_doctypes:
+ servers = frappe.get_all(
+ server_doctype, filters={"status": "Active"}, fields=["name", "tls_certificate_renewal_failed"]
+ )
+ for server in servers:
+ previous_attempt_failed = server.tls_certificate_renewal_failed
+ if not previous_attempt_failed:
+ plays = frappe.get_all(
+ "Ansible Play",
+ {"play": "Setup TLS Certificates", "server": server.name},
+ pluck="status",
+ limit=1,
+ order_by="creation DESC",
+ )
+ if plays and plays[0] != "Success":
+ previous_attempt_failed = True
+
+ if previous_attempt_failed:
+				server_doc = frappe.get_doc(server_doctype, server.name)
+ frappe.enqueue(
+ "press.press.doctype.tls_certificate.tls_certificate.update_server_tls_certifcate",
+ server=server_doc,
+ certificate=server_doc.get_certificate(),
+ )
+
+
class BaseCA:
def __init__(self, settings):
self.settings = settings
@@ -185,16 +467,37 @@ def obtain(self, domain, rsa_key_size=2048, wildcard=False):
self._obtain()
return self._extract()
- def _extract(self):
- with open(self.certificate_file) as f:
- certificate = f.read()
- with open(self.full_chain_file) as f:
- full_chain = f.read()
- with open(self.intermediate_chain_file) as f:
- intermediate_chain = f.read()
- with open(self.private_key_file) as f:
- private_key = f.read()
+ def _read_latest_certificate_file(self, file_path):
+		import glob  # os and re are already imported at module level
+
+ # Split path into directory and filename
+ dir_path = os.path.dirname(file_path)
+ file_name = os.path.basename(file_path)
+ parent_dir = os.path.dirname(dir_path)
+ base_dir_name = os.path.basename(dir_path)
+
+ # Look for indexed directories first (e.g., dir-0000, dir-0001, etc.)
+ indexed_dirs = glob.glob(os.path.join(parent_dir, f"{base_dir_name}-[0-9][0-9][0-9][0-9]"))
+
+ if indexed_dirs:
+ # Find directory with highest index
+ latest_dir = max(indexed_dirs, key=lambda p: int(re.search(r"-(\d+)$", p).group(1)))
+ latest_path = os.path.join(latest_dir, file_name)
+ elif os.path.exists(file_path):
+ latest_path = file_path
+ else:
+ raise FileNotFoundError(f"Certificate file not found: {file_path}")
+
+ with open(latest_path) as f:
+ return f.read()
+ def _extract(self):
+ certificate = self._read_latest_certificate_file(self.certificate_file)
+ full_chain = self._read_latest_certificate_file(self.full_chain_file)
+ intermediate_chain = self._read_latest_certificate_file(self.intermediate_chain_file)
+ private_key = self._read_latest_certificate_file(self.private_key_file)
return certificate, full_chain, intermediate_chain, private_key
@@ -226,19 +529,21 @@ def _obtain(self):
def _obtain_wildcard(self):
domain = frappe.get_doc("Root Domain", self.domain[2:])
- environment = os.environ
+ environment = os.environ.copy()
environment.update(
{
"AWS_ACCESS_KEY_ID": domain.aws_access_key_id,
"AWS_SECRET_ACCESS_KEY": domain.get_password("aws_secret_access_key"),
}
)
+ if domain.aws_region:
+ environment["AWS_DEFAULT_REGION"] = domain.aws_region
self.run(self._certbot_command(), environment=environment)
def _obtain_naked_with_dns(self):
domain = frappe.get_all("Root Domain", pluck="name", limit=1)[0]
domain = frappe.get_doc("Root Domain", domain)
- environment = os.environ
+ environment = os.environ.copy()
environment.update(
{
"AWS_ACCESS_KEY_ID": domain.aws_access_key_id,
@@ -261,24 +566,26 @@ def _certbot_command(self):
staging = "--staging" if self.staging else ""
force_renewal = "--keep" if frappe.conf.developer_mode else "--force-renewal"
- command = (
+ return (
f"certbot certonly {plugin} {staging} --logs-dir"
f" {self.directory}/logs --work-dir {self.directory} --config-dir"
f" {self.directory} {force_renewal} --agree-tos --eff-email --email"
f" {self.eff_registration_email} --staple-ocsp"
+ " --key-type rsa"
f" --rsa-key-size {self.rsa_key_size} --cert-name {self.domain} --domains"
f" {self.domain}"
)
- return command
-
def run(self, command, environment=None):
try:
- subprocess.check_output(
- shlex.split(command), stderr=subprocess.STDOUT, env=environment
- )
+ subprocess.check_output(shlex.split(command), stderr=subprocess.STDOUT, env=environment)
+ except subprocess.CalledProcessError as e:
+ output = (e.output or b"").decode()
+ if "Another instance of Certbot is already running" not in output:
+ log_error("Certbot Exception", command=command, output=output)
+ raise e
except Exception as e:
- log_error("Certbot Exception", command=command, output=e.output.decode())
+ log_error("Certbot Exception", command=command, exception=e)
raise e
@property
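
Note on a recurring idea above: when certbot reports that another instance is already running, `_obtain_certificate` re-enqueues itself with a fixed `job_id` and `deduplicate=True`, so each certificate has at most one pending background job. A minimal sketch of that dedup discipline with an in-memory queue (the real semantics live in `frappe.enqueue_doc`; this is only an illustration):

```python
from collections import OrderedDict
from typing import Callable

# Jobs keyed by job_id; duplicates with the same id are dropped.
pending: OrderedDict[str, Callable[[], None]] = OrderedDict()


def enqueue(job_id: str, fn: Callable[[], None], deduplicate: bool = False) -> None:
    if deduplicate and job_id in pending:
        return  # a job with this id is already queued
    pending[job_id] = fn


def drain() -> None:
    while pending:
        _, fn = pending.popitem(last=False)  # FIFO
        fn()


enqueue("obtain_certificate:example.com", lambda: print("attempt 1"), deduplicate=True)
enqueue("obtain_certificate:example.com", lambda: print("attempt 2"), deduplicate=True)
drain()  # prints only "attempt 1"
```
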
diff --git a/press/press/doctype/trace_server/trace_server.js b/press/press/doctype/trace_server/trace_server.js
index 7e9e5dd6d9e..0023410b338 100644
--- a/press/press/doctype/trace_server/trace_server.js
+++ b/press/press/doctype/trace_server/trace_server.js
@@ -10,6 +10,7 @@ frappe.ui.form.on('Trace Server', {
[__('Update Agent'), 'update_agent', true, frm.doc.is_server_setup],
[__('Prepare Server'), 'prepare_server', true, !frm.doc.is_server_setup],
[__('Setup Server'), 'setup_server', true, !frm.doc.is_server_setup],
+ [__('Upgrade Server'), 'upgrade_server', true, frm.doc.is_server_setup],
[
__('Reconfigure Monitor Server'),
'reconfigure_monitor_server',
diff --git a/press/press/doctype/trace_server/trace_server.json b/press/press/doctype/trace_server/trace_server.json
index 4a61375eed6..cb0f448e82b 100644
--- a/press/press/doctype/trace_server/trace_server.json
+++ b/press/press/doctype/trace_server/trace_server.json
@@ -8,6 +8,7 @@
"status",
"hostname",
"domain",
+ "tls_certificate_renewal_failed",
"column_break_4",
"provider",
"virtual_machine",
@@ -77,7 +78,7 @@
"fieldname": "provider",
"fieldtype": "Select",
"label": "Provider",
- "options": "Generic\nScaleway\nAWS EC2",
+ "options": "Generic\nScaleway\nAWS EC2\nOCI",
"set_only_once": 1
},
{
@@ -259,15 +260,23 @@
{
"fieldname": "column_break_33",
"fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "tls_certificate_renewal_failed",
+ "fieldtype": "Check",
+ "label": "TLS Certificate Renewal Failed",
+ "read_only": 1
}
],
+ "grid_page_length": 50,
"links": [
{
"link_doctype": "Ansible Play",
"link_fieldname": "server"
}
],
- "modified": "2022-06-28 13:50:52.368111",
+ "modified": "2025-09-02 16:44:01.317526",
"modified_by": "Administrator",
"module": "Press",
"name": "Trace Server",
@@ -286,8 +295,9 @@
"write": 1
}
],
+ "row_format": "Dynamic",
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"track_changes": 1
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/trace_server/trace_server.py b/press/press/doctype/trace_server/trace_server.py
index 964a19d7da3..1a5a64fc04f 100644
--- a/press/press/doctype/trace_server/trace_server.py
+++ b/press/press/doctype/trace_server/trace_server.py
@@ -9,6 +9,41 @@
class TraceServer(BaseServer):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ agent_password: DF.Password | None
+ domain: DF.Link | None
+ frappe_public_key: DF.Code | None
+ frappe_user_password: DF.Password | None
+ hostname: DF.Data
+ ip: DF.Data
+ is_server_setup: DF.Check
+ monitoring_password: DF.Password | None
+ private_ip: DF.Data
+ private_mac_address: DF.Data | None
+ private_vlan_id: DF.Data | None
+ provider: DF.Literal["Generic", "Scaleway", "AWS EC2", "OCI"]
+ root_public_key: DF.Code | None
+ sentry_admin_email: DF.Data | None
+ sentry_admin_password: DF.Password | None
+ sentry_mail_login: DF.Data | None
+ sentry_mail_password: DF.Password | None
+ sentry_mail_port: DF.Int
+ sentry_mail_server: DF.Data | None
+ sentry_oauth_client_id: DF.Data | None
+ sentry_oauth_client_secret: DF.Data | None
+ sentry_oauth_server_url: DF.Data | None
+ status: DF.Literal["Pending", "Installing", "Active", "Broken", "Archived"]
+ tls_certificate_renewal_failed: DF.Check
+ virtual_machine: DF.Link | None
+ # end: auto-generated types
+
def validate(self):
self.validate_agent_password()
self.validate_monitoring_password()
@@ -31,9 +66,7 @@ def _setup_server(self):
log_server = frappe.db.get_single_value("Press Settings", "log_server")
if log_server:
- kibana_password = frappe.get_doc("Log Server", log_server).get_password(
- "kibana_password"
- )
+ kibana_password = frappe.get_doc("Log Server", log_server).get_password("kibana_password")
else:
kibana_password = None
@@ -77,6 +110,40 @@ def _setup_server(self):
log_error("Trace Server Setup Exception", server=self.as_dict())
self.save()
+ @frappe.whitelist()
+ def upgrade_server(self):
+ self.status = "Installing"
+ self.save()
+ frappe.enqueue_doc(self.doctype, self.name, "_upgrade_server", queue="long", timeout=2400)
+
+ def _upgrade_server(self):
+ try:
+ ansible = Ansible(
+ playbook="trace_upgrade.yml",
+ server=self,
+ variables={
+ "server": self.name,
+ "sentry_admin_email": self.sentry_admin_email,
+ "sentry_mail_server": self.sentry_mail_server,
+ "sentry_mail_port": self.sentry_mail_port,
+ "sentry_mail_login": self.sentry_mail_login,
+ "sentry_mail_password": self.get_password("sentry_mail_password"),
+ "sentry_oauth_server_url": self.sentry_oauth_server_url,
+ "sentry_oauth_client_id": self.sentry_oauth_client_id,
+ "sentry_oauth_client_secret": self.get_password("sentry_oauth_client_secret"),
+ },
+ )
+ play = ansible.run()
+ self.reload()
+ if play.status == "Success":
+ self.status = "Active"
+ else:
+ self.status = "Broken"
+ except Exception:
+ self.status = "Broken"
+ log_error("Trace Server Upgrade Exception", server=self.as_dict())
+ self.save()
+
@frappe.whitelist()
def show_sentry_password(self):
return self.get_password("sentry_admin_password")
diff --git a/press/press/doctype/usage_record/test_usage_record.py b/press/press/doctype/usage_record/test_usage_record.py
index 9bea150554d..18d1ed3e40d 100644
--- a/press/press/doctype/usage_record/test_usage_record.py
+++ b/press/press/doctype/usage_record/test_usage_record.py
@@ -1,11 +1,10 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and Contributors
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestUsageRecord(unittest.TestCase):
+class TestUsageRecord(FrappeTestCase):
pass
diff --git a/press/press/doctype/usage_record/usage_record.json b/press/press/doctype/usage_record/usage_record.json
index eb0bc2bbe8a..8c7f449b76a 100644
--- a/press/press/doctype/usage_record/usage_record.json
+++ b/press/press/doctype/usage_record/usage_record.json
@@ -12,8 +12,8 @@
"column_break_4",
"date",
"time",
- "prepaid",
"section_break_7",
+ "plan_type",
"plan",
"currency",
"amount",
@@ -71,8 +71,9 @@
},
{
"fieldname": "invoice",
- "fieldtype": "Data",
- "label": "Invoice"
+ "fieldtype": "Link",
+ "label": "Invoice",
+ "options": "Invoice"
},
{
"fieldname": "column_break_4",
@@ -85,7 +86,8 @@
{
"fieldname": "date",
"fieldtype": "Date",
- "label": "Date"
+ "label": "Date",
+ "search_index": 1
},
{
"fieldname": "time",
@@ -94,8 +96,9 @@
},
{
"fieldname": "subscription",
- "fieldtype": "Data",
- "label": "Subscription"
+ "fieldtype": "Link",
+ "label": "Subscription",
+ "options": "Subscription"
},
{
"fieldname": "column_break_13",
@@ -117,9 +120,9 @@
},
{
"fieldname": "plan",
- "fieldtype": "Link",
+ "fieldtype": "Dynamic Link",
"label": "Plan",
- "options": "Plan"
+ "options": "plan_type"
},
{
"fieldname": "payout",
@@ -133,16 +136,17 @@
"options": "Site"
},
{
- "default": "0",
- "fieldname": "prepaid",
- "fieldtype": "Check",
- "label": "Prepaid"
+ "fieldname": "plan_type",
+ "fieldtype": "Link",
+ "label": "Plan Type",
+ "options": "DocType",
+ "search_index": 1
}
],
"index_web_pages_for_search": 1,
"is_submittable": 1,
"links": [],
- "modified": "2023-03-31 13:52:00.471001",
+ "modified": "2024-08-23 16:46:05.290651",
"modified_by": "Administrator",
"module": "Press",
"name": "Usage Record",
diff --git a/press/press/doctype/usage_record/usage_record.py b/press/press/doctype/usage_record/usage_record.py
index 0711793fa5a..5f6a4460e0c 100644
--- a/press/press/doctype/usage_record/usage_record.py
+++ b/press/press/doctype/usage_record/usage_record.py
@@ -1,14 +1,38 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
-
+from __future__ import annotations
import frappe
from frappe.model.document import Document
-from press.utils import log_error
class UsageRecord(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ amended_from: DF.Link | None
+ amount: DF.Currency
+ currency: DF.Link | None
+ date: DF.Date | None
+ document_name: DF.DynamicLink | None
+ document_type: DF.Link | None
+ interval: DF.Data | None
+ invoice: DF.Link | None
+ payout: DF.Data | None
+ plan: DF.DynamicLink | None
+ plan_type: DF.Link | None
+ remark: DF.SmallText | None
+ site: DF.Link | None
+ subscription: DF.Link | None
+ team: DF.Link | None
+ time: DF.Time | None
+ # end: auto-generated types
+
def validate(self):
if not self.date:
self.date = frappe.utils.today()
@@ -16,15 +40,14 @@ def validate(self):
if not self.time:
self.time = frappe.utils.nowtime()
+ def before_submit(self):
+ self.validate_duplicate_usage_record()
+
def on_submit(self):
- try:
- self.update_usage_in_invoice()
- except Exception:
- log_error(title="Usage Record Invoice Update Error", name=self.name)
+ self.update_usage_in_invoice()
def on_cancel(self):
- if not self.prepaid:
- self.remove_usage_from_invoice()
+ self.remove_usage_from_invoice()
def update_usage_in_invoice(self):
team = frappe.get_doc("Team", self.team)
@@ -32,9 +55,14 @@ def update_usage_in_invoice(self):
if team.parent_team:
team = frappe.get_doc("Team", team.parent_team)
+ if team.billing_team:
+ team = frappe.get_doc("Team", team.billing_team)
+
if team.free_account:
return
- invoice = team.get_upcoming_invoice()
+ # Get a read lock on this invoice
+ # We're going to update the invoice and we don't want any other process to update it
+ invoice = team.get_upcoming_invoice(for_update=True)
if not invoice:
invoice = team.create_upcoming_invoice()
@@ -45,3 +73,62 @@ def remove_usage_from_invoice(self):
invoice = team.get_upcoming_invoice()
if invoice:
invoice.remove_usage_record(self)
+
+ def validate_duplicate_usage_record(self):
+		# Can skip the duplicate usage record check if this is an autoscale usage record
+ if self.document_type == "Server":
+ is_primary = frappe.db.get_value("Server", self.document_name, "is_primary")
+ if not is_primary:
+ return
+
+ usage_record = frappe.get_all(
+ "Usage Record",
+ {
+ "name": ("!=", self.name),
+ "team": self.team,
+ "document_type": self.document_type,
+ "document_name": self.document_name,
+ "interval": self.interval,
+ "date": self.date,
+ "plan": self.plan,
+ "docstatus": 1,
+ "subscription": self.subscription,
+ "amount": self.amount,
+ },
+ pluck="name",
+ )
+
+ if usage_record:
+ frappe.throw(
+ f"Usage Record {usage_record[0]} already exists for this document",
+ frappe.DuplicateEntryError,
+ )
+
+
+def link_unlinked_usage_records():
+ td = frappe.utils.today()
+ fd = frappe.utils.get_first_day(td)
+ ld = frappe.utils.get_last_day(td)
+ free_teams = frappe.db.get_all("Team", {"free_account": 1}, pluck="name")
+
+ usage_records = frappe.get_all(
+ "Usage Record",
+ filters={
+ "invoice": ("is", "not set"),
+ "date": ("between", (fd, ld)),
+ "team": ("not in", free_teams),
+ "docstatus": 1,
+ },
+ pluck="name",
+ ignore_ifnull=True,
+ )
+
+ for usage_record in usage_records:
+ try:
+ frappe.get_doc("Usage Record", usage_record).update_usage_in_invoice()
+ except Exception:
+ frappe.log_error("Failed to Link UR to Invoice")
+
+
+def on_doctype_update():
+ frappe.db.add_index("Usage Record", ["subscription", "date"])
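
Note: the `for_update=True` passed to `get_upcoming_invoice` in `update_usage_in_invoice` translates to a `SELECT ... FOR UPDATE`, so two usage records submitted concurrently cannot both read the draft invoice and overwrite each other's totals. A rough sketch of the underlying pattern against a DB-API connection (the `invoice` table and columns are illustrative, not the Press schema):

```python
# Sketch: lost-update protection via SELECT ... FOR UPDATE (MySQL/MariaDB).
def add_usage_to_invoice(conn, team: str, amount: float) -> None:
    cur = conn.cursor()
    cur.execute("BEGIN")
    # FOR UPDATE row-locks the invoice until COMMIT; a concurrent writer
    # blocks here instead of reading a stale total.
    cur.execute(
        "SELECT name, total FROM invoice"
        " WHERE team = %s AND status = 'Draft' FOR UPDATE",
        (team,),
    )
    name, total = cur.fetchone()
    cur.execute(
        "UPDATE invoice SET total = %s WHERE name = %s",
        (total + amount, name),
    )
    conn.commit()
```
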
diff --git a/press/press/doctype/user_2fa/__init__.py b/press/press/doctype/user_2fa/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/user_2fa/patches/generate_recovery_codes.py b/press/press/doctype/user_2fa/patches/generate_recovery_codes.py
new file mode 100644
index 00000000000..ebcf54c6f77
--- /dev/null
+++ b/press/press/doctype/user_2fa/patches/generate_recovery_codes.py
@@ -0,0 +1,30 @@
+import frappe
+import frappe.utils
+from frappe.query_builder import JoinType
+from frappe.query_builder.functions import Count
+
+
+def execute():
+ """Generate recovery codes for already existing `User 2FA` records."""
+
+ User2FA = frappe.qb.DocType("User 2FA")
+ User2FARecoveryCode = frappe.qb.DocType("User 2FA Recovery Code")
+
+ records = (
+ frappe.qb.from_(User2FA)
+ .join(User2FARecoveryCode, JoinType.left)
+ .on(User2FARecoveryCode.parent == User2FA.name)
+ .select(User2FA.name)
+ .groupby(User2FA.name)
+ .having(Count(User2FARecoveryCode.name) == 0)
+ .run(as_dict=True)
+ )
+
+ for record in records:
+ doc = frappe.get_doc("User 2FA", record.name)
+ doc.recovery_codes = []
+ for code in doc.generate_recovery_codes():
+ doc.append("recovery_codes", {"code": code})
+ doc.recovery_codes_last_viewed_at = frappe.utils.now_datetime()
+ doc.save()
+ frappe.db.commit()
diff --git a/press/press/doctype/user_2fa/test_user_2fa.py b/press/press/doctype/user_2fa/test_user_2fa.py
new file mode 100644
index 00000000000..0c71b5140c9
--- /dev/null
+++ b/press/press/doctype/user_2fa/test_user_2fa.py
@@ -0,0 +1,15 @@
+# Copyright (c) 2024, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+from press.press.doctype.user_2fa.user_2fa import User2FA
+
+
+class TestUser2FA(FrappeTestCase):
+ def test_generate_secret(self):
+ recovery_codes = list(User2FA.generate_recovery_codes())
+ self.assertEqual(len(recovery_codes), User2FA.recovery_codes_max)
+ self.assertTrue(all(len(code) == User2FA.recovery_codes_length for code in recovery_codes))
+ self.assertTrue(all(code.isupper() for code in recovery_codes))
diff --git a/press/press/doctype/user_2fa/user_2fa.js b/press/press/doctype/user_2fa/user_2fa.js
new file mode 100644
index 00000000000..14ba4927ef8
--- /dev/null
+++ b/press/press/doctype/user_2fa/user_2fa.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2024, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("User 2FA", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/press/doctype/user_2fa/user_2fa.json b/press/press/doctype/user_2fa/user_2fa.json
new file mode 100644
index 00000000000..66f00e5a6a0
--- /dev/null
+++ b/press/press/doctype/user_2fa/user_2fa.json
@@ -0,0 +1,103 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "autoname": "field:user",
+ "creation": "2024-08-21 16:10:57.634579",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "user",
+ "totp_secret",
+ "enabled",
+ "last_verified_at",
+ "recovery_codes",
+ "recovery_codes_last_viewed_at"
+ ],
+ "fields": [
+ {
+ "fieldname": "user",
+ "fieldtype": "Link",
+ "label": "User",
+ "options": "User",
+ "unique": 1
+ },
+ {
+ "fieldname": "totp_secret",
+ "fieldtype": "Password",
+ "label": "TOTP Secret"
+ },
+ {
+ "default": "0",
+ "fieldname": "enabled",
+ "fieldtype": "Check",
+ "label": "Enabled"
+ },
+ {
+ "fieldname": "last_verified_at",
+ "fieldtype": "Datetime",
+ "label": "Last Verified At"
+ },
+ {
+ "fieldname": "recovery_codes",
+ "fieldtype": "Table",
+ "label": "Recovery Codes",
+ "options": "User 2FA Recovery Code"
+ },
+ {
+ "fieldname": "recovery_codes_last_viewed_at",
+ "fieldtype": "Datetime",
+ "label": "Recovery Codes Last Viewed At"
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2025-06-21 16:04:54.476967",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "User 2FA",
+ "naming_rule": "By fieldname",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/press/doctype/user_2fa/user_2fa.py b/press/press/doctype/user_2fa/user_2fa.py
new file mode 100644
index 00000000000..c359513eb6f
--- /dev/null
+++ b/press/press/doctype/user_2fa/user_2fa.py
@@ -0,0 +1,127 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+from __future__ import annotations
+
+import random
+import string
+
+import frappe
+import frappe.utils
+from frappe.model.document import Document
+
+
+class User2FA(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.user_2fa_recovery_code.user_2fa_recovery_code import User2FARecoveryCode
+
+ enabled: DF.Check
+ last_verified_at: DF.Datetime | None
+ recovery_codes: DF.Table[User2FARecoveryCode]
+ recovery_codes_last_viewed_at: DF.Datetime | None
+ totp_secret: DF.Password | None
+ user: DF.Link | None
+ # end: auto-generated types
+
+ # Maximum number of recovery codes.
+ recovery_codes_max = 9
+
+ # Length of each recovery code.
+ recovery_codes_length = 16
+
+ def validate(self):
+ if self.enabled and not self.totp_secret:
+ self.generate_secret()
+
+ def generate_secret(self):
+ import pyotp
+
+ self.totp_secret = pyotp.random_base32()
+
+ @staticmethod
+ def generate_random_alphanum(length: int) -> str:
+ if length < 2:
+ raise ValueError("Length must be at least 2")
+
+ letters = string.ascii_letters
+ digits = string.digits
+ all_chars = letters + digits
+ # ensure at least one letter and one digit
+ result = [random.choice(letters), random.choice(digits)]
+ # fill the rest randomly
+ result += [random.choice(all_chars) for _ in range(length - 2)]
+ random.shuffle(result)
+ return "".join(result).upper()
+
+ @classmethod
+ def generate_recovery_codes(cls):
+ counter = 0
+ while counter < cls.recovery_codes_max:
+ code = cls.generate_random_alphanum(cls.recovery_codes_length)
+ has_upper = code.isupper()
+ has_digit = any(c.isdigit() for c in code)
+ if has_upper and has_digit:
+ counter += 1
+ yield code
+
+ def mark_recovery_codes_viewed(self):
+ """
+ Mark recovery codes as viewed by updating the last viewed timestamp.
+ Also, send an email notification to the user.
+ """
+
+ # Update the time.
+ self.recovery_codes_last_viewed_at = frappe.utils.now_datetime()
+
+ # Send email notification.
+ try:
+ args = {
+ "viewed_at": frappe.utils.format_datetime(self.recovery_codes_last_viewed_at),
+ "link": frappe.utils.get_url("/dashboard/settings/profile"),
+ }
+
+ frappe.sendmail(
+ recipients=[self.user],
+ subject="Your 2FA Recovery Codes Were Viewed",
+ template="2fa_recovery_codes_viewed",
+ args=args,
+ )
+ except Exception:
+ frappe.log_error("Failed to send recovery codes viewed notification email")
+
+
+def yearly_2fa_recovery_code_reminder():
+ """Check and send yearly recovery code reminders"""
+
+ # Construct email args.
+ args = {
+ "link": frappe.utils.get_url("/dashboard/settings/profile"),
+ }
+
+ # Get all users who have not viewed their recovery codes in the last year.
+ users = frappe.get_all(
+ "User 2FA",
+ filters={
+ "recovery_codes_last_viewed_at": [
+ "<=",
+ frappe.utils.add_to_date(frappe.utils.now_datetime(), years=-1),
+ ],
+ "enabled": 1,
+ },
+ pluck="name",
+ )
+
+ for user in users:
+ # Send mail.
+ frappe.sendmail(
+ recipients=[user],
+ subject="Review Your 2FA Recovery Codes",
+ template="2fa_recovery_codes_yearly_reminder",
+ args=args,
+ )
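
The class above only generates and stores the TOTP secret; the verification step is not part of this diff. A hedged sketch of how such a secret is conventionally checked with pyotp (the same library used in `generate_secret`):

```python
import pyotp

def verify_totp(totp_secret: str, submitted_code: str) -> bool:
    totp = pyotp.TOTP(totp_secret)
    # valid_window=1 also accepts the adjacent 30-second windows, tolerating
    # small clock drift between the server and the authenticator app.
    return totp.verify(submitted_code, valid_window=1)

# The QR code shown during 2FA setup encodes the provisioning URI, e.g.:
# pyotp.TOTP(secret).provisioning_uri(name=user_email, issuer_name="Frappe Cloud")
```
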
diff --git a/press/press/doctype/user_2fa_recovery_code/__init__.py b/press/press/doctype/user_2fa_recovery_code/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/user_2fa_recovery_code/user_2fa_recovery_code.json b/press/press/doctype/user_2fa_recovery_code/user_2fa_recovery_code.json
new file mode 100644
index 00000000000..e10d5db7202
--- /dev/null
+++ b/press/press/doctype/user_2fa_recovery_code/user_2fa_recovery_code.json
@@ -0,0 +1,40 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-06-21 13:22:03.233018",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "code",
+ "used_at"
+ ],
+ "fields": [
+ {
+ "fieldname": "code",
+ "fieldtype": "Password",
+ "in_list_view": 1,
+ "label": "Code",
+ "reqd": 1
+ },
+ {
+ "fieldname": "used_at",
+ "fieldtype": "Datetime",
+ "in_list_view": 1,
+ "label": "Used At"
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2025-06-21 16:20:54.624042",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "User 2FA Recovery Code",
+ "owner": "Administrator",
+ "permissions": [],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/press/doctype/user_2fa_recovery_code/user_2fa_recovery_code.py b/press/press/doctype/user_2fa_recovery_code/user_2fa_recovery_code.py
new file mode 100644
index 00000000000..6f3caea8157
--- /dev/null
+++ b/press/press/doctype/user_2fa_recovery_code/user_2fa_recovery_code.py
@@ -0,0 +1,24 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class User2FARecoveryCode(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ code: DF.Password
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ used_at: DF.Datetime | None
+ # end: auto-generated types
+
+ pass
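
`User 2FA Recovery Code` stores each code in a Password field alongside a `used_at` marker. A hypothetical helper (not part of this diff) showing how a single-use code could be consumed; `consume_recovery_code` and its lookup-by-user are assumptions:

```python
import frappe
import frappe.utils

def consume_recovery_code(user: str, submitted: str) -> bool:
    # User 2FA is autonamed by the user field, so the doc name is the user.
    doc = frappe.get_doc("User 2FA", user, for_update=True)
    for row in doc.recovery_codes:
        if row.used_at:
            continue  # already spent
        # `code` is a Password field, so it must be read via get_password()
        if row.get_password("code") == submitted.strip().upper():
            row.used_at = frappe.utils.now_datetime()
            doc.save()
            return True
    return False
```
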
diff --git a/press/press/doctype/user_ssh_certificate/test_user_ssh_certificate.py b/press/press/doctype/user_ssh_certificate/test_user_ssh_certificate.py
index a989102babe..8b2ccc07729 100644
--- a/press/press/doctype/user_ssh_certificate/test_user_ssh_certificate.py
+++ b/press/press/doctype/user_ssh_certificate/test_user_ssh_certificate.py
@@ -1,11 +1,10 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe and Contributors
# See license.txt
# import frappe
-import unittest
+from frappe.tests.utils import FrappeTestCase
-class TestUserSSHCertificate(unittest.TestCase):
+class TestUserSSHCertificate(FrappeTestCase):
pass
diff --git a/press/press/doctype/user_ssh_certificate/user_ssh_certificate.py b/press/press/doctype/user_ssh_certificate/user_ssh_certificate.py
index baadc617bcb..6fa5930904c 100644
--- a/press/press/doctype/user_ssh_certificate/user_ssh_certificate.py
+++ b/press/press/doctype/user_ssh_certificate/user_ssh_certificate.py
@@ -6,7 +6,6 @@
import base64
import binascii
import hashlib
-from press.utils import log_error
import re
import shlex
import subprocess
@@ -15,8 +14,34 @@
from frappe import safe_decode
from frappe.model.document import Document
+from press.utils import log_error
+
class UserSSHCertificate(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ access_server: DF.DynamicLink | None
+ all_servers: DF.Check
+ amended_from: DF.Link | None
+ certificate_details: DF.Code | None
+ reason: DF.SmallText
+ server_type: DF.Literal["Server", "Proxy Server", "Database Server"]
+ ssh_certificate: DF.Code | None
+ ssh_command: DF.Code | None
+ ssh_fingerprint: DF.Data | None
+ ssh_public_key: DF.Code | None
+ user: DF.Link
+ user_ssh_key: DF.Link
+ valid_until: DF.Datetime | None
+ validity: DF.Literal["3h", "6h", "12h", "1d"]
+ # end: auto-generated types
+
def validate(self):
if not self.ssh_public_key:
frappe.throw("Please make sure that a valid public key has been added in team doc.")
diff --git a/press/press/doctype/user_ssh_key/test_user_ssh_key.py b/press/press/doctype/user_ssh_key/test_user_ssh_key.py
index 1b8f24ea315..c2eb0690895 100644
--- a/press/press/doctype/user_ssh_key/test_user_ssh_key.py
+++ b/press/press/doctype/user_ssh_key/test_user_ssh_key.py
@@ -1,33 +1,104 @@
# Copyright (c) 2021, Frappe and Contributors
# See license.txt
-import unittest
+from __future__ import annotations
import cryptography
import frappe
from cryptography.hazmat.primitives.asymmetric import rsa
+from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey
+from frappe.tests.utils import FrappeTestCase
+from press.press.doctype.team.test_team import create_test_press_admin_team
-def create_test_user_ssh_key(user: str):
- """Create a test SSH key for the given user."""
+
+def create_rsa_key() -> str:
key = rsa.generate_private_key(
public_exponent=65537,
key_size=2048,
backend=cryptography.hazmat.backends.default_backend(),
)
+ str_key = key.public_key().public_bytes(
+ encoding=cryptography.hazmat.primitives.serialization.Encoding.OpenSSH,
+ format=cryptography.hazmat.primitives.serialization.PublicFormat.OpenSSH,
+ )
+ return str_key.decode("utf-8")
+
+
+def create_ed25519_key() -> str:
+ key = Ed25519PrivateKey.generate()
+ str_key = key.public_key().public_bytes(
+ encoding=cryptography.hazmat.primitives.serialization.Encoding.OpenSSH,
+ format=cryptography.hazmat.primitives.serialization.PublicFormat.OpenSSH,
+ )
+ return str_key.decode("utf-8")
+
+
+def create_test_user_ssh_key(user: str, str_key: str | None = None):
+ """Create a test SSH key for the given user."""
+ if not str_key:
+ str_key = create_rsa_key()
ssh_key = frappe.get_doc(
{
"doctype": "User SSH Key",
"user": user,
- "ssh_public_key": key.public_key().public_bytes(
- encoding=cryptography.hazmat.primitives.serialization.Encoding.OpenSSH,
- format=cryptography.hazmat.primitives.serialization.PublicFormat.OpenSSH,
- ),
+ "ssh_public_key": str_key,
}
).insert(ignore_if_duplicate=True)
ssh_key.reload()
return ssh_key
-class TestUserSSHKey(unittest.TestCase):
- pass
+class TestUserSSHKey(FrappeTestCase):
+ def tearDown(self):
+ frappe.db.rollback()
+
+ def test_create_valid_ssh_key_works_with_rsa_key(self):
+ team = create_test_press_admin_team()
+ user = frappe.get_doc("User", team.user)
+ try:
+ create_test_user_ssh_key(user.name)
+ except Exception:
+ self.fail("Adding a valid RSA SSH key failed")
+
+ def test_create_valid_ssh_key_works_with_ed25519(self):
+ """Test that creating a valid SSH key works."""
+ team = create_test_press_admin_team()
+ user = frappe.get_doc("User", team.user)
+ try:
+ create_test_user_ssh_key(user.name, create_ed25519_key())
+ except Exception:
+ self.fail("Adding a valid Ed25519 SSH key failed")
+
+ def test_adding_certificate_as_key_fails(self):
+ """Test that creating an invalid SSH key fails."""
+ team = create_test_press_admin_team()
+ user = frappe.get_doc("User", team.user)
+ with self.assertRaisesRegex(frappe.ValidationError, "Key type has to be one of.*"):
+ create_test_user_ssh_key(user.name, "ssh-ed25519-cert-v01@openssh.com FAKE_KEY")
+
+ def test_adding_single_word_fails(self):
+ team = create_test_press_admin_team()
+ user = frappe.get_doc("User", team.user)
+ with self.assertRaisesRegex(
+ frappe.ValidationError, "You must supply a key in OpenSSH public key format"
+ ):
+ create_test_user_ssh_key(user.name, "ubuntu@frappe.cloud")
+
+ def test_adding_partial_of_valid_key_with_valid_number_of_data_characters_fails(
+ self,
+ ):
+ team = create_test_press_admin_team()
+ user = frappe.get_doc("User", team.user)
+ with self.assertRaisesRegex(
+ frappe.ValidationError,
+ "copy/pasting the key using one of the commands in documentation",
+ ):
+ create_test_user_ssh_key(
+ user.name,
+ "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDB3zVjTzHQSEHQG7OD3bYi7V1xk+PCwko0W3+d1fSUvSDCxSMKtR31+CfMKmjnvoHubOHYI9wvLpx6KdZUl2uO",
+ )
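
`create_rsa_key` and `create_ed25519_key` above cover two of the accepted key types; a parallel helper for the ECDSA types the validator also allows would look like this (an assumption, not part of the test suite):

```python
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat

def create_ecdsa_key() -> str:
    key = ec.generate_private_key(ec.SECP256R1())  # yields ecdsa-sha2-nistp256
    return (
        key.public_key()
        .public_bytes(encoding=Encoding.OpenSSH, format=PublicFormat.OpenSSH)
        .decode("utf-8")
    )
```
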
diff --git a/press/press/doctype/user_ssh_key/user_ssh_key.json b/press/press/doctype/user_ssh_key/user_ssh_key.json
index 6ffc56f3a0c..5aae72bdea8 100644
--- a/press/press/doctype/user_ssh_key/user_ssh_key.json
+++ b/press/press/doctype/user_ssh_key/user_ssh_key.json
@@ -9,6 +9,7 @@
"user",
"column_break_2",
"is_default",
+ "is_removed",
"section_break_4",
"ssh_public_key",
"ssh_fingerprint"
@@ -48,11 +49,17 @@
{
"fieldname": "section_break_4",
"fieldtype": "Section Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "is_removed",
+ "fieldtype": "Check",
+ "label": "Is Removed"
}
],
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2022-10-11 15:57:00.071186",
+ "modified": "2025-01-30 17:39:14.574515",
"modified_by": "Administrator",
"module": "Press",
"name": "User SSH Key",
@@ -87,6 +94,7 @@
"write": 1
}
],
+ "show_title_field_in_link": 1,
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
diff --git a/press/press/doctype/user_ssh_key/user_ssh_key.py b/press/press/doctype/user_ssh_key/user_ssh_key.py
index 93d1c75b60e..70085da47fc 100644
--- a/press/press/doctype/user_ssh_key/user_ssh_key.py
+++ b/press/press/doctype/user_ssh_key/user_ssh_key.py
@@ -1,16 +1,78 @@
# Copyright (c) 2021, Frappe and contributors
# For license information, please see license.txt
-import frappe
import base64
-import hashlib
-from frappe import safe_decode
+import shlex
+import struct
+import subprocess
+from typing import ClassVar
+
+import frappe
from frappe.model.document import Document
+from press.api.client import dashboard_whitelist
+
+
+class SSHKeyValueError(ValueError):
+ pass
+
+
+class SSHFingerprintError(ValueError):
+ pass
+
class UserSSHKey(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ is_default: DF.Check
+ is_removed: DF.Check
+ ssh_fingerprint: DF.Data | None
+ ssh_public_key: DF.Code
+ user: DF.Link
+ # end: auto-generated types
+
+ dashboard_fields: ClassVar = ["ssh_fingerprint", "is_default", "user", "is_removed"]
+
+ valid_key_types: ClassVar = [
+ "ssh-rsa",
+ "ssh-ed25519",
+ "ecdsa-sha2-nistp256",
+ "ecdsa-sha2-nistp384",
+ "ecdsa-sha2-nistp521",
+ "sk-ecdsa-sha2-nistp256@openssh.com",
+ "sk-ssh-ed25519@openssh.com",
+ ]
+
+ def check_embedded_key_type(self, key_type: str, key_bytes: bytes):
+ type_len = struct.unpack(">I", key_bytes[:4])[0] # >I is big endian unsigned int
+ offset = 4 + type_len
+ embedded_type = key_bytes[4:offset]
+ if embedded_type.decode("utf-8") != key_type:
+ raise SSHKeyValueError(f"Key type {key_type} does not match key")
+
def validate(self):
- self.generate_ssh_fingerprint()
+ if self.is_removed: # to allow removing invalid keys
+ return
+ msg = "You must supply a key in OpenSSH public key format. Please try copy/pasting the key using one of the commands in documentation."
+ try:
+ key_type, key, *comment = self.ssh_public_key.strip().split()
+ if key_type not in self.valid_key_types:
+ raise SSHKeyValueError(f"Key type has to be one of {', '.join(self.valid_key_types)}")
+ key_bytes = base64.b64decode(key)
+ self.check_embedded_key_type(key_type, key_bytes)
+ self.generate_ssh_fingerprint(self.ssh_public_key.encode())
+ except SSHKeyValueError as e:
+ frappe.throw(
+ f"{e!s}\n{msg}",
+ )
+ except Exception:
+ frappe.throw(msg)
def after_insert(self):
if self.is_default:
@@ -20,6 +82,25 @@ def on_update(self):
if self.has_value_changed("is_default") and self.is_default:
self.make_other_keys_non_default()
+ @dashboard_whitelist()
+ def delete(self):
+ if self.is_default:
+ other_key = frappe.get_all(
+ "User SSH Key",
+ filters={"user": self.user, "name": ("!=", self.name)},
+ fields=["name"],
+ limit=1,
+ )
+ if other_key:
+ frappe.db.set_value("User SSH Key", other_key[0].name, "is_default", True)
+
+ if frappe.db.exists("SSH Certificate", {"user_ssh_key": self.name}):
+ self.is_removed = 1
+ self.save()
+
+ else:
+ super().delete()
+
def make_other_keys_non_default(self):
frappe.db.set_value(
"User SSH Key",
@@ -28,11 +109,15 @@ def make_other_keys_non_default(self):
False,
)
- def generate_ssh_fingerprint(self):
+ def generate_ssh_fingerprint(self, key_bytes: bytes):
try:
- ssh_key_b64 = base64.b64decode(self.ssh_public_key.strip().split()[1])
- sha256_sum = hashlib.sha256()
- sha256_sum.update(ssh_key_b64)
- self.ssh_fingerprint = safe_decode(base64.b64encode(sha256_sum.digest()))
- except Exception:
- frappe.throw("Key is invalid. You must supply a key in OpenSSH public key format")
+ self.ssh_fingerprint = (
+ subprocess.check_output(
+ shlex.split("ssh-keygen -lf -"), stderr=subprocess.STDOUT, input=key_bytes
+ )
+ .decode()
+ .split()[1]
+ .split(":")[1]
+ )
+ except subprocess.CalledProcessError as e:
+ raise SSHKeyValueError(f"Error generating fingerprint: {e.output.decode()}") from e
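
`check_embedded_key_type` above guards against mislabeled keys. A standalone sketch of the same OpenSSH wire-format check: the base64 payload begins with a 4-byte big-endian length followed by the key type string, which must match the declared type:

```python
import base64
import struct

def embedded_key_type(openssh_public_key: str) -> str:
    declared_type, b64_key, *_comment = openssh_public_key.strip().split()
    key_bytes = base64.b64decode(b64_key)
    (type_len,) = struct.unpack(">I", key_bytes[:4])  # 4-byte big-endian length
    embedded = key_bytes[4 : 4 + type_len].decode("utf-8")
    assert embedded == declared_type, f"{embedded!r} != {declared_type!r}"
    return embedded
```
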
diff --git a/press/press/doctype/version_upgrade/test_version_upgrade.py b/press/press/doctype/version_upgrade/test_version_upgrade.py
index a708287a325..f9527343cf5 100644
--- a/press/press/doctype/version_upgrade/test_version_upgrade.py
+++ b/press/press/doctype/version_upgrade/test_version_upgrade.py
@@ -2,8 +2,10 @@
# See license.txt
from unittest.mock import Mock, patch
+
import frappe
from frappe.tests.utils import FrappeTestCase
+
from press.press.doctype.agent_job.agent_job import AgentJob
from press.press.doctype.app.test_app import create_test_app
from press.press.doctype.release_group.test_release_group import (
@@ -11,7 +13,7 @@
)
from press.press.doctype.server.test_server import create_test_server
from press.press.doctype.site.test_site import create_test_bench, create_test_site
-
+from press.press.doctype.site_update.test_site_update import create_test_site_update
from press.press.doctype.version_upgrade.version_upgrade import VersionUpgrade
@@ -38,18 +40,40 @@ def test_version_upgrade_creation_throws_when_destination_doesnt_have_all_apps_i
group2 = create_test_release_group([app1])
source_bench = create_test_bench(group=group1, server=server.name)
- dest_bench = create_test_bench(group=group2, server=server.name)
+ create_test_bench(group=group2, server=server.name)
site = create_test_site(bench=source_bench.name)
site.install_app(app2.name)
- destination_group = frappe.get_doc("Release Group", dest_bench.group)
- destination_group.add_server(server.name)
+ group2.append("servers", {"server": server.name})
+ group2.save()
self.assertRaisesRegex(
frappe.ValidationError,
f".*apps installed on {site.name}: app., app.$",
create_test_version_upgrade,
site.name,
- destination_group.name,
+ group2.name,
)
+
+ def test_version_upgrade_creates_site_update_even_when_past_updates_failed(self):
+ server = create_test_server()
+ app1 = create_test_app() # frappe
+
+ group1 = create_test_release_group([app1])
+ group2 = create_test_release_group([app1])
+
+ source_bench = create_test_bench(group=group1, server=server.name)
+ create_test_bench(group=group2, server=server.name)
+
+ site = create_test_site(bench=source_bench.name)
+
+ group2.append("servers", {"server": server.name})
+ group2.save()
+
+ create_test_site_update(site.name, group2.name, "Recovered") # cause of failure not resolved
+ site_updates_before = frappe.db.count("Site Update", {"site": site.name})
+ version_upgrade = create_test_version_upgrade(site.name, group2.name)
+ version_upgrade.start() # simulate scheduled one. User will be admin
+ site_updates_after = frappe.db.count("Site Update", {"site": site.name})
+ self.assertEqual(site_updates_before + 1, site_updates_after)
diff --git a/press/press/doctype/version_upgrade/version_upgrade.json b/press/press/doctype/version_upgrade/version_upgrade.json
index 3826152d8c5..39966c03748 100644
--- a/press/press/doctype/version_upgrade/version_upgrade.json
+++ b/press/press/doctype/version_upgrade/version_upgrade.json
@@ -14,6 +14,7 @@
"site_update",
"destination_group",
"skip_failing_patches",
+ "skip_backups",
"section_break_8",
"last_output",
"last_traceback"
@@ -90,11 +91,17 @@
"label": "Destination Group",
"options": "Release Group",
"reqd": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "skip_backups",
+ "fieldtype": "Check",
+ "label": "Skip Backups"
}
],
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2022-09-08 09:58:18.202905",
+ "modified": "2024-10-23 22:33:56.984926",
"modified_by": "Administrator",
"module": "Press",
"name": "Version Upgrade",
@@ -111,6 +118,26 @@
"role": "System Manager",
"share": 1,
"write": 1
+ },
+ {
+ "create": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1,
+ "write": 1
}
],
"sort_field": "modified",
diff --git a/press/press/doctype/version_upgrade/version_upgrade.py b/press/press/doctype/version_upgrade/version_upgrade.py
index 9d02a4b666c..bbbeda8d2ea 100644
--- a/press/press/doctype/version_upgrade/version_upgrade.py
+++ b/press/press/doctype/version_upgrade/version_upgrade.py
@@ -1,17 +1,50 @@
# Copyright (c) 2022, Frappe and contributors
# For license information, please see license.txt
-from typing import List
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
import frappe
from frappe.model.document import Document
+
+from press.press.doctype.communication_info.communication_info import get_communication_info
+from press.press.doctype.press_notification.press_notification import (
+ create_new_notification,
+)
+from press.press.doctype.site.site import TRANSITORY_STATES
from press.utils import log_error
+if TYPE_CHECKING:
+ from press.press.doctype.site.site import Site
+
class VersionUpgrade(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ destination_group: DF.Link
+ last_output: DF.Code | None
+ last_traceback: DF.Code | None
+ scheduled_time: DF.Datetime | None
+ site: DF.Link
+ site_update: DF.Link | None
+ skip_backups: DF.Check
+ skip_failing_patches: DF.Check
+ source_group: DF.Link | None
+ status: DF.Literal["Scheduled", "Pending", "Running", "Success", "Failure"]
+ # end: auto-generated types
+
doctype = "Version Upgrade"
def validate(self):
+ if self.status == "Failure":
+ return
self.validate_versions()
self.validate_same_server()
self.validate_apps()
@@ -19,8 +52,7 @@ def validate(self):
def validate_same_server(self):
site_server = frappe.get_doc("Site", self.site).server
destination_servers = [
- server.server
- for server in frappe.get_doc("Release Group", self.destination_group).servers
+ server.server for server in frappe.get_doc("Release Group", self.destination_group).servers
]
if site_server not in destination_servers:
@@ -31,9 +63,7 @@ def validate_same_server(self):
def validate_apps(self):
site_apps = [app.app for app in frappe.get_doc("Site", self.site).apps]
- bench_apps = [
- app.app for app in frappe.get_doc("Release Group", self.destination_group).apps
- ]
+ bench_apps = [app.app for app in frappe.get_doc("Release Group", self.destination_group).apps]
if diff := set(site_apps) - set(bench_apps):
frappe.throw(
f"Destination Group {self.destination_group} doesn't have some of the apps installed on {self.site}: {', '.join(diff)}",
@@ -48,7 +78,7 @@ def validate_versions(self):
"You are upgrading the site to Nightly Branch. Please note that Nightly might not be stable"
)
return
- elif source_version == "Nightly":
+ if source_version == "Nightly":
frappe.throw(
f"Downgrading from Nightly to {dest_version.title()} is not allowed",
frappe.ValidationError,
@@ -63,23 +93,51 @@ def validate_versions(self):
@frappe.whitelist()
def start(self):
- site = frappe.get_doc("Site", self.site)
- if site.status.endswith("ing"):
+ site: "Site" = frappe.get_doc("Site", self.site)
+ if site.status in TRANSITORY_STATES:
frappe.throw("Site is under maintenance. Cannot Update")
try:
self.site_update = site.move_to_group(
- self.destination_group, self.skip_failing_patches
+ self.destination_group, self.skip_failing_patches, self.skip_backups
).name
except Exception as e:
frappe.db.rollback()
self.status = "Failure"
self.add_comment(text=str(e))
+
+ site = frappe.get_doc("Site", self.site)
+ next_version = frappe.get_value("Release Group", self.destination_group, "version")
+
+ message = f"Version Upgrade for site {site.host_name} to {next_version} failed"
+ agent_job_id = frappe.get_value("Site Update", self.site_update, "update_job")
+
+ create_new_notification(
+ site.team,
+ "Version Upgrade",
+ "Agent Job",
+ agent_job_id,
+ message,
+ )
else:
self.status = frappe.db.get_value("Site Update", self.site_update, "status")
+ if self.status == "Success":
+ site = frappe.get_doc("Site", self.site)
+ next_version = frappe.get_value("Release Group", self.destination_group, "version")
+
+ message = f"Version Upgrade for site {site.host_name} to {next_version} has completed successfully"
+ agent_job_id = frappe.get_value("Site Update", self.site_update, "update_job")
+
+ create_new_notification(
+ site.team,
+ "Version Upgrade",
+ "Agent Job",
+ agent_job_id,
+ message,
+ )
self.save()
@classmethod
- def get_all_scheduled_before_now(cls) -> List[Document]:
+ def get_all_scheduled_before_now(cls) -> list["VersionUpgrade"]:
upgrades = frappe.get_all(
cls.doctype,
{"scheduled_time": ("<=", frappe.utils.now()), "status": "Scheduled"},
@@ -89,12 +147,12 @@ def get_all_scheduled_before_now(cls) -> List[Document]:
return cls.get_docs(upgrades)
@classmethod
- def get_all_ongoing_version_upgrades(cls) -> List[Document]:
+ def get_all_ongoing_version_upgrades(cls) -> list[Document]:
upgrades = frappe.get_all(cls.doctype, {"status": ("in", ["Pending", "Running"])})
return cls.get_docs(upgrades)
@classmethod
- def get_docs(cls, names: List[str]) -> List[Document]:
+ def get_docs(cls, names: list[str]) -> list[Document]:
return [frappe.get_doc(cls.doctype, name) for name in names]
@@ -110,11 +168,9 @@ def update_from_site_update():
version_upgrade.last_traceback = last_traceback
version_upgrade.last_output = last_output
version_upgrade.status = "Failure"
- site = frappe.get_doc("Site", version_upgrade.site)
- recipient = site.notify_email or frappe.get_doc("Team", site.team).user
frappe.sendmail(
- recipients=[recipient],
+ recipients=get_communication_info("Email", "Site Activity", "Site", version_upgrade.site),
subject=f"Automated Version Upgrade Failed for {version_upgrade.site}",
reference_doctype="Version Upgrade",
reference_name=version_upgrade.name,
@@ -127,14 +183,19 @@ def update_from_site_update():
)
version_upgrade.save()
frappe.db.commit()
- except Exception as e:
- frappe.log_error(f"Error while updating Version Upgrade {version_upgrade.name}", e)
+ except Exception:
+ frappe.log_error(f"Error while updating Version Upgrade {version_upgrade.name}")
frappe.db.rollback()
def run_scheduled_upgrades():
for upgrade in VersionUpgrade.get_all_scheduled_before_now():
try:
+ site_status = frappe.db.get_value("Site", upgrade.site, "status")
+ if site_status in TRANSITORY_STATES:
+ # If we attempt to start the upgrade now, it will fail
+ # This will be picked up in the next iteration
+ continue
upgrade.start()
frappe.db.commit()
except Exception:
diff --git a/press/press/doctype/virtual_disk_snapshot/patches/rename_aws_fields.py b/press/press/doctype/virtual_disk_snapshot/patches/rename_aws_fields.py
new file mode 100644
index 00000000000..826ddbcc45c
--- /dev/null
+++ b/press/press/doctype/virtual_disk_snapshot/patches/rename_aws_fields.py
@@ -0,0 +1,11 @@
+# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and Contributors
+# For license information, please see license.txt
+
+import frappe
+from frappe.model.utils.rename_field import rename_field
+
+
+def execute():
+ frappe.reload_doctype("Virtual Disk Snapshot")
+ rename_field("Virtual Disk Snapshot", "aws_snapshot_id", "snapshot_id")
+ rename_field("Virtual Disk Snapshot", "aws_volume_id", "volume_id")
diff --git a/press/press/doctype/virtual_disk_snapshot/virtual_disk_snapshot.js b/press/press/doctype/virtual_disk_snapshot/virtual_disk_snapshot.js
index 155a84053d3..10230e5718e 100644
--- a/press/press/doctype/virtual_disk_snapshot/virtual_disk_snapshot.js
+++ b/press/press/doctype/virtual_disk_snapshot/virtual_disk_snapshot.js
@@ -6,6 +6,8 @@ frappe.ui.form.on('Virtual Disk Snapshot', {
[
[__('Sync'), 'sync'],
[__('Delete'), 'delete_snapshot'],
+ [__('Lock'), 'lock'],
+ [__('Unlock'), 'unlock'],
].forEach(([label, method]) => {
frm.add_custom_button(
label,
@@ -15,9 +17,9 @@ frappe.ui.form.on('Virtual Disk Snapshot', {
__('Actions'),
);
});
- if (frm.doc.aws_snapshot_id) {
+ if (frm.doc.snapshot_id) {
frm.add_web_link(
- `https://${frm.doc.region}.console.aws.amazon.com/ec2/v2/home?region=${frm.doc.region}#SnapshotDetails:snapshotId=${frm.doc.aws_snapshot_id}`,
+ `https://${frm.doc.region}.console.aws.amazon.com/ec2/v2/home?region=${frm.doc.region}#SnapshotDetails:snapshotId=${frm.doc.snapshot_id}`,
__('Visit AWS Dashboard'),
);
}
diff --git a/press/press/doctype/virtual_disk_snapshot/virtual_disk_snapshot.json b/press/press/doctype/virtual_disk_snapshot/virtual_disk_snapshot.json
index 60490c49f11..f00f10c4279 100644
--- a/press/press/doctype/virtual_disk_snapshot/virtual_disk_snapshot.json
+++ b/press/press/doctype/virtual_disk_snapshot/virtual_disk_snapshot.json
@@ -7,34 +7,29 @@
"engine": "InnoDB",
"field_order": [
"virtual_machine",
- "aws_snapshot_id",
+ "snapshot_id",
"status",
+ "expired",
"column_break_4",
"cluster",
"region",
- "aws_volume_id",
+ "volume_id",
"section_break_41e4",
"size",
"start_time",
"column_break_7lcz",
"progress",
+ "duration",
"section_break_12",
- "mariadb_root_password"
+ "mariadb_root_password",
+ "snapshot_purpose_section",
+ "physical_backup",
+ "column_break_xgrp",
+ "rolling_snapshot",
+ "column_break_jyxw",
+ "dedicated_snapshot"
],
"fields": [
- {
- "fieldname": "aws_snapshot_id",
- "fieldtype": "Data",
- "label": "AWS Snapshot ID",
- "read_only": 1,
- "reqd": 1
- },
- {
- "fieldname": "aws_volume_id",
- "fieldtype": "Data",
- "label": "AWS Volume ID",
- "read_only": 1
- },
{
"fieldname": "size",
"fieldtype": "Int",
@@ -49,7 +44,8 @@
"label": "Virtual Machine",
"options": "Virtual Machine",
"read_only": 1,
- "reqd": 1
+ "reqd": 1,
+ "search_index": 1
},
{
"fetch_from": "virtual_machine.cluster",
@@ -64,9 +60,11 @@
{
"fetch_from": "virtual_machine.region",
"fieldname": "region",
- "fieldtype": "Data",
+ "fieldtype": "Link",
"label": "Region",
- "read_only": 1
+ "options": "Cloud Region",
+ "read_only": 1,
+ "reqd": 1
},
{
"fieldname": "column_break_4",
@@ -88,13 +86,15 @@
"in_standard_filter": 1,
"label": "Status",
"options": "Pending\nCompleted\nError\nRecovering\nRecoverable\nUnavailable",
- "read_only": 1
+ "read_only": 1,
+ "search_index": 1
},
{
"fieldname": "start_time",
"fieldtype": "Datetime",
"label": "Start Time",
- "read_only": 1
+ "read_only": 1,
+ "search_index": 1
},
{
"fieldname": "section_break_12",
@@ -113,11 +113,77 @@
{
"fieldname": "column_break_7lcz",
"fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "snapshot_id",
+ "fieldtype": "Data",
+ "label": "Snapshot ID",
+ "read_only": 1,
+ "reqd": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "volume_id",
+ "fieldtype": "Data",
+ "label": "Volume ID",
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "duration",
+ "fieldtype": "Duration",
+ "label": "Duration",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "physical_backup",
+ "fieldtype": "Check",
+ "in_standard_filter": 1,
+ "label": "Physical Backup"
+ },
+ {
+ "fieldname": "snapshot_purpose_section",
+ "fieldtype": "Section Break",
+ "label": "Snapshot Purpose"
+ },
+ {
+ "fieldname": "column_break_xgrp",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "rolling_snapshot",
+ "fieldtype": "Check",
+ "in_standard_filter": 1,
+ "label": "Rolling Snapshot"
+ },
+ {
+ "default": "0",
+ "fieldname": "expired",
+ "fieldtype": "Check",
+ "hidden": 1,
+ "label": "Expired"
+ },
+ {
+ "fieldname": "column_break_jyxw",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "dedicated_snapshot",
+ "fieldtype": "Check",
+ "label": "Dedicated Snapshot"
}
],
"index_web_pages_for_search": 1,
- "links": [],
- "modified": "2023-04-01 15:58:51.156501",
+ "links": [
+ {
+ "link_doctype": "Site Backup",
+ "link_fieldname": "database_snapshot"
+ }
+ ],
+ "modified": "2025-09-12 18:31:18.904020",
"modified_by": "Administrator",
"module": "Press",
"name": "Virtual Disk Snapshot",
@@ -134,10 +200,32 @@
"role": "System Manager",
"share": 1,
"write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1
}
],
+ "row_format": "Dynamic",
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"title_field": "virtual_machine"
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/virtual_disk_snapshot/virtual_disk_snapshot.py b/press/press/doctype/virtual_disk_snapshot/virtual_disk_snapshot.py
index 55cd6134a15..3bc2715fc35 100644
--- a/press/press/doctype/virtual_disk_snapshot/virtual_disk_snapshot.py
+++ b/press/press/doctype/virtual_disk_snapshot/virtual_disk_snapshot.py
@@ -1,13 +1,53 @@
# Copyright (c) 2022, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
+import math
+import time
+
+import boto3
+import botocore
import frappe
+import frappe.utils
+import pytz
+import rq
+from botocore.exceptions import ClientError
from frappe.model.document import Document
-import boto3
+from frappe.utils.data import cint
+from hcloud import Client as HetznerClient
+from hcloud.images.domain import Image as HetznerImage
+from oci.core import BlockstorageClient
+
from press.utils import log_error
+from press.utils.jobs import has_job_timeout_exceeded
class VirtualDiskSnapshot(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ cluster: DF.Link | None
+ dedicated_snapshot: DF.Check
+ duration: DF.Duration | None
+ expired: DF.Check
+ mariadb_root_password: DF.Password | None
+ physical_backup: DF.Check
+ progress: DF.Data | None
+ region: DF.Link
+ rolling_snapshot: DF.Check
+ size: DF.Int
+ snapshot_id: DF.Data
+ start_time: DF.Datetime | None
+ status: DF.Literal["Pending", "Completed", "Error", "Recovering", "Recoverable", "Unavailable"]
+ virtual_machine: DF.Link
+ volume_id: DF.Data | None
+ # end: auto-generated types
+
def before_insert(self):
self.set_credentials()
@@ -17,38 +57,146 @@ def after_insert(self):
def set_credentials(self):
series = frappe.db.get_value("Virtual Machine", self.virtual_machine, "series")
if series == "m" and frappe.db.exists("Database Server", self.virtual_machine):
- self.mariadb_root_password = frappe.get_doc(
- "Database Server", self.virtual_machine
- ).get_password("mariadb_root_password")
+ self.mariadb_root_password = frappe.get_doc("Database Server", self.virtual_machine).get_password(
+ "mariadb_root_password"
+ )
+
+ def on_update(self): # noqa: C901
+ if self.has_value_changed("status") and self.status == "Unavailable":
+ site_backup_name = frappe.db.exists(
+ "Site Backup", {"database_snapshot": self.name, "files_availability": ("!=", "Unavailable")}
+ )
+ if site_backup_name:
+ frappe.db.set_value("Site Backup", site_backup_name, "files_availability", "Unavailable")
+
+ if self.has_value_changed("status") and self.status == "Completed":
+ old_doc = self.get_doc_before_save()
+ if old_doc is None or old_doc.status != "Pending":
+ return
+
+ self.duration = frappe.utils.cint(
+ frappe.utils.time_diff_in_seconds(frappe.utils.now_datetime(), self.creation)
+ )
+ self.save(ignore_version=True)
+
+ if self.physical_backup:
+ # Trigger execution of restoration
+ physical_restore_name = frappe.db.exists(
+ "Physical Backup Restoration", {"disk_snapshot": self.name, "status": "Running"}
+ )
+ if physical_restore_name:
+ frappe.get_doc("Physical Backup Restoration", physical_restore_name).next()
+
+ if self.rolling_snapshot:
+ # Find older rolling snapshots than current snapshot
+ # If exists, delete that
+ older_snapshots = frappe.db.get_all(
+ self.doctype,
+ {
+ "virtual_machine": self.virtual_machine,
+ "volume_id": self.volume_id,
+ "name": ["!=", self.name],
+ "creation": ["<", self.creation],
+ "status": ["in", ("Pending", "Completed")],
+ "physical_backup": 0,
+ "rolling_snapshot": 1,
+ },
+ pluck="name",
+ )
+ for older_snapshot_name in older_snapshots:
+ frappe.enqueue_doc(
+ self.doctype,
+ name=older_snapshot_name,
+ method="delete_snapshot",
+ enqueue_after_commit=True,
+ deduplicate=True,
+ job_id=f"virtual_disk_snapshot||delete_snapshot||{older_snapshot_name}",
+ )
@frappe.whitelist()
def sync(self):
- try:
- snapshots = self.client.describe_snapshots(SnapshotIds=[self.aws_snapshot_id])[
- "Snapshots"
- ]
- if snapshots:
- snapshot = snapshots[0]
- self.aws_volume_id = snapshot["VolumeId"]
- self.aws_snapshot_id = snapshot["SnapshotId"]
-
- self.status = self.get_status_map(snapshot["State"])
- self.description = snapshot["Description"]
- self.size = snapshot["VolumeSize"]
- self.start_time = frappe.utils.format_datetime(
- snapshot["StartTime"], "yyyy-MM-dd HH:mm:ss"
- )
- self.progress = snapshot["Progress"]
- except Exception:
- self.status = "Unavailable"
- self.save()
+ cluster = frappe.get_doc("Cluster", self.cluster)
+ if cluster.cloud_provider == "AWS EC2":
+ try:
+ snapshots = self.client.describe_snapshots(SnapshotIds=[self.snapshot_id])["Snapshots"]
+ if snapshots:
+ snapshot = snapshots[0]
+ self.volume_id = snapshot["VolumeId"]
+ self.snapshot_id = snapshot["SnapshotId"]
+
+ self.status = self.get_aws_status_map(snapshot["State"])
+ self.description = snapshot["Description"]
+ self.size = snapshot["VolumeSize"]
+ self.start_time = frappe.utils.format_datetime(
+ snapshot["StartTime"], "yyyy-MM-dd HH:mm:ss"
+ )
+ self.progress = snapshot["Progress"]
+ except Exception:
+ self.status = "Unavailable"
+ elif cluster.cloud_provider == "OCI":
+ if ".bootvolumebackup." in self.snapshot_id:
+ snapshot = self.client.get_boot_volume_backup(self.snapshot_id).data
+ self.volume_id = snapshot.boot_volume_id
+ else:
+ snapshot = self.client.get_volume_backup(self.snapshot_id).data
+ self.volume_id = snapshot.volume_id
+ self.status = self.get_oci_status_map(snapshot.lifecycle_state)
+ self.description = snapshot.display_name
+ self.size = snapshot.size_in_gbs
+
+ self.start_time = frappe.utils.format_datetime(
+ snapshot.time_created.astimezone(pytz.timezone(frappe.utils.get_system_timezone())),
+ "yyyy-MM-dd HH:mm:ss",
+ )
+
+ elif cluster.cloud_provider == "Hetzner":
+ try:
+ client: HetznerClient = self.client
+ snapshot = client.images.get_by_id(self.snapshot_id)
+ self.status = self.get_hetzner_status_map(snapshot.status)
+ self.size = math.ceil(snapshot.image_size or 0)
+ self.start_time = frappe.utils.format_datetime(snapshot.created, "yyyy-MM-dd HH:mm:ss")
+ self.progress = 100 if self.status == "Completed" else 0
+ except Exception:
+ self.status = "Unavailable"
+
+ self.save(ignore_version=True)
+ self.sync_server_snapshot()
@frappe.whitelist()
- def delete_snapshot(self):
- self.client.delete_snapshot(SnapshotId=self.aws_snapshot_id)
+ def delete_snapshot(self, ignore_validation: bool | None = None): # noqa: C901
+ if ignore_validation is None:
+ ignore_validation = False
+
self.sync()
+ if self.status == "Unavailable":
+ return
+
+ if self.dedicated_snapshot and not ignore_validation:
+ frappe.throw(
+ "Dedicated snapshots cannot be deleted directly. Please delete from Server Snapshot.",
+ )
- def get_status_map(self, status):
+ cluster = frappe.get_doc("Cluster", self.cluster)
+ if cluster.cloud_provider == "AWS EC2":
+ try:
+ self.client.delete_snapshot(SnapshotId=self.snapshot_id)
+ except ClientError as e:
+ if e.response["Error"]["Code"] == "InvalidSnapshot.InUse":
+ raise SnapshotInUseError(e.response["Error"]["Message"]) from e
+ if e.response["Error"]["Code"] == "SnapshotLocked":
+ raise SnapshotLockedError(e.response["Error"]["Message"]) from e
+ raise e
+ elif cluster.cloud_provider == "OCI":
+ if ".bootvolumebackup." in self.snapshot_id:
+ self.client.delete_boot_volume_backup(self.snapshot_id)
+ else:
+ self.client.delete_volume_backup(self.snapshot_id)
+ elif cluster.cloud_provider == "Hetzner":
+ self.client.images.delete(HetznerImage(id=cint(self.snapshot_id)))
+ self.sync()
+
+ def get_aws_status_map(self, status):
return {
"pending": "Pending",
"completed": "Completed",
@@ -57,40 +205,429 @@ def get_status_map(self, status):
"recoverable": "Recoverable",
}.get(status, "Unavailable")
+ def get_oci_status_map(self, status):
+ return {
+ "CREATING": "Pending",
+ "AVAILABLE": "Completed",
+ "TERMINATING": "Pending",
+ "TERMINATED": "Unavailable",
+ "FAULTY": "Error",
+ "REQUEST_RECEIVED": "Pending",
+ }.get(status, "Unavailable")
+
+ def get_hetzner_status_map(self, status):
+ return {
+ "creating": "Pending",
+ "available": "Completed",
+ }.get(status, "Unavailable")
+
+ @frappe.whitelist()
+ def lock(self):
+ cluster = frappe.get_doc("Cluster", self.cluster)
+ if cluster.cloud_provider == "AWS EC2":
+ self.client.lock_snapshot(
+ SnapshotId=self.snapshot_id,
+ LockMode="governance",
+ LockDuration=365, # Lock for 1 year
+ # After this period, the snapshot will be automatically unlocked
+ )
+ elif cluster.cloud_provider == "Hetzner":
+ self.client.images.change_protection(HetznerImage(cint(self.snapshot_id)), delete=True)
+ else:
+ frappe.throw("Only AWS and Hetzner Providers support snapshot locking/unlocking")
+
+ @frappe.whitelist()
+ def unlock(self):
+ cluster = frappe.get_doc("Cluster", self.cluster)
+ if cluster.cloud_provider == "AWS EC2":
+ try:
+ self.client.unlock_snapshot(SnapshotId=self.snapshot_id)
+ except botocore.exceptions.ClientError as e:
+ if e.response.get("Error", {}).get("Code") == "SnapshotLockNotFound":
+ return
+ raise e
+
+ elif cluster.cloud_provider == "Hetzner":
+ self.client.images.change_protection(HetznerImage(cint(self.snapshot_id)), delete=False)
+
+ else:
+ frappe.throw("Only AWS and Hetzner Providers support snapshot locking/unlocking")
+
+ def create_volume(
+ self,
+ availability_zone: str,
+ iops: int = 3000,
+ throughput: int | None = None,
+ size: int | None = None,
+ volume_initialization_rate: int | None = None,
+ ) -> str:
+ self.sync()
+ if self.status != "Completed":
+ raise Exception("Snapshot is unavailable")
+ if throughput is None:
+ throughput = 125
+ if volume_initialization_rate is None:
+ volume_initialization_rate = 100
+ if size is None:
+ size = 0
+
+ size = max(self.size, size) # Sanity: never create a volume smaller than the snapshot
+ response = self.client.create_volume(
+ SnapshotId=self.snapshot_id,
+ AvailabilityZone=availability_zone,
+ VolumeType="gp3",
+ Size=size,
+ TagSpecifications=[
+ {
+ "ResourceType": "volume",
+ "Tags": [{"Key": "Name", "Value": f"Frappe Cloud Snapshot - {self.name}"}],
+ },
+ ],
+ Iops=iops,
+ Throughput=throughput,
+ VolumeInitializationRate=volume_initialization_rate,
+ )
+ return response["VolumeId"]
+
+ def sync_server_snapshot(self):
+ if not self.dedicated_snapshot:
+ return
+
+ server_snapshot = frappe.db.get_value(
+ "Server Snapshot", filters={"app_server_snapshot": self.name}, pluck="name"
+ )
+ if not server_snapshot:
+ server_snapshot = frappe.db.get_value(
+ "Server Snapshot", filters={"database_server_snapshot": self.name}, pluck="name"
+ )
+ if not server_snapshot:
+ return
+
+ doc = frappe.get_doc("Server Snapshot", server_snapshot, for_update=True)
+ doc.sync(now=True, trigger_snapshot_sync=False)
+
@property
def client(self):
cluster = frappe.get_doc("Cluster", self.cluster)
- return boto3.client(
- "ec2",
- region_name=self.region,
- aws_access_key_id=cluster.aws_access_key_id,
- aws_secret_access_key=cluster.get_password("aws_secret_access_key"),
- )
+ if cluster.cloud_provider == "AWS EC2":
+ return boto3.client(
+ "ec2",
+ region_name=self.region,
+ aws_access_key_id=cluster.aws_access_key_id,
+ aws_secret_access_key=cluster.get_password("aws_secret_access_key"),
+ )
+ if cluster.cloud_provider == "OCI":
+ return BlockstorageClient(cluster.get_oci_config())
+
+ if cluster.cloud_provider == "Hetzner":
+ api_token = cluster.get_password("hetzner_api_token")
+ return HetznerClient(token=api_token)
+ return None
+
+
+class SnapshotInUseError(Exception):
+ pass
+
+
+class SnapshotLockedError(Exception):
+ pass
def sync_snapshots():
- snapshots = frappe.get_all("Virtual Disk Snapshot", {"status": "Pending"})
+ snapshots = frappe.get_all(
+ "Virtual Disk Snapshot", {"status": "Pending", "physical_backup": 0, "rolling_snapshot": 0}
+ )
for snapshot in snapshots:
+ if has_job_timeout_exceeded():
+ return
try:
frappe.get_doc("Virtual Disk Snapshot", snapshot.name).sync()
frappe.db.commit()
+ except rq.timeouts.JobTimeoutException:
+ return
except Exception:
frappe.db.rollback()
log_error(title="Virtual Disk Snapshot Sync Error", virtual_snapshot=snapshot.name)
+def sync_rolling_snapshots():
+ snapshots = frappe.get_all(
+ "Virtual Disk Snapshot",
+ {"status": "Pending", "physical_backup": 0, "rolling_snapshot": 1, "dedicated_snapshot": 0},
+ )
+ start_time = time.time()
+ for snapshot in snapshots:
+ if has_job_timeout_exceeded():
+ return
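+ # Don't let a single run exceed ~10 minutes; leave the rest for the next scheduled run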
+ if time.time() - start_time > 600:
+ break
+ try:
+ frappe.get_doc("Virtual Disk Snapshot", snapshot.name).sync()
+ frappe.db.commit()
+ except rq.timeouts.JobTimeoutException:
+ return
+ except Exception:
+ frappe.db.rollback()
+ log_error(title="Virtual Disk Rolling Snapshot Sync Error", virtual_snapshot=snapshot.name)
+
+
+def sync_physical_backup_snapshots():
+ snapshots = frappe.get_all(
+ "Virtual Disk Snapshot",
+ {"status": "Pending", "physical_backup": 1, "rolling_snapshot": 0, "dedicated_snapshot": 0},
+ order_by="modified asc",
+ )
+ start_time = time.time()
+ for snapshot in snapshots:
+ if has_job_timeout_exceeded():
+ return
+ # If we've already spent more than a minute, stop syncing:
+ # this function is executed every minute and we don't want
+ # two sync runs overlapping
+ if time.time() - start_time > 60:
+ break
+ try:
+ frappe.get_doc("Virtual Disk Snapshot", snapshot.name).sync()
+ frappe.db.commit()
+ except rq.timeouts.JobTimeoutException:
+ return
+ except Exception:
+ frappe.db.rollback()
+ log_error(
+ title="Physical Restore : Virtual Disk Snapshot Sync Error", virtual_snapshot=snapshot.name
+ )
+
+
def delete_old_snapshots():
snapshots = frappe.get_all(
"Virtual Disk Snapshot",
- {"status": "Completed", "creation": ("<=", frappe.utils.add_days(None, -2))},
+ {
+ "status": "Completed",
+ "creation": ("<=", frappe.utils.add_days(None, -2)),
+ "physical_backup": False,
+ "rolling_snapshot": False,
+ "dedicated_snapshot": False,
+ },
+ pluck="name",
+ order_by="creation asc",
+ limit=500,
+ )
+ for snapshot in snapshots:
+ try:
+ frappe.get_doc("Virtual Disk Snapshot", snapshot).delete_snapshot()
+ frappe.db.commit()
+ except (SnapshotLockedError, SnapshotInUseError):
+ pass
+ except Exception:
+ log_error("Virtual Disk Snapshot Delete Error", snapshot=snapshot)
+ frappe.db.rollback()
+
+
+def delete_expired_snapshots():
+ snapshots = frappe.get_all(
+ "Virtual Disk Snapshot",
+ filters={
+ "status": "Completed",
+ "physical_backup": True,
+ "rolling_snapshot": False,
+ "expired": True,
+ "dedicated_snapshot": False,
+ },
pluck="name",
order_by="creation asc",
- limit=50,
+ limit=500,
)
for snapshot in snapshots:
try:
+ # Ensure there is no Physical Backup Restoration that is using, or could still use, this snapshot
+ if (
+ frappe.db.count(
+ "Physical Backup Restoration",
+ filters={
+ "disk_snapshot": snapshot,
+ "status": ["in", ["Pending", "Scheduled", "Running"]],
+ },
+ )
+ > 0
+ ) or (
+ frappe.db.count(
+ "Physical Backup Restoration",
+ filters={
+ "disk_snapshot": snapshot,
+ "is_failure_resolved": False,
+ "status": "Failure",
+ },
+ )
+ > 0
+ ):
+ continue
+
frappe.get_doc("Virtual Disk Snapshot", snapshot).delete_snapshot()
frappe.db.commit()
+ except (SnapshotLockedError, SnapshotInUseError):
+ pass
except Exception:
log_error("Virtual Disk Snapshot Delete Error", snapshot=snapshot)
frappe.db.rollback()
+
+
+def sync_all_snapshots_from_aws():
+ regions = frappe.get_all("Cloud Region", {"provider": "AWS EC2"}, pluck="name")
+ for region in regions:
+ if not frappe.db.exists("Virtual Disk Snapshot", {"region": region}):
+ continue
+ random_snapshot = frappe.get_doc(
+ "Virtual Disk Snapshot",
+ {
+ "region": region,
+ },
+ )
+ client = random_snapshot.client
+ paginator = client.get_paginator("describe_snapshots")
+ for page in paginator.paginate(OwnerIds=["self"], Filters=[{"Name": "tag-key", "Values": ["Name"]}]):
+ for snapshot in page["Snapshots"]:
+ if _should_skip_snapshot(snapshot):
+ continue
+ try:
+ delete_duplicate_snapshot_docs(snapshot)
+ if _update_snapshot_if_exists(snapshot, random_snapshot):
+ continue
+ tag_name = next(tag["Value"] for tag in snapshot["Tags"] if tag["Key"] == "Name")
+ virtual_machine = tag_name.split(" - ")[1]
+ _insert_snapshot(snapshot, virtual_machine, random_snapshot)
+ frappe.db.commit()
+ except Exception:
+ log_error(
+ title="Virtual Disk Snapshot Sync Error",
+ snapshot=snapshot,
+ )
+ frappe.db.rollback()
+
+
+def _insert_snapshot(snapshot, virtual_machine, random_snapshot):
+ start_time = frappe.utils.format_datetime(snapshot["StartTime"], "yyyy-MM-dd HH:mm:ss")
+ new_snapshot = frappe.get_doc(
+ {
+ "doctype": "Virtual Disk Snapshot",
+ "snapshot_id": snapshot["SnapshotId"],
+ "virtual_machine": virtual_machine,
+ "volume_id": snapshot["VolumeId"],
+ "status": random_snapshot.get_aws_status_map(snapshot["State"]),
+ "description": snapshot["Description"],
+ "size": snapshot["VolumeSize"],
+ "start_time": start_time,
+ "progress": snapshot["Progress"],
+ }
+ ).insert()
+ frappe.db.set_value(
+ "Virtual Disk Snapshot",
+ new_snapshot.name,
+ {"creation": start_time, "modified": start_time},
+ update_modified=False,
+ )
+ return new_snapshot
+
+
+def _should_skip_snapshot(snapshot):
+ tag_names = [tag["Value"] for tag in snapshot["Tags"] if tag["Key"] == "Name"]
+ if not tag_names:
+ return True
+ tag_name_parts = tag_names[0].split(" - ")
+ if len(tag_name_parts) != 3:
+ return True
+ identifier, virtual_machine, _ = tag_name_parts
+ if identifier != "Frappe Cloud":
+ return True
+ if not frappe.db.exists("Virtual Machine", virtual_machine):
+ return True
+
+ return False
+
+
+def delete_duplicate_snapshot_docs(snapshot):
+ # Delete all except one snapshot document
+ snapshot_id = snapshot["SnapshotId"]
+ snapshot_count = frappe.db.count("Virtual Disk Snapshot", {"snapshot_id": snapshot_id})
+ if snapshot_count > 1:
+ tags = snapshot.get("Tags", [])
+ physical_backup = any(tag["Key"] == "Physical Backup" and tag["Value"] == "Yes" for tag in tags)
+ server_snapshot = any(tag["Key"] == "Dedicated Snapshot" and tag["Value"] == "Yes" for tag in tags)
+
+ snapshot_to_keep = None
+ existing_snapshots = []
+ if physical_backup or server_snapshot:
+ existing_snapshots = frappe.get_all(
+ "Virtual Disk Snapshot",
+ filters={"snapshot_id": snapshot_id},
+ order_by="creation desc",
+ pluck="name",
+ )
+
+ if (
+ physical_backup
+ and existing_snapshots
+ and (
+ site_backup := frappe.db.exists(
+ "Site Backup",
+ {
+ "database_snapshot": ("in", existing_snapshots),
+ "files_availability": ("!=", "Unavailable"),
+ },
+ )
+ )
+ ):
+ snapshot_to_keep = frappe.get_value("Site Backup", site_backup, "database_snapshot")
+
+ if server_snapshot and existing_snapshots:
+ if not snapshot_to_keep:
+ snapshot_to_keep = frappe.db.get_value(
+ "Server Snapshot",
+ {
+ "app_server_snapshot": ("in", existing_snapshots),
+ "status": ("!=", "Unavailable"),
+ },
+ "app_server_snapshot",
+ )
+
+ if not snapshot_to_keep:
+ snapshot_to_keep = frappe.db.get_value(
+ "Server Snapshot",
+ {
+ "database_server_snapshot": ("in", existing_snapshots),
+ "status": ("!=", "Unavailable"),
+ },
+ "database_server_snapshot",
+ )
+
+ if snapshot_to_keep:
+ frappe.db.sql(
+ """
+ DELETE
+ FROM `tabVirtual Disk Snapshot`
+ WHERE snapshot_id=%s AND name!=%s
+ """,
+ (snapshot_id, snapshot_to_keep),
+ )
+ else:
+ frappe.db.sql(
+ """
+ DELETE
+ FROM `tabVirtual Disk Snapshot`
+ WHERE snapshot_id=%s
+ LIMIT %s
+ """,
+ (snapshot_id, snapshot_count - 1),
+ )
+
+
+def _update_snapshot_if_exists(snapshot, random_snapshot):
+ snapshot_id = snapshot["SnapshotId"]
+ if frappe.db.exists("Virtual Disk Snapshot", {"snapshot_id": snapshot_id}):
+ frappe.db.set_value(
+ "Virtual Disk Snapshot",
+ {"snapshot_id": snapshot_id},
+ "status",
+ random_snapshot.get_aws_status_map(snapshot["State"]),
+ )
+ return True
+ return False
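
The helpers above lean on a handful of EC2 calls; a minimal standalone sketch of the same boto3 surface, outside Frappe (region, credentials, and the snapshot ID are placeholder assumptions):

```python
import boto3

client = boto3.client("ec2", region_name="us-east-1")

# Paginated listing of our own snapshots that carry a Name tag, as in
# sync_all_snapshots_from_aws():
paginator = client.get_paginator("describe_snapshots")
for page in paginator.paginate(
    OwnerIds=["self"], Filters=[{"Name": "tag-key", "Values": ["Name"]}]
):
    for snapshot in page["Snapshots"]:
        print(snapshot["SnapshotId"], snapshot["State"], snapshot["Progress"])

# Governance-mode lock as in VirtualDiskSnapshot.lock(); the snapshot
# auto-unlocks after LockDuration days:
client.lock_snapshot(
    SnapshotId="snap-0123456789abcdef0", LockMode="governance", LockDuration=365
)
```
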
diff --git a/press/press/doctype/virtual_machine/cloud-init.yml.jinja2 b/press/press/doctype/virtual_machine/cloud-init.yml.jinja2
index e0de1fb3eb8..c5d3f5acdd2 100644
--- a/press/press/doctype/virtual_machine/cloud-init.yml.jinja2
+++ b/press/press/doctype/virtual_machine/cloud-init.yml.jinja2
@@ -15,24 +15,56 @@ users:
runcmd:
- mkdir /etc/ssh/auth_principals
-- sed -i 's/^(#)?X11Forwarding.*/X11Forwarding no/g' /etc/ssh/sshd_config
-- sed -i 's/^(#)?MaxAuthTries.*/MaxAuthTries 6/g' /etc/ssh/sshd_config
-- sed -i 's/^(#)?PermitEmptyPasswords.*/PermitEmptyPasswords no/g' /etc/ssh/sshd_config
-- sed -i 's/^(#)?PermitUserEnvironment.*/PermitUserEnvironment no/g' /etc/ssh/sshd_config
-- sed -i 's/^(#)?ClientAliveInterval.*/ClientAliveInterval 300/g' /etc/ssh/sshd_config
-- sed -i 's/^(#)?ClientAliveCountMax.*/ClientAliveCountMax 3/g' /etc/ssh/sshd_config
-- sed -i 's/^(#)?Banner.*/Banner \/etc\/login.warn/g' /etc/ssh/sshd_config
-- sed -i 's/^(#)?MaxStartups.*/MaxStartups 10:30:60/g' /etc/ssh/sshd_config
-- sed -i 's/^(#)?MaxSessions.*/MaxSessions 10/g' /etc/ssh/sshd_config
- curl https://frappecloud.com/files/ca.pub > /etc/ssh/ca.pub && chmod 644 /etc/ssh/ca.pub
-- systemctl restart sshd
- su - frappe -c "cd /home/frappe/agent && env/bin/agent setup config --name {{ server.name }} --workers 2"
- su - frappe -c "cd /home/frappe/agent && env/bin/agent setup authentication --password {{ agent_password }}"
- su - frappe -c "htpasswd -Bbc /home/frappe/agent/nginx/monitoring.htpasswd frappe {{ monitoring_password }}"
- supervisorctl restart all
-{% if server.doctype == 'Database Server' %}
+{% if server.doctype == 'Database Server' and server.provider != 'Hetzner' and server.provider != 'DigitalOcean' %}
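+# Grow the filesystem in case the provisioned volume is larger than the image's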
+- resize2fs $(findmnt /opt/volumes/mariadb --noheadings --output SOURCE)
- systemctl daemon-reload
- systemctl restart mariadb
+- systemctl restart mysqld_exporter
+- systemctl restart deadlock_logger
+{% elif server.doctype == 'Server' and server.provider != 'Hetzner' and server.provider != 'DigitalOcean' %}
+- resize2fs $(findmnt /opt/volumes/benches --noheadings --output SOURCE)
+{% endif %}
+{% if server.provider == 'OCI' %}
+- iptables -D INPUT -j REJECT --reject-with icmp-host-prohibited
+- sed -i 's/^-A INPUT -j REJECT --reject-with icmp-host-prohibited$/#-A INPUT -j REJECT --reject-with icmp-host-prohibited/g' /etc/iptables/rules.v4
+{% endif %}
+
+{% if server.provider == 'DigitalOcean' %}
+- |
+ for i in {1..20}; do
+ curl -sf http://169.254.169.254/metadata/v1/id && break
+ sleep 1
+ done
+
+ PRIVATE_IP=$(curl -s http://169.254.169.254/metadata/v1/interfaces/private/0/ipv4/address)
+
+ FILES="
+ /etc/systemd/system/statsd_exporter.service
+ /etc/filebeat/filebeat.yml
+ /etc/mysql/conf.d/frappe.cnf
+ /etc/systemd/system/mariadb.service.d/memory.conf
+ /etc/systemd/system/mysqld_exporter.service
+ /etc/systemd/system/deadlock_logger.service
+ /root/.my.cnf
+ "
+
+ for f in $FILES; do
+ if [ -f "$f" ]; then
+ sed -i "s/__PRIVATE_IP__/${PRIVATE_IP}/g" "$f"
+ fi
+ done
+
+ systemctl daemon-reload
+
+ systemctl restart mariadb || true
+ systemctl restart mysqld_exporter || true
+ systemctl restart deadlock_logger || true
+ systemctl restart statsd_exporter || true
{% endif %}
write_files:
@@ -57,7 +89,7 @@ write_files:
TrustedUserCAKeys /etc/ssh/ca.pub
AuthorizedPrincipalsFile /etc/ssh/auth_principals/%u
-{% if server.doctype == 'Database Server' %}
+{% if server.doctype == 'Database Server' or is_unified_server %}
- path: /etc/mysql/conf.d/frappe.cnf
content: |
{{ mariadb_config | indent(4) }}
@@ -65,6 +97,18 @@ write_files:
- path: /etc/systemd/system/mariadb.service.d/memory.conf
content: |
{{ mariadb_systemd_config | indent(4) }}
+
+- path: /etc/systemd/system/mysqld_exporter.service
+ content: |
+ {{ mariadb_exporter_config | indent(4) }}
+
+- path: /root/.my.cnf
+ content: |
+ {{ mariadb_root_config | indent(4) }}
+
+- path: /etc/systemd/system/deadlock_logger.service
+ content: |
+ {{ deadlock_logger_config | indent(4) }}
{% endif %}
swap:
diff --git a/press/press/doctype/virtual_machine/patches/populate_volumes_table.py b/press/press/doctype/virtual_machine/patches/populate_volumes_table.py
index ad46acd3ba9..4b74fd30db9 100644
--- a/press/press/doctype/virtual_machine/patches/populate_volumes_table.py
+++ b/press/press/doctype/virtual_machine/patches/populate_volumes_table.py
@@ -9,7 +9,7 @@ def execute():
machine = frappe.get_doc("Virtual Machine", machine)
for volume in machine.get_volumes():
row = {
- "aws_volume_id": volume["VolumeId"],
+ "volume_id": volume["VolumeId"],
"volume_type": volume["VolumeType"],
"size": volume["Size"],
"iops": volume["Iops"],
diff --git a/press/press/doctype/virtual_machine/patches/rename_aws_fields.py b/press/press/doctype/virtual_machine/patches/rename_aws_fields.py
new file mode 100644
index 00000000000..bfbf4926a0e
--- /dev/null
+++ b/press/press/doctype/virtual_machine/patches/rename_aws_fields.py
@@ -0,0 +1,12 @@
+# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and Contributors
+# For license information, please see license.txt
+
+import frappe
+from frappe.model.utils.rename_field import rename_field
+
+
+def execute():
+ frappe.reload_doctype("Virtual Machine")
+ rename_field("Virtual Machine", "aws_subnet_id", "subnet_id")
+ rename_field("Virtual Machine", "aws_security_group_id", "security_group_id")
+ rename_field("Virtual Machine", "aws_instance_id", "instance_id")
diff --git a/press/press/doctype/virtual_machine/patches/set_root_disk_size.py b/press/press/doctype/virtual_machine/patches/set_root_disk_size.py
new file mode 100644
index 00000000000..a00e1fc0c9c
--- /dev/null
+++ b/press/press/doctype/virtual_machine/patches/set_root_disk_size.py
@@ -0,0 +1,32 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+from __future__ import annotations
+
+import frappe
+
+
+def execute():
+ # Set `root_disk_size` to `disk_size`
+ frappe.db.sql("UPDATE `tabVirtual Machine` SET `root_disk_size` = `disk_size`")
+
+ # Set `disk_size` and `root_disk_size` on machines with multiple volumes
+ multi_volume_machines = frappe.db.sql(
+ """
+ SELECT machine.name
+ FROM `tabVirtual Machine` machine
+ LEFT JOIN `tabVirtual Machine Volume` volume
+ ON volume.parent = machine.name
+ WHERE machine.status in ('Running', 'Stopped', 'Pending')
+ GROUP BY machine.name
+ HAVING COUNT(volume.name) > 1
+ """,
+ as_dict=True,
+ )
+ for machine_name in multi_volume_machines:
+ machine = frappe.get_doc("Virtual Machine", machine_name)
+ machine.has_data_volume = True
+ machine.save()
+ disk_size = machine.get_data_volume().size
+ root_disk_size = machine.get_root_volume().size
+ frappe.db.set_value("Virtual Machine", machine.name, "disk_size", disk_size)
+ frappe.db.set_value("Virtual Machine", machine.name, "root_disk_size", root_disk_size)
diff --git a/press/press/doctype/virtual_machine/test_virtual_machine.py b/press/press/doctype/virtual_machine/test_virtual_machine.py
index 2f8394d776c..31318b8571e 100644
--- a/press/press/doctype/virtual_machine/test_virtual_machine.py
+++ b/press/press/doctype/virtual_machine/test_virtual_machine.py
@@ -1,41 +1,92 @@
# Copyright (c) 2021, Frappe and Contributors
# See license.txt
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from unittest.mock import MagicMock, patch
import frappe
from frappe.tests.utils import FrappeTestCase
-from unittest.mock import MagicMock, patch
from press.press.doctype.cluster.test_cluster import create_test_cluster
from press.press.doctype.root_domain.test_root_domain import create_test_root_domain
from press.press.doctype.virtual_machine.virtual_machine import VirtualMachine
-from press.press.doctype.cluster.cluster import Cluster
+if TYPE_CHECKING:
+ from press.press.doctype.cluster.cluster import Cluster
+@patch.object(VirtualMachine, "client", new=MagicMock())
def create_test_virtual_machine(
- ip: str = None,
- cluster: Cluster = None,
+ ip: str | None = None,
+ cluster: Cluster | None = None,
series: str = "m",
+ platform: str = "x86_64",
+ cloud_provider: str = "AWS EC2",
+ disk_size: int = 100,
+ has_data_volume: bool = False,
) -> VirtualMachine:
"""Create test Virtual Machine doc"""
if not ip:
ip = frappe.mock("ipv4")
if not cluster:
cluster = create_test_cluster()
- return frappe.get_doc(
+ vm = frappe.get_doc(
{
"doctype": "Virtual Machine",
"domain": create_test_root_domain("fc.dev", cluster.name).name,
"series": series,
"status": "Running",
"machine_type": "r5.xlarge",
- "disk_size": 100,
+ "disk_size": disk_size,
"cluster": cluster.name,
- "aws_instance_id": "i-1234567890",
+ "instance_id": "i-1234567890",
+ "vcpu": 4,
+ "platform": platform,
+ "cloud_provider": cloud_provider,
}
).insert(ignore_if_duplicate=True)
+ volumes = []
+ # Root volume
+ volumes.append(
+ frappe.get_doc(
+ {
+ "doctype": "Virtual Machine Volume",
+ "parenttype": "Virtual Machine",
+ "parent": vm.name,
+ "parentfield": "volumes",
+ "volume_type": "gp3",
+ "throughput": 125,
+ "device": "/dev/sdf",
+ "size": disk_size if not has_data_volume else 8,
+ "volume_id": f"vol-{frappe.generate_hash(11)}",
+ }
+ )
+ )
+ if has_data_volume:
+ volumes.append(
+ frappe.get_doc(
+ {
+ "doctype": "Virtual Machine Volume",
+ "parenttype": "Virtual Machine",
+ "parent": vm.name,
+ "parentfield": "volumes",
+ "volume_type": "gp3",
+ "throughput": 125,
+ "device": "/dev/sdg",
+ "size": disk_size,
+ "volume_id": f"vol-{frappe.generate_hash(11)}",
+ }
+ )
+ )
+
+ for volume in volumes:
+ volume.insert()
+
+ return vm
+
@patch.object(VirtualMachine, "client", new=MagicMock())
class TestVirtualMachine(FrappeTestCase):
diff --git a/press/press/doctype/virtual_machine/virtual_machine.js b/press/press/doctype/virtual_machine/virtual_machine.js
index b548f1a4e25..3c40dee31cb 100644
--- a/press/press/doctype/virtual_machine/virtual_machine.js
+++ b/press/press/doctype/virtual_machine/virtual_machine.js
@@ -4,79 +4,451 @@
frappe.ui.form.on('Virtual Machine', {
refresh: function (frm) {
[
- [__('Sync'), 'sync'],
- [__('Provision'), 'provision', frm.doc.status == 'Draft'],
- [__('Reboot'), 'reboot', frm.doc.status == 'Running'],
- [__('Stop'), 'stop', frm.doc.status == 'Running'],
- [__('Start'), 'start', frm.doc.status == 'Stopped'],
- [__('Terminate'), 'terminate', !frm.doc.termination_protection],
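+ // Each entry: [label, method, needs_confirmation, show_condition]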
+ [__('Sync'), 'sync', false, frm.doc.status != 'Draft'],
+ [__('Provision'), 'provision', true, frm.doc.status == 'Draft'],
+ [__('Reboot'), 'reboot', true, frm.doc.status == 'Running'],
+ [__('Stop'), 'stop', true, frm.doc.status == 'Running'],
+ [__('Force Stop'), 'force_stop', true, frm.doc.status == 'Running'],
+ [__('Start'), 'start', true, frm.doc.status == 'Stopped'],
+ [__('Terminate'), 'terminate', true, !frm.doc.termination_protection],
+ [
+ __('Force Terminate'),
+ 'force_terminate',
+ true,
+ Boolean(frappe.boot.developer_mode),
+ ],
[
__('Disable Termination Protection'),
'disable_termination_protection',
+ true,
frm.doc.termination_protection,
],
[
__('Enable Termination Protection'),
'enable_termination_protection',
+ true,
!frm.doc.termination_protection,
],
- [__('Create Image'), 'create_image', frm.doc.status == 'Stopped'],
- [__('Create Snapshots'), 'create_snapshots', frm.doc.status == 'Running'],
- [__('Create Server'), 'create_server', frm.doc.series === 'f'],
+ [__('Increase Disk Size'), 'increase_disk_size', true],
+ [__('Create Image'), 'create_image', true, frm.doc.status == 'Stopped'],
+ [
+ __('Create Snapshots'),
+ 'create_snapshots',
+ true,
+ frm.doc.status == 'Running',
+ ],
+ [
+ __('Create Server'),
+ 'create_server',
+ true,
+ frm.doc.series === 'f' || frm.doc.series === 'u',
+ ],
[
__('Create Database Server'),
'create_database_server',
- frm.doc.series === 'm',
+ false,
+ frm.doc.series === 'm' || frm.doc.series === 'u',
],
[
__('Create Proxy Server'),
'create_proxy_server',
+ false,
frm.doc.series === 'n',
],
+ [
+ __('Create Registry Server'),
+ 'create_registry_server',
+ false,
+ frm.doc.series === 'r',
+ ],
+ [
+ __('Create Monitor Server'),
+ 'create_monitor_server',
+ false,
+ frm.doc.series === 'p',
+ ],
+ [
+ __('Create Log Server'),
+ 'create_log_server',
+ false,
+ frm.doc.series === 'e',
+ ],
+ [
+ __('Reboot with serial console'),
+ 'reboot_with_serial_console',
+ true,
+ frm.doc.status === 'Running' && frm.doc.cloud_provider === 'AWS EC2',
+ ],
+ ].forEach(([label, method, confirm, condition]) => {
+ if (typeof condition === 'undefined' || condition) {
+ frm.add_custom_button(
+ label,
+ () => {
+ if (confirm) {
+ frappe.confirm(
+ `Are you sure you want to ${label.toLowerCase()}?`,
+ () =>
+ frm.call(method).then((r) => {
+ if (r.message) {
+ frappe.msgprint(r.message);
+ } else {
+ frm.refresh();
+ }
+ }),
+ );
+ } else {
+ frm.call(method).then((r) => {
+ if (r.message) {
+ frappe.msgprint(r.message);
+ } else {
+ frm.refresh();
+ }
+ });
+ }
+ },
+ __('Actions'),
+ );
+ }
+ });
+ [
+ [
+ __('Resize'),
+ 'resize',
+ frm.doc.status == 'Stopped' ||
+ (frm.doc.cloud_provider == 'OCI' && frm.doc.status != 'Draft'),
+ ],
].forEach(([label, method, condition]) => {
if (typeof condition === 'undefined' || condition) {
+ let fields = [
+ {
+ fieldtype: 'Data',
+ label: 'Machine Type',
+ fieldname: 'machine_type',
+ reqd: 1,
+ },
+ ];
+ if (frm.doc.cloud_provider == 'Hetzner') {
+ fields.push({
+ fieldtype: 'Check',
+ label: 'Upgrade Disk ?',
+ fieldname: 'upgrade_disk',
+ default: 0,
+ });
+ }
frm.add_custom_button(
label,
() => {
- frm.call(method).then((r) => frm.refresh());
+ frappe.prompt(
+ fields,
+ ({ machine_type, upgrade_disk }) => {
+ frm
+ .call(method, {
+ machine_type,
+ upgrade_disk,
+ })
+ .then((r) => frm.refresh());
+ },
+ __('Resize Virtual Machine'),
+ );
},
__('Actions'),
);
}
});
- [[__('Resize'), 'resize', frm.doc.status == 'Stopped']].forEach(
- ([label, method]) => {
- if (typeof condition === 'undefined' || condition) {
- frm.add_custom_button(
- label,
- () => {
- frappe.prompt(
+ [
+ [
+ __('Update OCI Volume Performance'),
+ 'update_oci_volume_performance',
+ frm.doc.cloud_provider == 'OCI',
+ ],
+ ].forEach(([label, method, condition]) => {
+ if (typeof condition === 'undefined' || condition) {
+ frm.add_custom_button(
+ label,
+ () => {
+ frappe.prompt(
+ [
+ {
+ fieldtype: 'Int',
+ label: 'VPUs / GB',
+ fieldname: 'vpus',
+ reqd: 1,
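+ // Infer current VPUs from iops/size (inverse of OCI's ~45 + 1.5 × VPU IOPS/GB curve)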
+ default:
+ (frm.doc.volumes[0].iops / frm.doc.volumes[0].size - 45) /
+ 1.5,
+ },
+ ],
+ ({ vpus }) => {
+ frm
+ .call(method, {
+ vpus,
+ })
+ .then((r) => frm.refresh());
+ },
+ __('Update OCI Volume Performance'),
+ );
+ },
+ __('Actions'),
+ );
+ }
+ });
+ [
+ [
+ __('Convert to ARM'),
+ 'convert_to_arm',
+ frm.doc.cloud_provider == 'AWS EC2' && frm.doc.platform == 'x86_64',
+ ],
+ ].forEach(([label, method, condition]) => {
+ if (typeof condition === 'undefined' || condition) {
+ frm.add_custom_button(
+ label,
+ () => {
+ frappe.prompt(
+ [
+ {
+ fieldtype: 'Link',
+ label: 'Virtual Machine Image',
+ fieldname: 'virtual_machine_image',
+ options: 'Virtual Machine Image',
+ reqd: 1,
+ get_query: function () {
+ return {
+ filters: {
+ platform: 'arm64',
+ cluster: frm.doc.cluster,
+ status: 'Available',
+ series: frm.doc.series,
+ },
+ };
+ },
+ },
{
fieldtype: 'Data',
label: 'Machine Type',
fieldname: 'machine_type',
reqd: 1,
},
- ({ machine_type }) => {
- frm
- .call(method, {
- machine_type,
- })
- .then((r) => frm.refresh());
+ ],
+ ({ virtual_machine_image, machine_type }) => {
+ frm
+ .call(method, {
+ virtual_machine_image,
+ machine_type,
+ })
+ .then((r) => frm.refresh());
+ },
+ __(label),
+ );
+ },
+ __('Actions'),
+ );
+ }
+ });
+ if (frm.doc.platform == 'x86_64') {
+ frm.add_custom_button(
+ 'Convert to AMD',
+ () => {
+ frappe.prompt(
+ [
+ {
+ fieldtype: 'Link',
+ label: 'Virtual Machine Image',
+ fieldname: 'virtual_machine_image',
+ options: 'Virtual Machine Image',
+ reqd: 1,
+ get_query: function () {
+ return {
+ filters: {
+ platform: 'x86_64',
+ cluster: frm.doc.cluster,
+ status: 'Available',
+ series: frm.doc.series,
+ },
+ };
},
- __('Resize Virtual Machine'),
- );
+ },
+ {
+ fieldtype: 'Data',
+ label: 'Machine Type',
+ fieldname: 'machine_type',
+ reqd: 1,
+ },
+ ],
+ ({ virtual_machine_image, machine_type }) => {
+ frm
+ .call('convert_to_amd', {
+ virtual_machine_image,
+ machine_type,
+ })
+ .then((r) => frm.refresh());
},
- __('Actions'),
+ __('Convert to AMD'),
);
- }
- },
- );
- if (frm.doc.aws_instance_id) {
- frm.add_web_link(
- `https://${frm.doc.region}.console.aws.amazon.com/ec2/v2/home?region=${frm.doc.region}#InstanceDetails:instanceId=${frm.doc.aws_instance_id}`,
- __('Visit AWS Dashboard'),
+ },
+ __('Actions'),
+ );
+ }
+ if (frm.doc.status == 'Running') {
+ frm.add_custom_button(
+ __('Attach New Volume'),
+ () => {
+ frappe.prompt(
+ [
+ {
+ fieldtype: 'Int',
+ label: 'Size',
+ fieldname: 'size',
+ reqd: 1,
+ default: 10,
+ },
+ {
+ fieldtype: 'Int',
+ label: 'IOPS',
+ fieldname: 'iops',
+ reqd: 1,
+ default: 3000,
+ },
+ {
+ fieldtype: 'Int',
+ label: 'Throughput (MB/s)',
+ fieldname: 'throughput',
+ reqd: 1,
+ default: 125,
+ },
+ ],
+ ({ size, iops, throughput }) => {
+ frm
+ .call('attach_new_volume', {
+ size,
+ iops,
+ throughput,
+ })
+ .then((r) => frm.refresh());
+ },
+ __('Attach New Volume'),
+ );
+ },
+ __('Actions'),
);
+
+ frm.add_custom_button(
+ 'Attach Volume',
+ () => {
+ frappe.prompt(
+ [
+ {
+ fieldtype: 'Data',
+ label: 'Volume ID',
+ fieldname: 'volume_id',
+ reqd: 1,
+ },
+ {
+ fieldtype: 'Check',
+ label: 'Is Temporary Volume ?',
+ fieldname: 'is_temporary_volume',
+ default: 1,
+ },
+ ],
+ ({ volume_id, is_temporary_volume }) => {
+ frm
+ .call('attach_volume', {
+ volume_id,
+ is_temporary_volume,
+ })
+ .then((r) => frm.refresh());
+ },
+ __('Attach Volume'),
+ );
+ },
+ __('Actions'),
+ );
+ }
+ if (frm.doc.instance_id) {
+ if (frm.doc.cloud_provider === 'AWS EC2') {
+ frm.add_web_link(
+ `https://${frm.doc.region}.console.aws.amazon.com/ec2/v2/home?region=${frm.doc.region}#InstanceDetails:instanceId=${frm.doc.instance_id}`,
+ __('Visit AWS Dashboard'),
+ );
+ } else if (frm.doc.cloud_provider === 'OCI') {
+ frm.add_web_link(
+ `https://cloud.oracle.com/compute/instances/${frm.doc.instance_id}?region=${frm.doc.region}`,
+ __('Visit OCI Dashboard'),
+ );
+ }
}
},
});
+
+frappe.ui.form.on('Virtual Machine Volume', {
+ detach(frm, cdt, cdn) {
+ let row = frm.selected_doc;
+ frappe.confirm(
+ `Are you sure you want to detach volume ${row.volume_id}?`,
+ () =>
+ frm
+ .call('detach', { volume_id: row.volume_id })
+ .then((r) => frm.refresh()),
+ );
+ },
+ delete_volume(frm, cdt, cdn) {
+ let row = frm.selected_doc;
+ frappe.confirm(
+ `Are you sure you want to delete volume ${row.volume_id}?`,
+ () =>
+ frm
+ .call('delete_volume', { volume_id: row.volume_id })
+ .then((r) => frm.refresh()),
+ );
+ },
+ increase_disk_size(frm, cdt, cdn) {
+ let row = frm.selected_doc;
+ frappe.prompt(
+ {
+ fieldtype: 'Int',
+ label: 'Increment (GB)',
+ fieldname: 'increment',
+ reqd: 1,
+ },
+ ({ increment }) => {
+ frm
+ .call('increase_disk_size', {
+ volume_id: row.volume_id,
+ increment,
+ })
+ .then((r) => frm.refresh());
+ },
+ __('Increase Disk Size'),
+ );
+ },
+ update_ebs_performance(frm, cdt, cdn) {
+ let row = frm.selected_doc;
+ frappe.prompt(
+ [
+ {
+ fieldtype: 'Int',
+ label: 'IOPS',
+ fieldname: 'iops',
+ reqd: 1,
+ default: row.iops,
+ },
+ {
+ fieldtype: 'Int',
+ label: 'Throughput (MB/s)',
+ fieldname: 'throughput',
+ reqd: 1,
+ default: row.throughput,
+ },
+ ],
+ ({ iops, throughput }) => {
+ frm
+ .call('update_ebs_performance', {
+ volume_id: row.volume_id,
+ iops,
+ throughput,
+ })
+ .then((r) => frm.refresh());
+ },
+ __('Update EBS Performance'),
+ );
+ },
+});
diff --git a/press/press/doctype/virtual_machine/virtual_machine.json b/press/press/doctype/virtual_machine/virtual_machine.json
index 1f0b5787579..977cc5b4a97 100644
--- a/press/press/doctype/virtual_machine/virtual_machine.json
+++ b/press/press/doctype/virtual_machine/virtual_machine.json
@@ -20,26 +20,43 @@
"index",
"section_break_5",
"machine_type",
+ "instance_id",
+ "platform",
+ "kms_key_id",
+ "ssh_key",
+ "ready_for_conversion",
+ "column_break_hgcr",
"disk_size",
- "aws_instance_id",
+ "root_disk_size",
+ "vcpu",
+ "ram",
"column_break_8",
"virtual_machine_image",
"machine_image",
- "ssh_key",
+ "data_disk_snapshot",
+ "data_disk_snapshot_volume_id",
+ "data_disk_snapshot_attached",
"networking_section",
- "aws_subnet_id",
+ "subnet_id",
"private_ip_address",
"public_ip_address",
+ "is_static_ip",
"column_break_15",
"subnet_cidr_block",
"public_dns_name",
"private_dns_name",
"security_section",
- "aws_security_group_id",
+ "security_group_id",
"column_break_18",
"termination_protection",
"volumes_section",
- "volumes"
+ "has_data_volume",
+ "volumes",
+ "temporary_volumes",
+ "snapshots_section",
+ "skip_automated_snapshot",
+ "column_break_zozh",
+ "disable_server_snapshot"
],
"fields": [
{
@@ -48,31 +65,32 @@
"fieldtype": "Select",
"in_list_view": 1,
"label": "Cloud Provider",
- "options": "\nAWS EC2",
+ "options": "\nAWS EC2\nOCI\nHetzner\nDigitalOcean",
"read_only": 1,
"reqd": 1
},
{
"fetch_from": "cluster.region",
"fieldname": "region",
- "fieldtype": "Data",
+ "fieldtype": "Link",
"label": "Region",
- "read_only": 1,
- "reqd": 1
+ "options": "Cloud Region",
+ "read_only": 1
},
{
"fetch_from": "cluster.availability_zone",
"fieldname": "availability_zone",
"fieldtype": "Data",
"label": "Availability Zone",
- "read_only": 1,
- "reqd": 1
+ "read_only": 1
},
{
"fieldname": "cluster",
"fieldtype": "Link",
+ "in_standard_filter": 1,
"label": "Cluster",
"options": "Cluster",
+ "reqd": 1,
"set_only_once": 1
},
{
@@ -88,38 +106,25 @@
"fieldtype": "Column Break"
},
{
- "fetch_from": "virtual_machine_image.aws_ami_id",
+ "fetch_from": "virtual_machine_image.image_id",
"fetch_if_empty": 1,
"fieldname": "machine_image",
"fieldtype": "Data",
- "label": "Machine Image",
- "set_only_once": 1
+ "label": "Machine Image"
},
{
"fieldname": "machine_type",
"fieldtype": "Data",
"in_list_view": 1,
+ "in_standard_filter": 1,
"label": "Machine Type",
"reqd": 1
},
- {
- "fieldname": "aws_instance_id",
- "fieldtype": "Data",
- "label": "AWS Instance ID",
- "read_only": 1
- },
{
"fieldname": "networking_section",
"fieldtype": "Section Break",
"label": "Networking"
},
- {
- "fetch_from": "cluster.aws_subnet_id",
- "fieldname": "aws_subnet_id",
- "fieldtype": "Data",
- "label": "AWS Subnet ID",
- "read_only": 1
- },
{
"fetch_from": "cluster.subnet_cidr_block",
"fieldname": "subnet_cidr_block",
@@ -151,13 +156,6 @@
"fieldname": "column_break_18",
"fieldtype": "Column Break"
},
- {
- "fetch_from": "cluster.aws_security_group_id",
- "fieldname": "aws_security_group_id",
- "fieldtype": "Data",
- "label": "AWS Security Group ID",
- "read_only": 1
- },
{
"fieldname": "public_ip_address",
"fieldtype": "Data",
@@ -185,6 +183,7 @@
"fieldname": "status",
"fieldtype": "Select",
"in_list_view": 1,
+ "in_standard_filter": 1,
"label": "Status",
"options": "Draft\nPending\nRunning\nStopped\nTerminated",
"read_only": 1,
@@ -207,14 +206,15 @@
"fieldtype": "Table",
"label": "Volumes",
"options": "Virtual Machine Volume",
- "read_only": 1
+ "read_only_depends_on": "eval: doc.volumes.length > 0"
},
{
"fieldname": "virtual_machine_image",
"fieldtype": "Link",
"label": "Virtual Machine Image",
+ "link_filters": "[[\"Virtual Machine Image\",\"status\",\"=\",\"Available\"]]",
"options": "Virtual Machine Image",
- "set_only_once": 1
+ "read_only_depends_on": "eval: doc.virtual_machine_image"
},
{
"default": "0",
@@ -249,7 +249,7 @@
"fieldname": "series",
"fieldtype": "Select",
"label": "Series",
- "options": "n\nf\nm",
+ "options": "n\nf\nm\nc\np\ne\nr\nu\nt\nnfs\nfs",
"reqd": 1
},
{
@@ -258,6 +258,132 @@
"label": "Domain",
"options": "Root Domain",
"reqd": 1
+ },
+ {
+ "fieldname": "column_break_hgcr",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "ram",
+ "fieldtype": "Int",
+ "label": "RAM (MB)",
+ "read_only": 1
+ },
+ {
+ "fieldname": "vcpu",
+ "fieldtype": "Int",
+ "label": "vCPU",
+ "read_only": 1
+ },
+ {
+ "fetch_from": "cluster.subnet_id",
+ "fieldname": "subnet_id",
+ "fieldtype": "Data",
+ "label": "Subnet ID",
+ "read_only": 1
+ },
+ {
+ "fetch_from": "cluster.security_group_id",
+ "fieldname": "security_group_id",
+ "fieldtype": "Data",
+ "label": "Security Group ID",
+ "read_only": 1
+ },
+ {
+ "fieldname": "instance_id",
+ "fieldtype": "Data",
+ "in_standard_filter": 1,
+ "label": "Instance ID",
+ "read_only": 1
+ },
+ {
+ "fieldname": "platform",
+ "fieldtype": "Select",
+ "label": "Platform",
+ "options": "x86_64\narm64",
+ "reqd": 1
+ },
+ {
+ "default": "8",
+ "fieldname": "root_disk_size",
+ "fieldtype": "Int",
+ "label": "Root Disk Size",
+ "reqd": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "has_data_volume",
+ "fieldtype": "Check",
+ "label": "Has Data Volume"
+ },
+ {
+ "description": "Data volumes that have been temporarily attached for tasks such as Physical Backup Restoration.",
+ "fieldname": "temporary_volumes",
+ "fieldtype": "Table",
+ "label": "Temporary Volumes",
+ "options": "Virtual Machine Temporary Volume"
+ },
+ {
+ "fieldname": "snapshots_section",
+ "fieldtype": "Section Break",
+ "label": "Snapshots"
+ },
+ {
+ "default": "0",
+ "fieldname": "skip_automated_snapshot",
+ "fieldtype": "Check",
+ "label": "Skip Automated Snapshot"
+ },
+ {
+ "default": "0",
+ "fieldname": "ready_for_conversion",
+ "fieldtype": "Check",
+ "label": "Ready For Conversion",
+ "read_only": 1
+ },
+ {
+ "fieldname": "kms_key_id",
+ "fieldtype": "Data",
+ "label": "KMS Key ID"
+ },
+ {
+ "fieldname": "data_disk_snapshot",
+ "fieldtype": "Link",
+ "label": "Data Disk Snapshot",
+ "options": "Virtual Disk Snapshot"
+ },
+ {
+ "depends_on": "eval: doc.data_disk_snapshot",
+ "fieldname": "data_disk_snapshot_volume_id",
+ "fieldtype": "Data",
+ "label": "Data Disk Snapshot Volume ID",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "depends_on": "eval: doc.data_disk_snapshot",
+ "fieldname": "data_disk_snapshot_attached",
+ "fieldtype": "Check",
+ "label": "Data Disk Snapshot Attached",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_zozh",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "description": "If you disable server snapshot, it will just take disk snapshot",
+ "fieldname": "disable_server_snapshot",
+ "fieldtype": "Check",
+ "label": "Disable Server Snapshot"
+ },
+ {
+ "default": "0",
+ "fieldname": "is_static_ip",
+ "fieldtype": "Check",
+ "label": "Is Static IP",
+ "read_only": 1
}
],
"index_web_pages_for_search": 1,
@@ -277,6 +403,11 @@
"link_doctype": "Proxy Server",
"link_fieldname": "virtual_machine"
},
+ {
+ "group": "Servers",
+ "link_doctype": "NFS Server",
+ "link_fieldname": "virtual_machine"
+ },
{
"group": "Snapshots",
"link_doctype": "Virtual Disk Snapshot",
@@ -286,9 +417,14 @@
"group": "Snapshots",
"link_doctype": "Virtual Machine Image",
"link_fieldname": "virtual_machine"
+ },
+ {
+ "group": "Migration",
+ "link_doctype": "Virtual Machine Migration",
+ "link_fieldname": "virtual_machine"
}
],
- "modified": "2023-04-01 14:37:17.417288",
+ "modified": "2026-01-18 13:54:28.206523",
"modified_by": "Administrator",
"module": "Press",
"name": "Virtual Machine",
@@ -319,8 +455,10 @@
"write": 1
}
],
+ "row_format": "Dynamic",
+ "rows_threshold_for_grid_search": 20,
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"track_changes": 1
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/virtual_machine/virtual_machine.py b/press/press/doctype/virtual_machine/virtual_machine.py
index 13b37171238..3befdf48fd8 100644
--- a/press/press/doctype/virtual_machine/virtual_machine.py
+++ b/press/press/doctype/virtual_machine/virtual_machine.py
@@ -1,54 +1,589 @@
# Copyright (c) 2021, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
-import frappe
+import base64
import ipaddress
+import time
+import typing
+
import boto3
-from frappe.model.document import Document
+import botocore
+import frappe
+import pydo
+import rq
from frappe.core.utils import find
-from frappe.model.naming import make_autoname
from frappe.desk.utils import slug
+from frappe.model.document import Document
+from frappe.model.naming import make_autoname
+from frappe.utils import cint
+from frappe.utils.password import get_decrypted_password
+from hcloud import APIException
+from hcloud import Client as HetznerClient
+from hcloud.servers.domain import ServerCreatePublicNetwork
+from oci import pagination as oci_pagination
+from oci.core import BlockstorageClient, ComputeClient, VirtualNetworkClient
+from oci.core.models import (
+ CreateBootVolumeBackupDetails,
+ CreateVnicDetails,
+ CreateVolumeBackupDetails,
+ InstanceOptions,
+ InstanceSourceViaImageDetails,
+ LaunchInstanceDetails,
+ LaunchInstancePlatformConfig,
+ LaunchInstanceShapeConfigDetails,
+ UpdateBootVolumeDetails,
+ UpdateInstanceDetails,
+ UpdateInstanceShapeConfigDetails,
+ UpdateVolumeDetails,
+)
+from oci.exceptions import TransientServiceError
+
from press.overrides import get_permission_query_conditions_for_doctype
+from press.press.doctype.server_activity.server_activity import log_server_activity
from press.utils import log_error
+from press.utils.jobs import has_job_timeout_exceeded
+
+if typing.TYPE_CHECKING:
+ from press.infrastructure.doctype.virtual_machine_migration.virtual_machine_migration import (
+ VirtualMachineMigration,
+ )
+ from press.press.doctype.cluster.cluster import Cluster
+ from press.press.doctype.database_server.database_server import DatabaseServer
+ from press.press.doctype.log_server.log_server import LogServer
+ from press.press.doctype.monitor_server.monitor_server import MonitorServer
+ from press.press.doctype.proxy_server.proxy_server import ProxyServer
+ from press.press.doctype.server.server import Server
+ from press.press.doctype.virtual_disk_snapshot.virtual_disk_snapshot import VirtualDiskSnapshot
+ from press.press.doctype.virtual_machine_image.virtual_machine_image import VirtualMachineImage
+
+
+server_doctypes = [
+ "Server",
+ "Database Server",
+ "Proxy Server",
+ "Monitor Server",
+ "Log Server",
+ "NFS Server",
+]
+
+HETZNER_ROOT_DISK_ID = "hetzner-root-disk"
+DIGITALOCEAN_ROOT_DISK_ID = "digital-ocean-root-disk"
+HETZNER_ACTION_RETRIES = 60  # retry count; keep it low enough that waiting doesn't exceed the default RQ job timeout of 300 seconds
class VirtualMachine(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.virtual_machine_temporary_volume.virtual_machine_temporary_volume import (
+ VirtualMachineTemporaryVolume,
+ )
+ from press.press.doctype.virtual_machine_volume.virtual_machine_volume import VirtualMachineVolume
+
+ availability_zone: DF.Data
+ cloud_provider: DF.Literal["", "AWS EC2", "OCI", "Hetzner", "DigitalOcean"]
+ cluster: DF.Link
+ data_disk_snapshot: DF.Link | None
+ data_disk_snapshot_attached: DF.Check
+ data_disk_snapshot_volume_id: DF.Data | None
+ disable_server_snapshot: DF.Check
+ disk_size: DF.Int
+ domain: DF.Link
+ has_data_volume: DF.Check
+ index: DF.Int
+ instance_id: DF.Data | None
+ is_static_ip: DF.Check
+ kms_key_id: DF.Data | None
+ machine_image: DF.Data | None
+ machine_type: DF.Data
+ platform: DF.Literal["x86_64", "arm64"]
+ private_dns_name: DF.Data | None
+ private_ip_address: DF.Data | None
+ public_dns_name: DF.Data | None
+ public_ip_address: DF.Data | None
+ ram: DF.Int
+ ready_for_conversion: DF.Check
+ region: DF.Link
+ root_disk_size: DF.Int
+ security_group_id: DF.Data | None
+ series: DF.Literal["n", "f", "m", "c", "p", "e", "r", "u", "t", "nfs", "fs"]
+ skip_automated_snapshot: DF.Check
+ ssh_key: DF.Link
+ status: DF.Literal["Draft", "Pending", "Running", "Stopped", "Terminated"]
+ subnet_cidr_block: DF.Data | None
+ subnet_id: DF.Data | None
+ team: DF.Link | None
+ temporary_volumes: DF.Table[VirtualMachineTemporaryVolume]
+ termination_protection: DF.Check
+ vcpu: DF.Int
+ virtual_machine_image: DF.Link | None
+ volumes: DF.Table[VirtualMachineVolume]
+ # end: auto-generated types
+
+ @property
+ def is_database_server(self) -> bool:
+ if self.series == "m":
+ return True
+
+ return frappe.db.exists("Database Server", {"virtual_machine": self.name})
+
def autoname(self):
series = f"{self.series}-{slug(self.cluster)}.#####"
self.index = int(make_autoname(series)[-5:])
self.name = f"{self.series}{self.index}-{slug(self.cluster)}.{self.domain}"
- def validate(self):
+ def after_insert(self):
+ if self.virtual_machine_image:
+ image: VirtualMachineImage = frappe.get_doc("Virtual Machine Image", self.virtual_machine_image)
+ if image.has_data_volume:
+ # We have two separate volumes for root and data
+ # Copy their sizes correctly
+ self.disk_size = max(self.disk_size, image.size)
+ self.root_disk_size = max(self.root_disk_size, image.root_size)
+ self.has_data_volume = True
+ else:
+ # We have only one volume. Both root and data are the same
+ self.disk_size = max(self.disk_size, image.size)
+ self.root_disk_size = self.disk_size
+ self.has_data_volume = False
+
+ self.machine_image = image.image_id
+
+ # If a data disk snapshot is provided, it is attached as a second disk,
+ # regardless of whether the VMI supports a data volume
+ if self.data_disk_snapshot:
+ self.has_data_volume = True
+ self.root_disk_size = image.root_size
+ self.disk_size = max(
+ self.disk_size,
+ frappe.db.get_value("Virtual Disk Snapshot", self.data_disk_snapshot, "size"),
+ )
+
if not self.machine_image:
self.machine_image = self.get_latest_ubuntu_image()
- if not self.private_ip_address:
- ip = ipaddress.IPv4Interface(self.subnet_cidr_block).ip
- index = self.index + 356
- if self.series == "n":
- self.private_ip_address = str(ip + index)
- else:
- offset = ["f", "m"].index(self.series)
- self.private_ip_address = str(
- ip + 256 * (2 * (index // 256) + offset) + (index % 256)
+ self.save()
+
+ def get_private_ip(self):
+ ip = ipaddress.IPv4Interface(self.subnet_cidr_block).ip
+ index = self.index + 356
+ if self.series == "n":
+ return str(ip + index)
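+ # Place each series in its own 256-address block within an index window,
+ # keeping private IPs of different series apart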
+ offset = ["n", "f", "m", "c", "p", "e", "r", "u", "t", "nfs", "fs"].index(self.series)
+ return str(ip + 256 * (2 * (index // 256) + offset) + (index % 256))
+
+ def validate(self):
+ # DigitalOcean does not support custom private IPs in a VPC
+ if not self.private_ip_address and self.cloud_provider != "DigitalOcean":
+ self.private_ip_address = self.get_private_ip()
+
+ self.validate_data_disk_snapshot()
+
+ def validate_data_disk_snapshot(self):
+ if not self.is_new() or not self.data_disk_snapshot:
+ return
+
+ if self.cloud_provider != "AWS EC2":
+ frappe.throw("Server Creation with Data Disk Snapshot is only supported on AWS EC2.")
+
+ # Ensure the disk snapshot is Completed
+ snapshot: VirtualDiskSnapshot = frappe.get_doc("Virtual Disk Snapshot", self.data_disk_snapshot)
+ if snapshot.status != "Completed":
+ frappe.throw("Disk Snapshot is not available.")
+
+ if snapshot.region != frappe.get_value("Cluster", self.cluster, "region"):
+ frappe.throw("Disk Snapshot is not available in the same region as the cluster")
+
+ if not self.virtual_machine_image:
+ frappe.throw("Virtual Machine Image is required to create a VM with Data Disk Snapshot")
+
+ def on_trash(self):
+ snapshots = frappe.get_all(
+ "Virtual Disk Snapshot",
+ {"virtual_machine": self.name, "status": "Unavailable"},
+ pluck="name",
+ )
+ for snapshot in snapshots:
+ frappe.delete_doc("Virtual Disk Snapshot", snapshot)
+
+ images = frappe.get_all(
+ "Virtual Machine Image",
+ {"virtual_machine": self.name, "status": "Unavailable"},
+ pluck="name",
+ )
+ for image in images:
+ frappe.delete_doc("Virtual Machine Image", image)
+
+ def on_update(self):
+ server = self.get_server()
+
+ if self.has_value_changed("has_data_volume") and server:
+ server.has_data_volume = self.has_data_volume
+ server.save()
+
+ if self.has_value_changed("disk_size") and self.should_bill_addon_storage():
+ self.update_subscription_for_addon_storage()
+
+ def check_and_attach_data_disk_snapshot_volume(self):
+ if not self.data_disk_snapshot_volume_id:
+ frappe.throw("Data Disk Snapshot Volume ID is not set.")
+
+ volume_state = self.get_state_of_volume(self.data_disk_snapshot_volume_id)
+ if volume_state == "available":
+ self.attach_volume(self.data_disk_snapshot_volume_id)
+ self.data_disk_snapshot_attached = True
+ self.status = "Pending"
+ self.save()
+ return True
+
+ if volume_state == "deleted":
+ self.data_disk_snapshot_volume_id = None
+
+ self.status = "Pending"
+ self.save()
+ return False
+
+ def ensure_no_data_disk_attached_before_attaching_snapshot_disk(self): # noqa: C901
+ """
+ Returns status: bool
+ - True, if the caller can assume this function has done its part
+ - False, if the caller should call it again
+
+ """
+ if (
+ not self.data_disk_snapshot  # vm doesn't depend on a disk snapshot, so there's no point in doing this check
+ # These two below checks are there to prevent
+ # Any accidental call to this function
+ or self.data_disk_snapshot_volume_id # volume from snapshot has been created
+ or self.data_disk_snapshot_attached # data disk attached already
+ ):
+ """
+ Sanity Check
+
+ In dual disk (root + data) VMIs, we can't create the machine with the root disk only
+
+ So, once the VM spawned the first task is to detach and delete the extra disk
+ Once, that's done we can move ahead.
+
+ As it dealing with disk deletion, this check serve as a safeguard.
+
+ !!NOTE!! : Don't remove until unless we have stricter check somewhere else
+ """
+ return
+
+ if len(self.volumes) == 0:
+ frappe.throw("Sync the VM before checking data disk for snapshot recovery")
+
+ if len(self.volumes) == 1:
+ return
+
+ # With more volumes, find the other volume ids
+ additional_volume_ids = []
+ for volume_id in self.volumes:
+ if volume_id.device in ["/dev/xvda1", "/dev/sda1"]:
+ continue
+ if volume_id.volume_id == self.data_disk_snapshot_volume_id:
+ continue
+ additional_volume_ids.append(volume_id.volume_id)
+
+ for volume_id in additional_volume_ids:
+ # Avoid syncing multiple times
+ self.delete_volume(volume_id, sync=False)
+
+ if len(additional_volume_ids):
+ self.sync()
+
+ def create_data_disk_volume_from_snapshot(self):
+ try:
+ self.ensure_no_data_disk_attached_before_attaching_snapshot_disk()
+ datadisk_snapshot: VirtualDiskSnapshot = frappe.get_doc(
+ "Virtual Disk Snapshot", self.data_disk_snapshot
+ )
+ snapshot_volume = datadisk_snapshot.create_volume(
+ availability_zone=self.availability_zone, volume_initialization_rate=300, size=self.disk_size
+ )
+ self.data_disk_snapshot_volume_id = snapshot_volume
+ self.status = "Pending"
+ self.save()
+ return True
+ except Exception:
+ log_error(
+ title="VM Data Disk Snapshot Volume Creation Failed",
+ )
+ if not self.data_disk_snapshot_volume_id:
+ return False
+ # If it fails for any reason, try to delete the volume
+ try:
+ self.delete_volume(self.data_disk_snapshot_volume_id)
+ except: # noqa: E722
+ log_error(
+ title="VM Data Disk Snapshot Volume Cleanup Failed",
)
- if self.virtual_machine_image:
- self.disk_size = max(
- self.disk_size,
- frappe.db.get_value("Virtual Machine Image", self.virtual_machine_image, "size"),
+ return False
+
+ def should_bill_addon_storage(self):
+ """Check if storage addition should create/update subscription record"""
+ # Without a separate data volume, any disk increase counts, whether auto or manual
+ if not self.has_data_volume:
+ return True
+
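+ # With a data volume, an unchanged root disk means the increase went to the billable data volume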
+ if self.has_data_volume and not self.has_value_changed("root_disk_size"):
+ return True
+
+ return False
+
+ def _handle_updated_addon_storage(self, server: Server, increment: int) -> None:
+ if frappe.db.exists(
+ "Subscription",
+ {"document_name": server.name, "team": server.team, "plan_type": "Server Storage Plan"},
+ ):
+ # update the existing subscription
+ frappe.db.set_value(
+ "Subscription",
+ {
+ "document_name": server.name,
+ "team": server.team,
+ "plan_type": "Server Storage Plan",
+ },
+ {
+ "additional_storage": increment,
+ "enabled": 1,
+ },
+ )
+ else:
+ # create a new subscription
+ frappe.get_doc(
+ doctype="Subscription",
+ team=server.team,
+ plan_type="Server Storage Plan",
+ plan="Add-on Storage plan",
+ document_type=server.doctype,
+ document_name=server.name,
+ additional_storage=increment,
+ enabled=1,
+ ).insert()
+
+ def _plan_change_addon_storage(self, server: Server) -> None:
+ if frappe.db.exists(
+ "Subscription",
+ {"document_name": server.name, "team": server.team, "plan_type": "Server Storage Plan"},
+ ):
+ frappe.db.set_value(
+ "Subscription",
+ {
+ "document_name": server.name,
+ "team": server.team,
+ "plan_type": "Server Storage Plan",
+ },
+ "enabled",
+ 0,
)
+ def update_subscription_for_addon_storage(self):
+ """Update subscription record"""
+ server = self.get_server()
+
+ if not server or server.doctype == "NFS Server":
+ return
+
+ server_plan_size = frappe.db.get_value("Server Plan", server.plan, "disk")
+
+ if server_plan_size and self.disk_size > server_plan_size:
+ # Add on storage was added or updated
+ increment = self.disk_size - server_plan_size
+ self._handle_updated_addon_storage(server, increment)
+ elif self.disk_size == server_plan_size:
+ # Server was upgraded or downgraded from plan change
+ # Remove the existing add-on storage subscription
+ self._plan_change_addon_storage(server)
+ return
+
@frappe.whitelist()
def provision(self):
- options = {
- "BlockDeviceMappings": [
+ if self.cloud_provider == "AWS EC2":
+ return self._provision_aws()
+ if self.cloud_provider == "OCI":
+ return self._provision_oci()
+ if self.cloud_provider == "Hetzner":
+ return self._provision_hetzner()
+ if self.cloud_provider == "DigitalOcean":
+ return self._provision_digital_ocean()
+
+ return None
+
+ def _get_digital_ocean_ssh_key_id(self) -> int:
+ """Get digital ocean ssh key id"""
+ keys = self.client().ssh_keys.list()
+ keys = keys.get("ssh_keys", [])
+ existing_key = [key for key in keys if key["name"] == self.ssh_key]
+ if not existing_key:
+ frappe.throw(f"No SSH Key found on Digital Ocean with the name {self.ssh_key}")
+
+ return existing_key[0]["id"]
+
+ def _provision_digital_ocean(self):
+ """Provision a Digital Ocean Droplet"""
+ if not self.machine_image:
+ frappe.throw("Machine Image is required to provision Hetzner Virtual Machine.")
+
+ cluster: Cluster = frappe.get_doc("Cluster", self.cluster)
+
+ firewalls = self.client().firewalls.list()
+ firewalls = firewalls.get("firewalls", [])
+ cluster_firewall = next((fw for fw in firewalls if fw["id"] == cluster.security_group_id), None)
+
+ if not cluster_firewall:
+ frappe.throw(f"Firewall with id {cluster.security_group_id} not found")
+ if cluster_firewall["status"] == "failed":
+ frappe.throw(f"Firewall with id {cluster.security_group_id} failed")
+
+ try:
+ droplet = self.client().droplets.create(
{
- "DeviceName": "/dev/sda1",
+ "name": self.name,
+ "region": cluster.region,
+ "size": self.machine_type,
+ "image": self.machine_image,
+ "ssh_keys": [self._get_digital_ocean_ssh_key_id()],
+ "backups": False,
+ "vpc_uuid": cluster.vpc_id,
+ "user_data": self.get_cloud_init() if self.virtual_machine_image else "",
+ }
+ )
+ self.instance_id = droplet["droplet"]["id"]
+ except Exception as e:
+ frappe.throw(f"Failed to provision Digital Ocean Droplet: {e!s}")
+
+ try:
+ for group in self.get_security_groups():
+ self.client().firewalls.assign_droplets(group, {"droplet_ids": [self.instance_id]})
+ except Exception as e:
+ frappe.throw(f"Failed to assign Firewall to Digital Ocean Droplet: {e!s}")
+
+ self.status = self.get_digital_ocean_status_map()[droplet["droplet"]["status"]]
+ self.save()
+ frappe.db.commit()
+
+ def _provision_hetzner(self):
+ from hcloud.firewalls.domain import Firewall
+ from hcloud.images.domain import Image
+ from hcloud.locations.domain import Location
+ from hcloud.networks.domain import Network
+ from hcloud.server_types.domain import ServerType
+ from hcloud.ssh_keys.domain import SSHKey
+
+ if not self.machine_image:
+ frappe.throw("Machine Image is required to provision Hetzner Virtual Machine.")
+
+ cluster = frappe.get_doc("Cluster", self.cluster)
+
+ server = (
+ self.client()
+ .servers.create(
+ name=self.name,
+ server_type=ServerType(name=self.machine_type),
+ image=Image(cint(self.machine_image)),
+ networks=[], # Don't attach to any network during creation
+ firewalls=[
+ Firewall(id=cint(security_group_id)) for security_group_id in self.get_security_groups()
+ ],
+ location=Location(name=cluster.region),
+ public_net=ServerCreatePublicNetwork(
+ enable_ipv4=True,
+ enable_ipv6=False,
+ ),
+ ssh_keys=[
+ SSHKey(name=self.ssh_key),
+ ],
+ user_data=self.get_cloud_init() if self.virtual_machine_image else "",
+ )
+ .server
+ )
+ self.instance_id = server.id
+ self.save()
+ # Commit here so we don't lose state, since the machine has already been created at this point
+ frappe.db.commit()
+
+ # Attach the server to the private network after creation,
+ # since this lets us provide the required private IP during attachment
+ self.client().servers.attach_to_network(
+ server=server,
+ network=Network(id=cint(cluster.vpc_id)),
+ ip=self.private_ip_address,
+ ).wait_until_finished(HETZNER_ACTION_RETRIES)
+
+ self.status = self.get_hetzner_status_map()[server.status]
+ self.save()
+
+ # Enqueue enable protection separately to avoid any issue
+ frappe.enqueue_doc(self.doctype, self.name, "enable_termination_protection", sync=False)
+
+ def _provision_aws(self): # noqa: C901
+ additional_volumes = []
+ if self.virtual_machine_image:
+ image = frappe.get_doc("Virtual Machine Image", self.virtual_machine_image)
+ if image.has_data_volume:
+ volume = image.get_data_volume()
+ data = {
+ "DeviceName": volume.device,
"Ebs": {
"DeleteOnTermination": True,
- "VolumeSize": self.disk_size, # This in GB. Fucking AWS!
- "VolumeType": "gp3",
+ "VolumeSize": max(self.disk_size, volume.size),
+ "VolumeType": volume.volume_type,
},
+ }
+ if self.kms_key_id:
+ data["Ebs"]["Encrypted"] = True
+ data["Ebs"]["KmsKeyId"] = self.kms_key_id
+
+ additional_volumes.append(data)
+
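+ # Name extra devices /dev/sdf onward, offset past any volume inherited from the image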
+ for index, volume in enumerate(self.volumes, start=len(additional_volumes)):
+ device_name_index = chr(ord("f") + index)
+ volume_options = {
+ "DeviceName": f"/dev/sd{device_name_index}",
+ "Ebs": {
+ "DeleteOnTermination": True,
+ "VolumeSize": volume.size,
+ "VolumeType": volume.volume_type,
},
+ }
+ if volume.iops:
+ volume_options["Ebs"]["Iops"] = volume.iops
+ if volume.throughput:
+ volume_options["Ebs"]["Throughput"] = volume.throughput
+ if self.kms_key_id:
+ volume_options["Ebs"]["Encrypted"] = True
+ volume_options["Ebs"]["KmsKeyId"] = self.kms_key_id
+ additional_volumes.append(volume_options)
+
+ if self.data_disk_snapshot:
+ additional_volumes = [] # Don't attach any additional volumes if we are attaching a data disk snapshot
+
+ if not self.machine_image:
+ self.machine_image = self.get_latest_ubuntu_image()
+ self.save(ignore_version=True)
+
+ root_disk_data = {
+ "DeviceName": "/dev/sda1",
+ "Ebs": {
+ "DeleteOnTermination": True,
+ "VolumeSize": self.root_disk_size, # This in GB. Fucking AWS!
+ "VolumeType": "gp3",
+ },
+ }
+
+ if self.kms_key_id:
+ root_disk_data["Ebs"]["Encrypted"] = True
+ root_disk_data["Ebs"]["KmsKeyId"] = self.kms_key_id
+
+ options = {
+ "BlockDeviceMappings": [
+ *[root_disk_data],
+ *additional_volumes,
],
"ImageId": self.machine_image,
"InstanceType": self.machine_type,
@@ -56,7 +591,10 @@ def provision(self):
"MaxCount": 1,
"MinCount": 1,
"Monitoring": {"Enabled": False},
- "Placement": {"AvailabilityZone": self.availability_zone, "Tenancy": "default"},
+ "Placement": {
+ "AvailabilityZone": self.availability_zone,
+ "Tenancy": "default",
+ },
"NetworkInterfaces": [
{
"AssociatePublicIpAddress": True,
@@ -64,7 +602,7 @@ def provision(self):
"DeviceIndex": 0,
"PrivateIpAddress": self.private_ip_address,
"Groups": self.get_security_groups(),
- "SubnetId": self.aws_subnet_id,
+ "SubnetId": self.subnet_id,
},
],
"DisableApiTermination": True,
@@ -78,17 +616,88 @@ def provision(self):
"UserData": self.get_cloud_init() if self.virtual_machine_image else "",
}
if self.machine_type.startswith("t"):
- options["CreditSpecification"] = {
- "CpuCredits": "unlimited" if self.series == "n" else "standard"
- }
+ options["CreditSpecification"] = {"CpuCredits": "unlimited" if self.series == "n" else "standard"}
response = self.client().run_instances(**options)
- self.aws_instance_id = response["Instances"][0]["InstanceId"]
- self.status = self.get_status_map()[response["Instances"][0]["State"]["Name"]]
+ self.instance_id = response["Instances"][0]["InstanceId"]
+ self.status = self.get_aws_status_map()[response["Instances"][0]["State"]["Name"]]
+ self.save()
+
+ def _provision_oci(self):
+ cluster = frappe.get_doc("Cluster", self.cluster)
+ # OCI doesn't have machine types. So let's make up our own.
+ # nxm = n vcpus and m GB ram
+ vcpu, ram_in_gbs = map(int, self.machine_type.split("x"))
+ instance = (
+ self.client()
+ .launch_instance(
+ LaunchInstanceDetails(
+ compartment_id=cluster.oci_tenancy,
+ availability_domain=self.availability_zone,
+ display_name=self.name,
+ create_vnic_details=CreateVnicDetails(
+ private_ip=self.private_ip_address,
+ assign_private_dns_record=True,
+ nsg_ids=self.get_security_groups(),
+ ),
+ subnet_id=self.subnet_id,
+ instance_options=InstanceOptions(are_legacy_imds_endpoints_disabled=True),
+ source_details=InstanceSourceViaImageDetails(
+ image_id=self.machine_image,
+ boot_volume_size_in_gbs=max(self.root_disk_size, 50),
+ boot_volume_vpus_per_gb=30,
+ ),
+ shape="VM.Standard.E4.Flex",
+ shape_config=LaunchInstanceShapeConfigDetails(
+ ocpus=vcpu // 2, vcpus=vcpu, memory_in_gbs=ram_in_gbs
+ ),
+ platform_config=LaunchInstancePlatformConfig(
+ type="AMD_VM",
+ ),
+ is_pv_encryption_in_transit_enabled=True,
+ metadata={
+ "ssh_authorized_keys": frappe.db.get_value("SSH Key", self.ssh_key, "public_key"),
+ "user_data": (
+ base64.b64encode(self.get_cloud_init().encode()).decode()
+ if self.virtual_machine_image
+ else ""
+ ),
+ },
+ )
+ )
+ .data
+ )
+ self.instance_id = instance.id
+ self.status = self.get_oci_status_map()[instance.lifecycle_state]
self.save()
+ def get_mariadb_context(
+ self, server: Server | DatabaseServer, memory: int
+ ) -> dict[str, str | int] | None:
+ if server.doctype == "Database Server":
+ return {
+ "server_id": server.server_id,
+ "private_ip": self.private_ip_address or "__PRIVATE_IP__",
+ "ansible_memtotal_mb": memory,
+ "mariadb_root_password": server.get_password("mariadb_root_password"),
+ "db_port": server.db_port or 3306,
+ }
+ if server.doctype == "Server" and server.is_unified_server:
+ database_server: DatabaseServer = frappe.get_doc("Database Server", server.database_server)
+ return {
+ "server_id": database_server.server_id,
+ "private_ip": self.private_ip_address or "__PRIVATE_IP__",
+ "ansible_memtotal_mb": memory,
+ "mariadb_root_password": database_server.get_password("mariadb_root_password"),
+ "db_port": database_server.db_port or 3306,
+ }
+
+ return None
+
def get_cloud_init(self):
server = self.get_server()
+ if not server:
+ return ""
log_server, kibana_password = server.get_log_server()
cloud_init_template = "press/press/doctype/virtual_machine/cloud-init.yml.jinja2"
context = {
@@ -99,28 +708,33 @@ def get_cloud_init(self):
"monitoring_password": server.get_monitoring_password(),
"statsd_exporter_service": frappe.render_template(
"press/playbooks/roles/statsd_exporter/templates/statsd_exporter.service",
- {"private_ip": self.private_ip_address},
+ {
+ "private_ip": self.private_ip_address or "__PRIVATE_IP__"
+ }, # Replace inside cloudinit in case of DigitalOcean
is_path=True,
),
"filebeat_config": frappe.render_template(
"press/playbooks/roles/filebeat/templates/filebeat.yml",
{
+ "server_type": server.doctype,
"server": self.name,
"log_server": log_server,
"kibana_password": kibana_password,
},
is_path=True,
),
+ "is_unified_server": getattr(server, "is_unified_server", False),
}
- if server.doctype == "Database Server":
- mariadb_context = {
- "server_id": server.server_id,
- "private_ip": self.private_ip_address,
- "ansible_memtotal_mb": frappe.db.get_value("Plan", server.plan, "memory") or 1024,
- }
+ if server.doctype == "Database Server" or getattr(server, "is_unified_server", False):
+ memory = frappe.db.get_value("Server Plan", server.plan, "memory") or 1024
+ if memory < 1024:
+ frappe.throw("MariaDB cannot be installed on a server plan with less than 1GB RAM.")
+
+ mariadb_context = self.get_mariadb_context(server, memory)
context.update(
{
+ "log_requests": True,
"mariadb_config": frappe.render_template(
"press/playbooks/roles/mariadb/templates/mariadb.cnf",
mariadb_context,
@@ -131,19 +745,55 @@ def get_cloud_init(self):
mariadb_context,
is_path=True,
),
+ "mariadb_root_config": frappe.render_template(
+ "press/playbooks/roles/mariadb/templates/my.cnf",
+ mariadb_context,
+ is_path=True,
+ ),
+ "mariadb_exporter_config": frappe.render_template(
+ "press/playbooks/roles/mysqld_exporter/templates/mysqld_exporter.service",
+ mariadb_context,
+ is_path=True,
+ ),
+ "deadlock_logger_config": frappe.render_template(
+ "press/playbooks/roles/deadlock_logger/templates/deadlock_logger.service",
+ mariadb_context,
+ is_path=True,
+ ),
}
)
-
- init = frappe.render_template(cloud_init_template, context, is_path=True)
- return init
+ return frappe.render_template(cloud_init_template, context, is_path=True)
def get_server(self):
- for doctype in ["Server", "Database Server"]:
+ for doctype in server_doctypes:
server = frappe.db.get_value(doctype, {"virtual_machine": self.name}, "name")
if server:
return frappe.get_doc(doctype, server)
+ return None
+
+ def get_digital_ocean_status_map(self):
+ return {
+ "new": "Pending",
+ "active": "Running",
+ "off": "Stopped",
+ "archive": "Terminated",
+ }
- def get_status_map(self):
+ def get_hetzner_status_map(self):
+ # Hetzner has no status for Terminating or Terminated; it just returns a server-not-found error.
+ return {
+ "running": "Running",
+ "initializing": "Pending",
+ "starting": "Pending",
+ "stopping": "Pending",
+ "off": "Stopped",
+ "deleting": "Pending",
+ "migrating": "Pending",
+ "rebuilding": "Pending",
+ "unknown": "Pending",
+ }
+
+ def get_aws_status_map(self):
return {
"pending": "Pending",
"running": "Running",
@@ -153,28 +803,215 @@ def get_status_map(self):
"terminated": "Terminated",
}
- def get_latest_ubuntu_image(self):
- return self.client("ssm").get_parameter(
- Name="/aws/service/canonical/ubuntu/server/20.04/stable/current/amd64/hvm/ebs-gp2/ami-id"
- )["Parameter"]["Value"]
+ def get_oci_status_map(self):
+ return {
+ "MOVING": "Pending",
+ "PROVISIONING": "Pending",
+ "RUNNING": "Running",
+ "STARTING": "Pending",
+ "STOPPING": "Pending",
+ "STOPPED": "Stopped",
+ "CREATING_IMAGE": "Pending",
+ "TERMINATING": "Pending",
+ "TERMINATED": "Terminated",
+ }
+
+ def get_latest_ubuntu_image(self): # noqa: C901
+ if self.cloud_provider == "AWS EC2":
+ architecture = {"x86_64": "amd64", "arm64": "arm64"}[self.platform]
+ return self.client("ssm").get_parameter(
+ Name=f"/aws/service/canonical/ubuntu/server/20.04/stable/current/{architecture}/hvm/ebs-gp2/ami-id"
+ )["Parameter"]["Value"]
+ if self.cloud_provider == "OCI":
+ cluster = frappe.get_doc("Cluster", self.cluster)
+ client = ComputeClient(cluster.get_oci_config())
+ images = client.list_images(
+ compartment_id=cluster.oci_tenancy,
+ operating_system="Canonical Ubuntu",
+ operating_system_version="20.04",
+ shape="VM.Standard3.Flex",
+ lifecycle_state="AVAILABLE",
+ ).data
+ if images:
+ return images[0].id
+ if self.cloud_provider == "Hetzner":
+ images = self.client().images.get_all(
+ name="ubuntu-22.04",
+ architecture="x86" if self.platform == "x86_64" else "arm",
+ sort="created:desc",
+ type="system",
+ )
+ if images:
+ return images[0].id
+
+ if self.cloud_provider == "DigitalOcean":
+ images = self.client().images.list(type="distribution", private=False)
+ images = images["images"]
+ ubuntu_images = [image for image in images if "22.04" in image["name"]]
+
+ if not ubuntu_images:
+ frappe.throw("No image available for Ubuntu 22.04")
+
+ return ubuntu_images[0]["id"]
+
+ return None
@frappe.whitelist()
def reboot(self):
- self.client().reboot_instances(InstanceIds=[self.aws_instance_id])
+ if self.cloud_provider == "AWS EC2":
+ self.client().reboot_instances(InstanceIds=[self.instance_id])
+ elif self.cloud_provider == "OCI":
+ self.client().instance_action(instance_id=self.instance_id, action="RESET")
+ elif self.cloud_provider == "Hetzner":
+ self.client().servers.reboot(self.get_hetzner_server_instance(fetch_data=False))
+ elif self.cloud_provider == "DigitalOcean":
+ self.client().droplet_actions.post(self.instance_id, {"type": "reboot"})
+
+ if server := self.get_server():
+ log_server_activity(self.series, server.name, action="Reboot")
+
self.sync()
- def increase_disk_size(self, increment=50):
- volume = self.volumes[0]
- volume.size += increment
- self.disk_size = volume.size
- self.client().modify_volume(VolumeId=volume.aws_volume_id, Size=volume.size)
+ @frappe.whitelist()
+ def increase_disk_size(self, volume_id=None, increment=50): # noqa: C901
+ if not increment:
+ return
+ if not volume_id:
+ volume_id = self.volumes[0].volume_id
+
+ volume = find(self.volumes, lambda v: v.volume_id == volume_id)
+ volume.size += int(increment)
+ self.disk_size = self.get_data_volume().size
+ self.root_disk_size = self.get_root_volume().size
+ is_root_volume = self.get_root_volume().volume_id == volume.volume_id
+ volume.last_updated_at = frappe.utils.now_datetime()
+
+ if self.cloud_provider == "AWS EC2":
+ self.client().modify_volume(VolumeId=volume.volume_id, Size=volume.size)
+
+ elif self.cloud_provider == "OCI":
+ if ".bootvolume." in volume.volume_id:
+ self.client(BlockstorageClient).update_boot_volume(
+ boot_volume_id=volume.volume_id,
+ update_boot_volume_details=UpdateBootVolumeDetails(size_in_gbs=volume.size),
+ )
+ else:
+ self.client(BlockstorageClient).update_volume(
+ volume_id=volume.volume_id,
+ update_volume_details=UpdateVolumeDetails(size_in_gbs=volume.size),
+ )
+ elif self.cloud_provider == "Hetzner":
+ if volume_id == HETZNER_ROOT_DISK_ID:
+ frappe.throw("Cannot increase disk size for hetzner root disk.")
+
+ from hcloud.volumes.domain import Volume
+
+ self.client().volumes.resize(Volume(volume_id), volume.size)
+
+ elif self.cloud_provider == "DigitalOcean":
+ if volume_id == DIGITALOCEAN_ROOT_DISK_ID:
+ frappe.throw("Cannot increase disk size for Digital Ocean root disk.")
+
+ self.client().volumes.resize(
+ volume_id,
+ {
+ "size_gigabytes": volume.size,
+ "region": frappe.get_value("Cluster", self.cluster, "region"),
+ },
+ )
+
+ if server := self.get_server():
+ log_server_activity(
+ self.series,
+ server=server.name,
+ action="Disk Size Change",
+ reason=f"{'Root' if is_root_volume else 'Data'} volume increased by {increment} GB",
+ )
+
self.save()
- def get_volumes(self):
- response = self.client().describe_volumes(
- Filters=[{"Name": "attachment.instance-id", "Values": [self.aws_instance_id]}]
- )
- return response["Volumes"]
+ def get_volumes(self): # noqa: C901
+ if self.cloud_provider == "AWS EC2":
+ response = self.client().describe_volumes(
+ Filters=[{"Name": "attachment.instance-id", "Values": [self.instance_id]}]
+ )
+ return response["Volumes"]
+ if self.cloud_provider == "OCI":
+ cluster = frappe.get_doc("Cluster", self.cluster)
+ return (
+ self.client()
+ .list_boot_volume_attachments(
+ compartment_id=cluster.oci_tenancy,
+ availability_domain=self.availability_zone,
+ instance_id=self.instance_id,
+ )
+ .data
+ + self.client()
+ .list_volume_attachments(
+ compartment_id=cluster.oci_tenancy,
+ instance_id=self.instance_id,
+ )
+ .data
+ )
+ if self.cloud_provider == "Hetzner":
+ instance = self.get_hetzner_server_instance(fetch_data=True)
+ volumes = []
+ for v in instance.volumes:
+ volumes.append(
+ frappe._dict(
+ {
+ "id": v.id,
+ "device": v.linux_device,
+ "size": v.size,
+ }
+ )
+ )
+
+ # Dummy/mock root-disk entry so this list stays compatible
+ # with the root_volume handling.
+ volumes.append(
+ frappe._dict(
+ {
+ "id": HETZNER_ROOT_DISK_ID,
+ "device": "/dev/sda",
+ "size": instance.primary_disk_size,
+ }
+ )
+ )
+
+ return volumes
+
+ if self.cloud_provider == "DigitalOcean":
+ attached_volumes = []
+ instance = self.get_digital_ocean_server_instance()["droplet"]
+ volume_ids = instance["volume_ids"] # List of attached volume IDs
+ if volume_ids:
+ volumes = self.client().volumes.list()["volumes"]
+ volumes = [v for v in volumes if v["id"] in volume_ids]
+ else:
+ volumes = []
+
+ # pydo returns plain dicts, so index by key and normalize into frappe._dicts
+ for v in volumes:
+ attached_volumes.append(
+ frappe._dict(
+ {
+ "id": v["id"],
+ "size": v["size_gigabytes"],
+ }
+ )
+ )
+
+ # Dummy/mock root disk entry, mirroring the Hetzner representation above
+ attached_volumes.append(
+ frappe._dict(
+ {
+ "id": DIGITALOCEAN_ROOT_DISK_ID,
+ "device": "/dev/sda",
+ "size": instance["disk"],
+ }
+ )
+ )
+ return attached_volumes
+ return None
def convert_to_gp3(self):
for volume in self.volumes:
@@ -183,7 +1020,7 @@ def convert_to_gp3(self):
volume.iops = max(3000, volume.iops)
volume.throughput = 250 if volume.size > 340 else 125
self.client().modify_volume(
- VolumeId=volume.aws_volume_id,
+ VolumeId=volume.volume_id,
VolumeType=volume.volume_type,
Iops=volume.iops,
Throughput=volume.throughput,
@@ -191,161 +1028,815 @@ def convert_to_gp3(self):
self.save()
@frappe.whitelist()
- def sync(self):
- response = self.client().describe_instances(InstanceIds=[self.aws_instance_id])
- if response["Reservations"]:
- instance = response["Reservations"][0]["Instances"][0]
+ def sync(self, *args, **kwargs):
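+ # Acquire a row lock on this document so concurrent syncs serialize; bail out if another sync holds it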
+ try:
+ frappe.db.get_value(self.doctype, self.name, "status", for_update=True)
+ except frappe.QueryTimeoutError: # lock wait timeout
+ return None
+ if self.cloud_provider == "AWS EC2":
+ return self._sync_aws(*args, **kwargs)
+ if self.cloud_provider == "OCI":
+ return self._sync_oci(*args, **kwargs)
+ if self.cloud_provider == "Hetzner":
+ return self._sync_hetzner(*args, **kwargs)
+ if self.cloud_provider == "DigitalOcean":
+ return self._sync_digital_ocean(*args, **kwargs)
+ return None
- self.status = self.get_status_map()[instance["State"]["Name"]]
- self.machine_type = instance.get("InstanceType")
+ def _update_volume_info_after_sync(self):
+ attached_volumes = []
+ attached_devices = []
- self.public_ip_address = instance.get("PublicIpAddress")
- self.private_ip_address = instance.get("PrivateIpAddress")
+ for volume_index, volume in enumerate(self.get_volumes(), start=1):
+ existing_volume = find(self.volumes, lambda v: v.volume_id == volume.id)
+ row = existing_volume if existing_volume else frappe._dict()
+ row.volume_id = volume.id
+ attached_volumes.append(row.volume_id)
+ row.size = volume.size
+ row.device = volume.device
+ attached_devices.append(row.device)
- self.public_dns_name = instance.get("PublicDnsName")
- self.private_dns_name = instance.get("PrivateDnsName")
+ row.idx = volume_index
+ if not existing_volume:
+ self.append("volumes", row)
- for volume in self.get_volumes():
- existing_volume = find(
- self.volumes, lambda v: v.aws_volume_id == volume["VolumeId"]
- )
- if existing_volume:
- row = existing_volume
- else:
- row = frappe._dict()
- row.aws_volume_id = volume["VolumeId"]
- row.volume_type = volume["VolumeType"]
- row.size = volume["Size"]
- row.iops = volume["Iops"]
- if "Throughput" in volume:
- row.throughput = volume["Throughput"]
+ for volume in list(self.volumes):
+ if volume.volume_id not in attached_volumes:
+ self.remove(volume)
- if not existing_volume:
- self.append("volumes", row)
+ for volume in list(self.temporary_volumes):
+ if volume.device not in attached_devices:
+ self.remove(volume)
- self.disk_size = self.volumes[0].size
+ if self.volumes:
+ self.disk_size = self.get_data_volume().size
+ self.root_disk_size = self.get_root_volume().size
- self.termination_protection = self.client().describe_instance_attribute(
- InstanceId=self.aws_instance_id, Attribute="disableApiTermination"
- )["DisableApiTermination"]["Value"]
- else:
+ def _sync_digital_ocean(self, *args, **kwargs):
+ server_instance = self.get_digital_ocean_server_instance()
+
+ if server_instance.get("id", None) == "not_found":
self.status = "Terminated"
- self.save()
- self.update_servers()
+ self.save()
+ self.update_servers()
+ return
+
+ server_instance = server_instance.get("droplet", {})
+
+ self.status = self.get_digital_ocean_status_map()[server_instance["status"]]
+ self.machine_type = server_instance["size"]["slug"]
+ self.vcpu = server_instance["size"]["vcpus"]
+ self.ram = server_instance["size"]["memory"]
+
+ self.private_ip_address = ""
+ self.public_ip_address = ""
+
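+ # A droplet may have both public and private v4 interfaces; pick each one out by its type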
+ if len(server_instance["networks"]["v4"]) > 0:
+ private_network = next(
+ (net for net in server_instance["networks"]["v4"] if net["type"] == "private"), None
+ )
+ public_network = next(
+ (net for net in server_instance["networks"]["v4"] if net["type"] == "public"), None
+ )
+
+ if private_network:
+ self.private_ip_address = private_network.get("ip_address", "")
+
+ if public_network:
+ self.public_ip_address = public_network.get("ip_address", "")
+
+ # We don't have volume support yet for DigitalOcean droplets
+ self.root_disk_size = server_instance["disk"]
+ self.disk_size = server_instance["disk"]
+
+ self._update_volume_info_after_sync()
+
+ self.save()
+ self.update_servers()
+
+ def _sync_hetzner(self, server_instance=None):
+ if not server_instance:
+ try:
+ server_instance = self.get_hetzner_server_instance(fetch_data=True)
+ except APIException as e:
+ if e.code == "not_found":
+ self.status = "Terminated"
+ self.save()
+ else:
+ raise
+
+ if not server_instance:
+ # Server not found: status was set to Terminated above.
+ # Propagate the new status to the linked server documents as well.
+ if self.status == "Terminated":
+ self.update_servers()
+ return
+
+ self.status = self.get_hetzner_status_map()[server_instance.status]
+ self.machine_type = server_instance.server_type.name
+ self.vcpu = server_instance.server_type.cores
+ self.ram = server_instance.server_type.memory * 1024
+
+ self.private_ip_address = server_instance.private_net[0].ip if server_instance.private_net else ""
+ self.public_ip_address = server_instance.public_net.ipv4.ip
+
+ self.termination_protection = server_instance.protection.get("delete", False)
+
+ self._update_volume_info_after_sync()
+
+ self.save()
+ self.update_servers()
+
+ def _sync_oci(self, instance=None): # noqa: C901
+ if not instance:
+ instance = self.client().get_instance(instance_id=self.instance_id).data
+ if instance and instance.lifecycle_state != "TERMINATED":
+ cluster = frappe.get_doc("Cluster", self.cluster)
+
+ self.status = self.get_oci_status_map()[instance.lifecycle_state]
+
+ self.ram = instance.shape_config.memory_in_gbs * 1024
+ self.vcpu = instance.shape_config.vcpus
+ self.machine_type = f"{int(self.vcpu)}x{int(instance.shape_config.memory_in_gbs)}"
+
+ for vnic_attachment in (
+ self.client()
+ .list_vnic_attachments(compartment_id=cluster.oci_tenancy, instance_id=self.instance_id)
+ .data
+ ):
+ try:
+ vnic = self.client(VirtualNetworkClient).get_vnic(vnic_id=vnic_attachment.vnic_id).data
+ self.public_ip_address = vnic.public_ip
+ except Exception:
+ log_error(
+ title="OCI VNIC Fetch Error",
+ virtual_machine=self.name,
+ vnic_attachment=vnic_attachment,
+ )
+
+ available_volumes = []
+ for volume in self.get_volumes():
+ try:
+ if hasattr(volume, "volume_id"):
+ volume = self.client(BlockstorageClient).get_volume(volume_id=volume.volume_id).data
+ else:
+ volume = (
+ self.client(BlockstorageClient)
+ .get_boot_volume(boot_volume_id=volume.boot_volume_id)
+ .data
+ )
+ existing_volume = find(self.volumes, lambda v: v.volume_id == volume.id)
+ if existing_volume:
+ row = existing_volume
+ else:
+ row = frappe._dict()
+ row.volume_id = volume.id
+ row.size = volume.size_in_gbs
+
+ vpus = volume.vpus_per_gb
+ # Reference: https://docs.oracle.com/en-us/iaas/Content/Block/Concepts/blockvolumeperformance.htm
+ row.iops = min(1.5 * vpus + 45, 2500 * vpus) * row.size
+ row.throughput = min(12 * vpus + 360, 20 * vpus + 280) * row.size // 1000
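+ # e.g. a Balanced volume (10 VPUs): per-GB IOPS = min(1.5 * 10 + 45, 2500 * 10) = 60,
+ # so a 100 GB volume reports 6000 IOPS and 48 MB/s throughput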
+
+ if row.volume_id:
+ available_volumes.append(row.volume_id)
+
+ if not existing_volume and row.volume_id:
+ self.append("volumes", row)
+ except Exception:
+ log_error(
+ title="OCI Volume Fetch Error",
+ virtual_machine=self.name,
+ volume=volume,
+ )
+ if self.volumes:
+ self.disk_size = self.get_data_volume().size
+ self.root_disk_size = self.get_root_volume().size
+
+ for volume in list(self.volumes):
+ if volume.volume_id not in available_volumes:
+ self.remove(volume)
+
+ else:
+ self.status = "Terminated"
+ self.save()
+ self.update_servers()
+
+ def has_static_ip(self, instance) -> bool:
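+ # Elastic IPs report the owning account in IpOwnerId, while AWS-owned
+ # ephemeral public IPs report "amazon".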
+ sip = False
+ try:
+ ip_owner_id = instance["NetworkInterfaces"][0]["Association"]["IpOwnerId"]
+ sip = ip_owner_id.lower() != "amazon"
+ except (KeyError, IndexError):
+ pass
+ return sip
+
+ def _sync_aws(self, response=None): # noqa: C901
+ if not response:
+ try:
+ response = self.client().describe_instances(InstanceIds=[self.instance_id])
+ except botocore.exceptions.ClientError as e:
+ if e.response.get("Error", {}).get("Code") == "InvalidInstanceID.NotFound":
+ response = {"Reservations": []}
+ if response["Reservations"]:
+ instance = response["Reservations"][0]["Instances"][0]
+
+ self.status = self.get_aws_status_map()[instance["State"]["Name"]]
+ self.machine_type = instance.get("InstanceType")
+
+ self.public_ip_address = instance.get("PublicIpAddress")
+ self.private_ip_address = instance.get("PrivateIpAddress")
+ self.is_static_ip = self.has_static_ip(instance)
+
+ self.public_dns_name = instance.get("PublicDnsName")
+ self.private_dns_name = instance.get("PrivateDnsName")
+ self.platform = instance.get("Architecture", "x86_64")
+ attached_volumes = []
+ attached_devices = []
+ for volume_index, volume in enumerate(self.get_volumes(), start=1): # idx starts from 1
+ existing_volume = find(self.volumes, lambda v: v.volume_id == volume["VolumeId"])
+ if existing_volume:
+ row = existing_volume
+ else:
+ row = frappe._dict()
+ row.volume_id = volume["VolumeId"]
+ attached_volumes.append(row.volume_id)
+ row.volume_type = volume["VolumeType"]
+ row.size = volume["Size"]
+ row.iops = volume["Iops"]
+ row.device = volume["Attachments"][0]["Device"]
+ attached_devices.append(row.device)
+
+ if "Throughput" in volume:
+ row.throughput = volume["Throughput"]
+
+ row.idx = volume_index
+ if not existing_volume:
+ self.append("volumes", row)
+
+ self.disk_size = self.get_data_volume().size
+ self.root_disk_size = self.get_root_volume().size
+
+ for volume in list(self.volumes):
+ if volume.volume_id not in attached_volumes:
+ self.remove(volume)
+
+ for volume in list(self.temporary_volumes):
+ if volume.device not in attached_devices:
+ self.remove(volume)
+
+ self.termination_protection = self.client().describe_instance_attribute(
+ InstanceId=self.instance_id, Attribute="disableApiTermination"
+ )["DisableApiTermination"]["Value"]
+
+ instance_type_response = self.client().describe_instance_types(InstanceTypes=[self.machine_type])
+ self.ram = instance_type_response["InstanceTypes"][0]["MemoryInfo"]["SizeInMiB"]
+ self.vcpu = instance_type_response["InstanceTypes"][0]["VCpuInfo"]["DefaultVCpus"]
+ else:
+ self.status = "Terminated"
+ self.save()
+ self.update_servers()
+
+ def get_root_volume(self):
+ if len(self.volumes) == 1:
+ return self.volumes[0]
+
+ ROOT_VOLUME_FILTERS = {
+ "AWS EC2": lambda v: v.device == "/dev/sda1",
+ "OCI": lambda v: ".bootvolume." in v.volume_id,
+ "Hetzner": lambda v: v.device == "/dev/sda",
+ "DigitalOcean": lambda v: v.device == "/dev/sda",
+ }
+ root_volume_filter = ROOT_VOLUME_FILTERS.get(self.cloud_provider)
+ volume = find(self.volumes, root_volume_filter)
+ if volume: # Un-provisioned machines might not have any volumes
+ return volume
+ return frappe._dict({"size": 0})
+
+ def get_data_volume(self):
+ if not self.has_data_volume:
+ return self.get_root_volume()
+
+ if len(self.volumes) == 1:
+ return self.volumes[0]
+
+ temporary_volume_devices = [x.device for x in self.temporary_volumes]
+
+ DATA_VOLUME_FILTERS = {
+ "AWS EC2": lambda v: v.device != "/dev/sda1" and v.device not in temporary_volume_devices,
+ "OCI": lambda v: ".bootvolume." not in v.volume_id and v.device not in temporary_volume_devices,
+ "Hetzner": lambda v: v.device != "/dev/sda" and v.device not in temporary_volume_devices,
+ "DigitalOcean": lambda v: v.device != "/dev/sda" and v.device not in temporary_volume_devices,
+ }
+ data_volume_filter = DATA_VOLUME_FILTERS.get(self.cloud_provider)
+ volume = find(self.volumes, data_volume_filter)
+ if volume: # Un-provisioned machines might not have any volumes
+ return volume
+ return frappe._dict({"size": 0})
def update_servers(self):
status_map = {
"Pending": "Pending",
"Running": "Active",
"Terminated": "Archived",
- "Stopped": "Archived",
+ "Stopped": "Pending",
}
- for doctype in ["Server", "Database Server", "Proxy Server"]:
+ for doctype in server_doctypes:
server = frappe.get_all(doctype, {"virtual_machine": self.name}, pluck="name")
if server:
server = server[0]
frappe.db.set_value(doctype, server, "ip", self.public_ip_address)
- if self.public_ip_address:
+ frappe.db.set_value(doctype, server, "private_ip", self.private_ip_address)
+ if doctype in ["Server", "Proxy Server"]:
+ frappe.db.set_value(doctype, server, "is_static_ip", self.is_static_ip)
+ if doctype in ["Server", "Database Server"]:
+ frappe.db.set_value(doctype, server, "ram", self.ram)
+ if self.public_ip_address and self.has_value_changed("public_ip_address"):
frappe.get_doc(doctype, server).create_dns_record()
frappe.db.set_value(doctype, server, "status", status_map[self.status])
def update_name_tag(self, name):
- self.client().create_tags(
- Resources=[self.aws_instance_id],
- Tags=[
- {"Key": "Name", "Value": name},
- ],
- )
+ if self.cloud_provider == "AWS EC2":
+ self.client().create_tags(
+ Resources=[self.instance_id],
+ Tags=[
+ {"Key": "Name", "Value": name},
+ ],
+ )
@frappe.whitelist()
- def create_image(self):
+ def create_image(self, public=True):
image = frappe.get_doc(
- {"doctype": "Virtual Machine Image", "virtual_machine": self.name}
+ {
+ "doctype": "Virtual Machine Image",
+ "virtual_machine": self.name,
+ "public": public,
+ "has_data_volume": self.has_data_volume,
+ "platform": self.platform,
+ }
).insert()
return image.name
@frappe.whitelist()
- def create_snapshots(self):
+ def create_snapshots(
+ self,
+ exclude_boot_volume=False,
+ physical_backup=False,
+ rolling_snapshot=False,
+ dedicated_snapshot=False,
+ ):
+ """
+ exclude_boot_volume is applicable only for Servers with data volume
+ """
+ if not self.has_data_volume:
+ exclude_boot_volume = False
+
+ # Store references to the newly created snapshots in flags
+ # so that we can identify the snapshots created in the current session
+ self.flags.created_snapshots = []
+ if self.cloud_provider == "AWS EC2":
+ self._create_snapshots_aws(
+ exclude_boot_volume, physical_backup, rolling_snapshot, dedicated_snapshot
+ )
+ elif self.cloud_provider == "OCI":
+ self._create_snapshots_oci(exclude_boot_volume)
+ elif self.cloud_provider == "Hetzner":
+ self._create_snapshots_hetzner()
+
+ def _create_snapshots_aws(
+ self,
+ exclude_boot_volume: bool,
+ physical_backup: bool,
+ rolling_snapshot: bool,
+ dedicated_snapshot: bool,
+ ):
+ temporary_volume_ids = self.get_temporary_volume_ids()
+ instance_specification = {"InstanceId": self.instance_id, "ExcludeBootVolume": exclude_boot_volume}
+ if temporary_volume_ids:
+ instance_specification["ExcludeDataVolumeIds"] = temporary_volume_ids
+
response = self.client().create_snapshots(
- InstanceSpecification={"InstanceId": self.aws_instance_id},
+ InstanceSpecification=instance_specification,
Description=f"Frappe Cloud - {self.name} - {frappe.utils.now()}",
TagSpecifications=[
{
"ResourceType": "snapshot",
"Tags": [
- {"Key": "Name", "Value": f"Frappe Cloud - {self.name} - {frappe.utils.now()}"}
+ {"Key": "Name", "Value": f"Frappe Cloud - {self.name} - {frappe.utils.now()}"},
+ {"Key": "Physical Backup", "Value": "Yes" if physical_backup else "No"},
+ {"Key": "Rolling Snapshot", "Value": "Yes" if rolling_snapshot else "No"},
+ {"Key": "Dedicated Snapshot", "Value": "Yes" if dedicated_snapshot else "No"},
],
},
],
)
for snapshot in response.get("Snapshots", []):
try:
- frappe.get_doc(
+ doc = frappe.get_doc(
{
"doctype": "Virtual Disk Snapshot",
"virtual_machine": self.name,
- "aws_snapshot_id": snapshot["SnapshotId"],
+ "snapshot_id": snapshot["SnapshotId"],
+ "physical_backup": physical_backup,
+ "rolling_snapshot": rolling_snapshot,
+ "dedicated_snapshot": dedicated_snapshot,
}
).insert()
+ self.flags.created_snapshots.append(doc.name)
except Exception:
- log_error(
- title="Virtual Disk Snapshot Error", virtual_machine=self.name, snapshot=snapshot
- )
+ log_error(title="Virtual Disk Snapshot Error", virtual_machine=self.name, snapshot=snapshot)
- @frappe.whitelist()
- def disable_termination_protection(self):
- self.client().modify_instance_attribute(
- InstanceId=self.aws_instance_id, DisableApiTermination={"Value": False}
+ def _create_snapshots_oci(self, exclude_boot_volume: bool):
+ for volume in self.volumes:
+ try:
+ if ".bootvolume." in volume.volume_id:
+ if exclude_boot_volume:
+ continue
+ snapshot = (
+ self.client(BlockstorageClient)
+ .create_boot_volume_backup(
+ CreateBootVolumeBackupDetails(
+ boot_volume_id=volume.volume_id,
+ type="INCREMENTAL",
+ display_name=f"Frappe Cloud - {self.name} - {volume.name} - {frappe.utils.now()}",
+ )
+ )
+ .data
+ )
+ else:
+ snapshot = (
+ self.client(BlockstorageClient)
+ .create_volume_backup(
+ CreateVolumeBackupDetails(
+ volume_id=volume.volume_id,
+ type="INCREMENTAL",
+ display_name=f"Frappe Cloud - {self.name} - {volume.name} - {frappe.utils.now()}",
+ )
+ )
+ .data
+ )
+ doc = frappe.get_doc(
+ {
+ "doctype": "Virtual Disk Snapshot",
+ "virtual_machine": self.name,
+ "snapshot_id": snapshot.id,
+ }
+ ).insert()
+ self.flags.created_snapshots.append(doc.name)
+ except TransientServiceError:
+ # We've hit the OCI rate limit for creating snapshots.
+ # Let's try again later.
+ pass
+ except Exception:
+ # `snapshot` may be unbound if the create call itself failed, so log the volume instead
+ log_error(title="Virtual Disk Snapshot Error", virtual_machine=self.name, volume=volume.volume_id)
+
+ def _create_snapshots_hetzner(self):
+ server = self.get_hetzner_server_instance(fetch_data=True)
+ response = server.create_image(
+ type="snapshot", description=f"Frappe Cloud - {self.name} - {frappe.utils.now()}"
)
+
+ doc = frappe.get_doc(
+ {
+ "doctype": "Virtual Disk Snapshot",
+ "virtual_machine": self.name,
+ "snapshot_id": response.image.id,
+ "volume_id": HETZNER_ROOT_DISK_ID,
+ }
+ ).insert()
+ self.flags.created_snapshots.append(doc.name)
+
+ def get_temporary_volume_ids(self) -> list[str]:
+ tmp_volume_ids = set()
+ tmp_volumes_devices = [x.device for x in self.temporary_volumes]
+
+ def get_volume_id_by_device(device):
+ for volume in self.volumes:
+ if volume.device == device:
+ return volume.volume_id
+ return None
+
+ for device in tmp_volumes_devices:
+ volume_id = get_volume_id_by_device(device)
+ if volume_id:
+ tmp_volume_ids.add(volume_id)
+ return list(tmp_volume_ids)
+
+ @frappe.whitelist()
+ def disable_termination_protection(self, sync: bool | None = None):
+ if sync is None:
+ sync = False
+
+ if self.cloud_provider == "AWS EC2":
+ self.client().modify_instance_attribute(
+ InstanceId=self.instance_id, DisableApiTermination={"Value": False}
+ )
+ elif self.cloud_provider == "Hetzner":
+ self.get_hetzner_server_instance().change_protection(delete=False, rebuild=False)
+
+ if sync:
+ self.sync()
+
+ @frappe.whitelist()
+ def enable_termination_protection(self, sync: bool | None = None):
+ if sync is None:
+ sync = False
+
+ if self.cloud_provider == "AWS EC2":
+ self.client().modify_instance_attribute(
+ InstanceId=self.instance_id, DisableApiTermination={"Value": True}
+ )
+ elif self.cloud_provider == "Hetzner":
+ self.get_hetzner_server_instance().change_protection(delete=True, rebuild=True)
+
+ if sync:
+ self.sync()
+
+ @frappe.whitelist()
+ def start(self):
+ if self.cloud_provider == "AWS EC2":
+ self.client().start_instances(InstanceIds=[self.instance_id])
+ elif self.cloud_provider == "OCI":
+ self.client().instance_action(instance_id=self.instance_id, action="START")
+ elif self.cloud_provider == "Hetzner":
+ self.client().servers.power_on(self.get_hetzner_server_instance(fetch_data=False))
+ elif self.cloud_provider == "DigitalOcean":
+ self.client().droplet_actions.post(self.instance_id, {"type": "power_on"})
+
+ # DigitalOcean `start` takes some time, so this immediate sync does nothing useful for DO.
self.sync()
@frappe.whitelist()
- def enable_termination_protection(self):
- self.client().modify_instance_attribute(
- InstanceId=self.aws_instance_id, DisableApiTermination={"Value": True}
- )
+ def stop(self, force=False):
+ if self.cloud_provider == "AWS EC2":
+ self.client().stop_instances(InstanceIds=[self.instance_id], Force=bool(force))
+ elif self.cloud_provider == "OCI":
+ self.client().instance_action(instance_id=self.instance_id, action="STOP")
+ elif self.cloud_provider == "Hetzner":
+ self.client().servers.shutdown(self.get_hetzner_server_instance(fetch_data=False))
+ elif self.cloud_provider == "DigitalOcean":
+ self.client().droplet_actions.post(self.instance_id, {"type": "power_off"})
self.sync()
@frappe.whitelist()
- def start(self):
- self.client().start_instances(InstanceIds=[self.aws_instance_id])
+ def force_stop(self):
+ self.stop(force=True)
@frappe.whitelist()
- def stop(self):
- self.client().stop_instances(InstanceIds=[self.aws_instance_id])
+ def force_terminate(self):
+ if not frappe.conf.developer_mode:
+ return
+ if self.cloud_provider == "AWS EC2":
+ self.client().modify_instance_attribute(
+ InstanceId=self.instance_id, DisableApiTermination={"Value": False}
+ )
+ self.client().terminate_instances(InstanceIds=[self.instance_id])
@frappe.whitelist()
- def terminate(self):
- self.client().terminate_instances(InstanceIds=[self.aws_instance_id])
+ def terminate(self): # noqa: C901
+ if self.cloud_provider == "AWS EC2":
+ self.client().terminate_instances(InstanceIds=[self.instance_id])
+
+ elif self.cloud_provider == "OCI":
+ self.client().terminate_instance(instance_id=self.instance_id)
+
+ elif self.cloud_provider == "Hetzner":
+ for volume in self.volumes:
+ if volume.volume_id == HETZNER_ROOT_DISK_ID:
+ continue
+ self.delete_volume(volume.volume_id, sync=False)
+ self.client().servers.delete(
+ self.get_hetzner_server_instance(fetch_data=False)
+ ).wait_until_finished(HETZNER_ACTION_RETRIES)
+
+ elif self.cloud_provider == "DigitalOcean":
+ for volume in self.volumes:
+ if volume.volume_id == DIGITALOCEAN_ROOT_DISK_ID:
+ continue
+ self.delete_volume(volume.volume_id, sync=False)
+
+ self.client().droplets.destroy(self.instance_id)
+
+ if server := self.get_server():
+ log_server_activity(self.series, server.name, action="Terminated")
+
+ def _wait_for_digital_ocean_resize_action_completion(self, action_id: int):
+ """Wait for resize to complete before starting the droplet."""
+ if self.cloud_provider == "DigitalOcean" and action_id:
+ time.sleep(2) # Wait for some time before checking the action status
+ action = self.client().actions.get(action_id)
+ if action["action"]["status"] == "completed":
+ self.start()
+ else:
+ frappe.enqueue_doc(
+ "Virtual Machine",
+ self.name,
+ "_wait_for_digital_ocean_resize_action_completion",
+ action_id=action_id,
+ queue="long",
+ )
@frappe.whitelist()
- def resize(self, machine_type):
- self.client().modify_instance_attribute(
- InstanceId=self.aws_instance_id,
- InstanceType={"Value": machine_type},
- )
+ def resize(self, machine_type, upgrade_disk: bool = False):
+ if self.cloud_provider == "AWS EC2":
+ self.client().modify_instance_attribute(
+ InstanceId=self.instance_id,
+ InstanceType={"Value": machine_type},
+ )
+ elif self.cloud_provider == "OCI":
+ vcpu, ram_in_gbs = map(int, machine_type.split("x"))
+ self.client().update_instance(
+ self.instance_id,
+ UpdateInstanceDetails(
+ shape_config=UpdateInstanceShapeConfigDetails(
+ ocpus=vcpu // 2, vcpus=vcpu, memory_in_gbs=ram_in_gbs
+ )
+ ),
+ )
+ elif self.cloud_provider == "Hetzner":
+ from hcloud.server_types.domain import ServerType
+
+ self.client().servers.change_type(
+ self.get_hetzner_server_instance(fetch_data=False),
+ server_type=ServerType(name=machine_type),
+ upgrade_disk=upgrade_disk,
+ )
+
+ elif self.cloud_provider == "DigitalOcean":
+ resize_action = self.client().droplet_actions.post(
+ droplet_id=self.instance_id,
+ body={
+ "type": "resize",
+ "size": machine_type,
+ "disk": upgrade_disk,
+ },
+ )
+ action_id = resize_action["action"]["id"]
+ frappe.enqueue_doc(
+ "Virtual Machine",
+ self.name,
+ "_wait_for_digital_ocean_resize_action_completion",
+ action_id=action_id,
+ queue="long",
+ )
+
self.machine_type = machine_type
self.save()
- def client(self, client_type="ec2"):
+ @frappe.whitelist()
+ def get_ebs_performance(self):
+ if self.cloud_provider == "AWS EC2":
+ volume = self.volumes[0]
+ return volume.iops, volume.throughput
+ return None
+
+ @frappe.whitelist()
+ def update_ebs_performance(self, volume_id, iops, throughput):
+ if self.cloud_provider == "AWS EC2":
+ volume = find(self.volumes, lambda v: v.volume_id == volume_id)
+ new_iops = cint(iops) or volume.iops  # cint tolerates None/empty values from the UI
+ new_throughput = cint(throughput) or volume.throughput
+ self.client().modify_volume(
+ VolumeId=volume.volume_id,
+ Iops=new_iops,
+ Throughput=new_throughput,
+ )
+ self.sync()
+
+ @frappe.whitelist()
+ def get_oci_volume_performance(self):
+ if self.cloud_provider == "OCI":
+ volume = self.volumes[0]
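+ # Invert the per-GB IOPS formula (1.5 * VPUs + 45) from _sync_oci to recover the configured VPUs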
+ return ((volume.iops / volume.size) - 45) / 1.5
+ return None
+
+ @frappe.whitelist()
+ def update_oci_volume_performance(self, vpus):
+ if self.cloud_provider == "OCI":
+ volume = self.volumes[0]
+ if ".bootvolume." in volume.volume_id:
+ self.client(BlockstorageClient).update_boot_volume(
+ boot_volume_id=volume.volume_id,
+ update_boot_volume_details=UpdateBootVolumeDetails(vpus_per_gb=int(vpus)),
+ )
+ else:
+ self.client(BlockstorageClient).update_volume(
+ volume_id=volume.volume_id,
+ update_volume_details=UpdateVolumeDetails(vpus_per_gb=int(vpus)),
+ )
+ self.sync()
+
+ def client(self, client_type=None):
cluster = frappe.get_doc("Cluster", self.cluster)
- return boto3.client(
- client_type,
- region_name=self.region,
- aws_access_key_id=cluster.aws_access_key_id,
- aws_secret_access_key=cluster.get_password("aws_secret_access_key"),
- )
+ if self.cloud_provider == "AWS EC2":
+ return boto3.client(
+ client_type or "ec2",
+ region_name=self.region,
+ aws_access_key_id=cluster.aws_access_key_id,
+ aws_secret_access_key=cluster.get_password("aws_secret_access_key"),
+ )
+
+ if self.cloud_provider == "OCI":
+ return (client_type or ComputeClient)(cluster.get_oci_config())
+
+ if self.cloud_provider == "Hetzner":
+ api_token = cluster.get_password("hetzner_api_token")
+ return HetznerClient(token=api_token)
+
+ if self.cloud_provider == "DigitalOcean":
+ api_token = cluster.get_password("digital_ocean_api_token")
+ return pydo.Client(token=api_token)
+
+ return None
@frappe.whitelist()
- def create_server(self):
+ def create_unified_server(self) -> tuple[Server, DatabaseServer]:
+ """Virtual machines of series U will create a u series app server and u series database server"""
+
+ if self.series != "u":
+ frappe.throw("Only virtual machines of series 'u' can create unified servers.")
+
+ server_document = {
+ "doctype": "Server",
+ "hostname": f"u{self.index}-{slug(self.cluster)}",
+ "domain": self.domain,
+ "cluster": self.cluster,
+ "provider": self.cloud_provider,
+ "virtual_machine": self.name,
+ "team": self.team,
+ "is_primary": True,
+ "platform": self.platform,
+ "is_unified_server": True,
+ }
+
+ if self.virtual_machine_image:
+ server_document["is_server_prepared"] = True
+ server_document["is_server_setup"] = True
+ server_document["is_server_renamed"] = True
+ server_document["is_upstream_setup"] = True
+
+ else:
+ server_document["is_provisioning_press_job_completed"] = True
+
+ common_agent_password = frappe.generate_hash(length=32)
+
+ server = frappe.get_doc(server_document)
+ server.agent_password = common_agent_password
+ server = server.insert()
+
+ database_server_document = {
+ "doctype": "Database Server",
+ "hostname": f"u{self.index}-{slug(self.cluster)}",
+ "domain": self.domain,
+ "cluster": self.cluster,
+ "provider": self.cloud_provider,
+ "virtual_machine": self.name,
+ "server_id": self.index,
+ "is_primary": True,
+ "team": self.team,
+ "is_unified_server": True,
+ }
+
+ if self.virtual_machine_image:
+ database_server_document["is_server_prepared"] = True
+ database_server_document["is_server_setup"] = True
+ database_server_document["is_server_renamed"] = True
+ if self.data_disk_snapshot:
+ database_server_document["mariadb_root_password"] = get_decrypted_password(
+ "Virtual Disk Snapshot", self.data_disk_snapshot, "mariadb_root_password"
+ )
+ else:
+ database_server_document["mariadb_root_password"] = get_decrypted_password(
+ "Virtual Machine Image", self.virtual_machine_image, "mariadb_root_password"
+ )
+
+ if not database_server_document["mariadb_root_password"]:
+ frappe.throw(
+ f"Virtual Machine Image {self.virtual_machine_image} does not have a MariaDB root password set."
+ )
+ else:
+ database_server_document["is_provisioning_press_job_completed"] = True
+
+ database_server = frappe.get_doc(database_server_document)
+ database_server.agent_password = common_agent_password
+ database_server = database_server.insert()
+
+ return server, database_server
+
+ @frappe.whitelist()
+ def create_server(self, is_secondary: bool = False, primary: str | None = None) -> Server:
document = {
"doctype": "Server",
"hostname": f"{self.series}{self.index}-{slug(self.cluster)}",
"domain": self.domain,
"cluster": self.cluster,
- "provider": "AWS EC2",
+ "provider": self.cloud_provider,
"virtual_machine": self.name,
"team": self.team,
+ "is_primary": not is_secondary,
+ "is_secondary": is_secondary,
+ "platform": self.platform,
+ "primary": primary,
}
if self.virtual_machine_image:
@@ -353,17 +1844,21 @@ def create_server(self):
document["is_server_setup"] = True
document["is_server_renamed"] = True
document["is_upstream_setup"] = True
+ else:
+ document["is_provisioning_press_job_completed"] = True
- return frappe.get_doc(document).insert()
+ server = frappe.get_doc(document).insert()
+ frappe.msgprint(frappe.get_desk_link(server.doctype, server.name))
+ return server
@frappe.whitelist()
- def create_database_server(self):
+ def create_database_server(self) -> DatabaseServer:
document = {
"doctype": "Database Server",
"hostname": f"{self.series}{self.index}-{slug(self.cluster)}",
"domain": self.domain,
"cluster": self.cluster,
- "provider": "AWS EC2",
+ "provider": self.cloud_provider,
"virtual_machine": self.name,
"server_id": self.index,
"is_primary": True,
@@ -374,19 +1869,92 @@ def create_database_server(self):
document["is_server_prepared"] = True
document["is_server_setup"] = True
document["is_server_renamed"] = True
- document["mariadb_root_password"] = frappe.get_doc(
- "Virtual Machine Image", self.virtual_machine_image
- ).get_password("mariadb_root_password")
+ if self.data_disk_snapshot:
+ document["mariadb_root_password"] = get_decrypted_password(
+ "Virtual Disk Snapshot", self.data_disk_snapshot, "mariadb_root_password"
+ )
+ else:
+ document["mariadb_root_password"] = get_decrypted_password(
+ "Virtual Machine Image", self.virtual_machine_image, "mariadb_root_password"
+ )
- return frappe.get_doc(document).insert()
+ if not document["mariadb_root_password"]:
+ frappe.throw(
+ f"Virtual Machine Image {self.virtual_machine_image} does not have a MariaDB root password set."
+ )
+ else:
+ document["is_provisioning_press_job_completed"] = True
+
+ server = frappe.get_doc(document).insert()
+ frappe.msgprint(frappe.get_desk_link(server.doctype, server.name))
+ return server
+
+ def get_root_domains(self):
+ return frappe.get_all("Root Domain", {"enabled": True}, pluck="name")
@frappe.whitelist()
- def create_proxy_server(self):
+ def create_proxy_server(self) -> ProxyServer:
document = {
"doctype": "Proxy Server",
"hostname": f"{self.series}{self.index}-{slug(self.cluster)}",
"domain": self.domain,
"cluster": self.cluster,
+ "provider": self.cloud_provider,
+ "virtual_machine": self.name,
+ "team": self.team,
+ "domains": [{"domain": domain} for domain in self.get_root_domains()],
+ }
+ if self.virtual_machine_image:
+ document["is_server_setup"] = True
+ document["is_primary"] = True
+
+ server = frappe.get_doc(document).insert()
+ frappe.msgprint(frappe.get_desk_link(server.doctype, server.name))
+ return server
+
+ @frappe.whitelist()
+ def create_monitor_server(self) -> MonitorServer:
+ document = {
+ "doctype": "Monitor Server",
+ "hostname": f"{self.series}{self.index}-{slug(self.cluster)}",
+ "domain": self.domain,
+ "cluster": self.cluster,
+ "provider": self.cloud_provider,
+ "virtual_machine": self.name,
+ "team": self.team,
+ }
+ if self.virtual_machine_image:
+ document["is_server_setup"] = True
+
+ server = frappe.get_doc(document).insert()
+ frappe.msgprint(frappe.get_desk_link(server.doctype, server.name))
+ return server
+
+ @frappe.whitelist()
+ def create_log_server(self) -> LogServer:
+ document = {
+ "doctype": "Log Server",
+ "hostname": f"{self.series}{self.index}-{slug(self.cluster)}",
+ "domain": self.domain,
+ "cluster": self.cluster,
+ "provider": self.cloud_provider,
+ "virtual_machine": self.name,
+ "team": self.team,
+ }
+ if self.virtual_machine_image:
+ document["is_server_setup"] = True
+
+ server = frappe.get_doc(document).insert()
+ frappe.msgprint(frappe.get_desk_link(server.doctype, server.name))
+ return server
+
+ @frappe.whitelist()
+ def create_registry_server(self):
+ document = {
+ "doctype": "Registry Server",
+ "hostname": f"{self.series}{self.index}-{slug(self.cluster)}",
+ "domain": self.domain,
+ "cluster": self.cluster,
"provider": "AWS EC2",
"virtual_machine": self.name,
"team": self.team,
@@ -394,36 +1962,867 @@ def create_proxy_server(self):
if self.virtual_machine_image:
document["is_server_setup"] = True
- return frappe.get_doc(document).insert()
+ server = frappe.get_doc(document).insert()
+ frappe.msgprint(frappe.get_desk_link(server.doctype, server.name))
+ return server
def get_security_groups(self):
- groups = [self.aws_security_group_id]
+ groups = [self.security_group_id]
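+ # Proxy machines (series "n") additionally join the cluster's proxy security group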
if self.series == "n":
- groups.append(
- frappe.db.get_value("Cluster", self.cluster, "aws_proxy_security_group_id")
- )
+ groups.append(frappe.db.get_value("Cluster", self.cluster, "proxy_security_group_id"))
return groups
+ @frappe.whitelist()
+ def get_serial_console_credentials(self):
+ client = self.client("ec2-instance-connect")
+ client.send_serial_console_ssh_public_key(
+ InstanceId=self.instance_id,
+ SSHPublicKey=frappe.db.get_value("SSH Key", self.ssh_key, "public_key"),
+ )
+ serial_console_endpoint = AWS_SERIAL_CONSOLE_ENDPOINT_MAP[self.region]
+ username = f"{self.instance_id}.port0"
+ host = serial_console_endpoint["endpoint"]
+ return {
+ "username": username,
+ "host": host,
+ "fingerprint": serial_console_endpoint["fingerprint"],
+ "command": f"ssh {username}@{host}",
+ }
+
+ @frappe.whitelist()
+ def reboot_with_serial_console(self):
+ if self.cloud_provider == "AWS EC2":
+ self.get_server().reboot_with_serial_console()
-get_permission_query_conditions = get_permission_query_conditions_for_doctype(
- "Virtual Machine"
-)
+ if server := self.get_server():
+ log_server_activity(
+ self.series,
+ server.name,
+ action="Reboot",
+ reason="Unable to reboot manually, rebooting with serial console",
+ )
+
+ self.sync()
+
+ @classmethod
+ def bulk_sync_aws(cls):
+ try:
+ clusters = frappe.get_all(
+ "Virtual Machine",
+ ["cluster", "cloud_provider", "max(`index`) as max_index"],
+ {
+ "status": ("not in", ("Terminated", "Draft")),
+ "cloud_provider": "AWS EC2",
+ },
+ group_by="cluster",
+ )
+ except: # noqa: E722 -- fall back to dict-style aggregates on Frappe versions that reject raw SQL in fields
+ clusters = frappe.get_all(
+ "Virtual Machine",
+ ["cluster", "cloud_provider", {"MAX": "index", "as": "max_index"}],
+ {
+ "status": ("not in", ("Terminated", "Draft")),
+ "cloud_provider": "AWS EC2",
+ },
+ group_by="cluster",
+ )
+ for cluster in clusters:
+ CHUNK_SIZE = 25 # Each call will pick up ~50 machines (2 x CHUNK_SIZE)
+ # Generate closed bounds for 25 indexes at a time
+ # (1, 25), (26, 50), (51, 75), ...
+ # We might have uneven chunks because of missing indexes
+ chunks = [(ii, ii + CHUNK_SIZE - 1) for ii in range(1, cluster.max_index, CHUNK_SIZE)]
+ for start, end in chunks:
+ # Pick a random machine
+ # TODO: This probably should be a method on the Cluster
+ machines = cls._get_active_machines_within_chunk_range(
+ cluster.cloud_provider, cluster.cluster, start, end
+ )
+ if not machines:
+ # There might not be any running machines in the chunk range
+ continue
+
+ frappe.enqueue_doc(
+ "Virtual Machine",
+ machines[0].name,
+ method="bulk_sync_aws_cluster",
+ start=start,
+ end=end,
+ queue="sync",
+ job_id=f"bulk_sync_aws:{cluster.cluster}:{start}-{end}",
+ deduplicate=True,
+ )
+
+ def bulk_sync_aws_cluster(self, start, end):
+ client = self.client()
+ machines = self.__class__._get_active_machines_within_chunk_range(
+ self.cloud_provider, self.cluster, start, end
+ )
+ instance_ids = [machine.instance_id for machine in machines]
+ response = client.describe_instances(Filters=[{"Name": "instance-id", "Values": instance_ids}])
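+ # A single describe call covers the whole chunk; each machine then syncs from the shared response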
+ for reservation in response["Reservations"]:
+ for instance in reservation["Instances"]:
+ machine: VirtualMachine = frappe.get_doc(
+ "Virtual Machine", {"instance_id": instance["InstanceId"]}
+ )
+ try:
+ machine.sync({"Reservations": [{"Instances": [instance]}]})
+ frappe.db.commit() # release lock
+ except Exception:
+ log_error("Virtual Machine Sync Error", virtual_machine=machine.name)
+ frappe.db.rollback()
+
+ @classmethod
+ def _get_active_machines_within_chunk_range(cls, provider, cluster, start, end):
+ return frappe.get_all(
+ "Virtual Machine",
+ fields=["name", "instance_id"],
+ filters=[
+ ["status", "not in", ("Terminated", "Draft")],
+ ["cloud_provider", "=", provider],
+ ["cluster", "=", cluster],
+ ["instance_id", "is", "set"],
+ ["index", ">=", start],
+ ["index", "<=", end],
+ ],
+ )
+
+ @classmethod
+ def bulk_sync_oci(cls):
+ for cluster in frappe.get_all("Cluster", {"cloud_provider": "OCI"}, pluck="name"):
+ # pick any random non-terminated machine from the cluster
+ machines = frappe.get_all(
+ "Virtual Machine",
+ filters={
+ "status": ("not in", ("Terminated", "Draft")),
+ "cloud_provider": "OCI",
+ "cluster": cluster,
+ "instance_id": ("is", "set"),
+ },
+ pluck="name",
+ limit=1,
+ )
+ if not machines:
+ continue
+ frappe.enqueue_doc(
+ "Virtual Machine",
+ machines[0],
+ method="bulk_sync_oci_cluster",
+ queue="sync",
+ job_id=f"bulk_sync_oci:{cluster}",
+ deduplicate=True,
+ cluster_name=cluster,
+ )
+
+ def bulk_sync_oci_cluster(self, cluster_name: str):
+ cluster: Cluster = frappe.get_doc("Cluster", cluster_name)
+ client: "ComputeClient" = self.client()
+
+ try:
+ response = oci_pagination.list_call_get_all_results(
+ client.list_instances, compartment_id=cluster.oci_tenancy
+ ).data
+
+ instance_ids = frappe.get_all(
+ "Virtual Machine",
+ filters={
+ "status": ("not in", ("Terminated", "Draft")),
+ "cloud_provider": "OCI",
+ "cluster": cluster.name,
+ "instance_id": ("is", "set"),
+ },
+ pluck="instance_id",
+ )
+ instance_ids = set(instance_ids)
+ # filter out non-existing instances
+ response = [instance for instance in response if instance.id in instance_ids]
+
+ # Split into batches
+ BATCH_SIZE = 15
+ for i in range(0, len(response), BATCH_SIZE):
+ frappe.enqueue_doc(
+ "Virtual Machine",
+ self.name,
+ method="bulk_sync_oci_cluster_in_batch",
+ queue="sync",
+ job_id=f"bulk_sync_oci_batch:{cluster.name}:{i}-{i + BATCH_SIZE}",
+ deduplicate=True,
+ enqueue_after_commit=True,
+ instances=response[i : i + BATCH_SIZE],
+ )
+ except Exception:
+ log_error("Virtual Machine OCI Bulk Sync Error", cluster=cluster.name)
+ frappe.db.rollback()
+
+ def bulk_sync_oci_cluster_in_batch(self, instances: list[frappe._dict]):
+ for instance in instances:
+ machine: VirtualMachine = frappe.get_doc("Virtual Machine", {"instance_id": instance.id})
+ if has_job_timeout_exceeded():
+ return
+ try:
+ machine.sync(instance=instance)
+ frappe.db.commit() # release lock
+ except rq.timeouts.JobTimeoutException:
+ return
+ except Exception:
+ log_error("Virtual Machine Sync Error", virtual_machine=machine.name)
+ frappe.db.rollback()
+
+ def disable_delete_on_termination_for_all_volumes(self):
+ attached_volumes = self.client().describe_instance_attribute(
+ InstanceId=self.instance_id, Attribute="blockDeviceMapping"
+ )
+
+ modified_volumes = []
+ for volume in attached_volumes["BlockDeviceMappings"]:
+ volume["Ebs"]["DeleteOnTermination"] = False
+ volume["Ebs"].pop("AttachTime", None)
+ volume["Ebs"].pop("Status", None)
+ modified_volumes.append(volume)
+
+ self.client().modify_instance_attribute(
+ InstanceId=self.instance_id, BlockDeviceMappings=modified_volumes
+ )
+
+ def _create_vmm(self, virtual_machine_image: str, machine_type: str) -> VirtualMachineMigration:
+ return frappe.new_doc(
+ "Virtual Machine Migration",
+ virtual_machine=self.name,
+ virtual_machine_image=virtual_machine_image,
+ machine_type=machine_type,
+ ).insert()
+
+ @frappe.whitelist()
+ def convert_to_arm(self, virtual_machine_image, machine_type):
+ if self.series == "f" and not self.ready_for_conversion:
+ frappe.throw("Please complete pre-migration steps before migrating", frappe.ValidationError)
+
+ return self._create_vmm(virtual_machine_image, machine_type)
+
+ @frappe.whitelist()
+ def convert_to_amd(self, virtual_machine_image, machine_type):
+ return self._create_vmm(virtual_machine_image, machine_type)
+
+ def attach_new_volume_aws_oci(self, size, iops=None, throughput=None, log_activity: bool = True):
+ volume_options = {
+ "AvailabilityZone": self.availability_zone,
+ "Size": size,
+ "VolumeType": "gp3",
+ "TagSpecifications": [
+ {
+ "ResourceType": "volume",
+ "Tags": [{"Key": "Name", "Value": f"Frappe Cloud - {self.name}"}],
+ },
+ ],
+ }
+ if iops:
+ volume_options["Iops"] = iops
+ if throughput:
+ volume_options["Throughput"] = throughput
+
+ if self.kms_key_id:
+ volume_options["Encrypted"] = True
+ volume_options["KmsKeyId"] = self.kms_key_id
+ volume_id = self.client().create_volume(**volume_options)["VolumeId"]
+ self.wait_for_volume_to_be_available(volume_id)
+ self.attach_volume(volume_id)
+
+ if log_activity and (server := self.get_server()):
+ log_server_activity(
+ self.series,
+ server.name,
+ action="Volume",
+ reason="Volume attached on server",
+ )
+
+ return volume_id
+
+ def attach_new_volume_hetzner(self, size, iops=None, throughput=None, log_activity: bool = True):
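+ # Hetzner volumes have no configurable IOPS/throughput;
+ # the parameters exist only for interface parity.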
+ _ = iops
+ _ = throughput
+
+ volume_create_request = self.client().volumes.create(
+ size=size,
+ name=f"{self.name}-vol-{frappe.generate_hash(length=8)}",
+ format="ext4",
+ automount=False,
+ server=self.get_hetzner_server_instance(fetch_data=False),
+ )
+ volume_create_request.action.wait_until_finished(HETZNER_ACTION_RETRIES)
+ for action in volume_create_request.next_actions:
+ action.wait_until_finished(HETZNER_ACTION_RETRIES)
+
+ if log_activity and (server := self.get_server()):
+ log_server_activity(
+ self.series,
+ server.name,
+ action="Volume",
+ reason="Volume attached on server",
+ )
+
+ return volume_create_request.volume.id
+
+ @frappe.whitelist()
+ def attach_new_volume(self, size, iops=None, throughput=None, log_activity: bool = True):
+ if self.cloud_provider in ["AWS EC2", "OCI"]:
+ return self.attach_new_volume_aws_oci(size, iops, throughput, log_activity)
+
+ if self.cloud_provider == "Hetzner":
+ return self.attach_new_volume_hetzner(size=size, log_activity=log_activity)
+ return None
+ def wait_for_volume_to_be_available(self, volume_id):
+ # AWS EC2 specific
+ while self.get_state_of_volume(volume_id) != "available":
+ time.sleep(1)
+
+ def get_state_of_volume(self, volume_id):
+ if self.cloud_provider != "AWS EC2":
+ raise NotImplementedError
+ try:
+ # AWS EC2 specific
+ # https://docs.aws.amazon.com/ebs/latest/userguide/ebs-describing-volumes.html
+ return self.client().describe_volumes(VolumeIds=[volume_id])["Volumes"][0]["State"]
+ except botocore.exceptions.ClientError as e:
+ if e.response.get("Error", {}).get("Code") == "InvalidVolume.NotFound":
+ return "deleted"
+
+ def get_volume_modifications(self, volume_id):
+ if self.cloud_provider != "AWS EC2":
+ raise NotImplementedError
+
+ # AWS EC2 specific https://docs.aws.amazon.com/ebs/latest/userguide/monitoring-volume-modifications.html
+
+ try:
+ return self.client().describe_volumes_modifications(VolumeIds=[volume_id])[
+ "VolumesModifications"
+ ][0]
+ except botocore.exceptions.ClientError as e:
+ if e.response.get("Error", {}).get("Code") == "InvalidVolumeModification.NotFound":
+ return None
+
+ def get_digital_ocean_server_instance(self):
+ """Get digital ocean droplet instance"""
+ if self.cloud_provider != "DigitalOcean":
+ raise NotImplementedError
+
+ return self.client().droplets.get(self.instance_id)
+
+ def get_hetzner_server_instance(self, fetch_data=True):
+ if self.cloud_provider != "Hetzner":
+ raise NotImplementedError
+
+ if fetch_data:
+ return self.client().servers.get_by_id(self.instance_id)
+
+ from hcloud.servers.domain import Server as HetznerServer
+
+ return HetznerServer(cint(self.instance_id))
+
+ @frappe.whitelist()
+ def attach_volume(self, volume_id, is_temporary_volume: bool = False):
+ """
+ temporary_volumes: If you are attaching a volume to an instance just for temporary use, then set this to True.
+
+ Then, snapshot and other stuff will be ignored for this volume.
+ """
+ if self.cloud_provider == "AWS EC2":
+ # Attach a volume to the instance and return the device name
+ device_name = self.get_next_volume_device_name()
+ self.client().attach_volume(
+ Device=device_name,
+ InstanceId=self.instance_id,
+ VolumeId=volume_id,
+ )
+
+ elif self.cloud_provider == "Hetzner":
+ volume = self.client().volumes.get_by_id(int(volume_id))
+
+ """
+ This is a temporary assignment of linux_device from Hetzner API to
+ device_name. linux_device is actually the mountpoint of the volume.
+ Example: linux_device = /mnt/HC_Volume_103061048
+ """
+ device_name = volume.linux_device
+ for action in volume.get_actions():
+ action.wait_until_finished(HETZNER_ACTION_RETRIES) # wait for previous actions to finish
+
+ self.client().volumes.attach(
+ volume, self.get_hetzner_server_instance(fetch_data=False), automount=False
+ )
+ else:
+ raise NotImplementedError
+
+ if is_temporary_volume:
+ # add the volume to the list of temporary volumes
+ self.append("temporary_volumes", {"device": device_name})
+
+ self.save()
+ self.sync()
+ return device_name
+
+ def get_next_volume_device_name(self):
+ # Hold the lock so that we don't allocate the same device name to multiple volumes
+ frappe.db.get_value(self.doctype, self.name, "status", for_update=True)
+ # First volume starts from /dev/sdf
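+ # e.g. with /dev/sdf and /dev/sdg already taken, the next volume gets /dev/sdh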
+ used_devices = {v.device for v in self.volumes} | {v.device for v in self.temporary_volumes}
+ for i in range(5, 26): # 'f' to 'z'
+ device_name = f"/dev/sd{chr(ord('a') + i)}"
+ if device_name not in used_devices:
+ return device_name
+ frappe.throw("No device name available for new volume")
+ return None
+
+ @frappe.whitelist()
+ def detach(self, volume_id, sync: bool | None = None):
+ if self.cloud_provider == "AWS EC2":
+ volume = find(self.volumes, lambda v: v.volume_id == volume_id)
+ if not volume:
+ return False
+ self.client().detach_volume(
+ Device=volume.device, InstanceId=self.instance_id, VolumeId=volume.volume_id
+ )
+ elif self.cloud_provider == "OCI":
+ raise NotImplementedError
+ elif self.cloud_provider == "Hetzner":
+ from hcloud.volumes.domain import Volume
+
+ if volume_id == HETZNER_ROOT_DISK_ID:
+ frappe.throw("Cannot detach hetzner root disk.")
+
+ self.client().volumes.detach(Volume(id=volume_id)).wait_until_finished(HETZNER_ACTION_RETRIES)
+ if sync:
+ self.sync()
+ return True
+
+ @frappe.whitelist()
+ def delete_volume(self, volume_id, sync: bool | None = None): # noqa: C901
+ if sync is None:
+ sync = True
+
+ if self.detach(volume_id, sync=sync):
+ if self.cloud_provider == "AWS EC2":
+ self.wait_for_volume_to_be_available(volume_id)
+ self.client().delete_volume(VolumeId=volume_id)
+ self.add_comment("Comment", f"Volume Deleted - {volume_id}")
+ if self.cloud_provider == "OCI":
+ raise NotImplementedError
+ if self.cloud_provider == "Hetzner":
+ if volume_id == HETZNER_ROOT_DISK_ID:
+ frappe.throw("Cannot delete hetzner root disk.")
+
+ from hcloud.volumes.domain import Volume
+
+ self.client().volumes.delete(Volume(id=cint(volume_id)))
+
+ if self.cloud_provider == "DigitalOcean":
+ if volume_id == DIGITALOCEAN_ROOT_DISK_ID:
+ frappe.throw("Cannot delete digitalocean root disk.")
+
+ self.client().volumes.delete(volume_id=volume_id)
+
+ if sync:
+ self.sync()
+
+ def detach_static_ip(self):
+ if self.cloud_provider != "AWS EC2" or not self.is_static_ip:
+ return
+
+ client = self.client()
+ response = client.describe_addresses(PublicIps=[self.public_ip_address])
+
+ address_info = response["Addresses"][0]
+ if "AssociationId" not in address_info:
+ return
+
+ client.disassociate_address(AssociationId=address_info["AssociationId"])
+ self.sync()
+
+ def attach_static_ip(self, static_ip):
+ if self.cloud_provider != "AWS EC2":
+ return
+
+ if self.is_static_ip:
+ frappe.throw("Virtual Machine already has a static IP associated.")
+
+ client = self.client()
+ response = client.describe_addresses(PublicIps=[static_ip])
+
+ address_info = response["Addresses"][0]
+ if "AssociationId" in address_info:
+ frappe.throw("Static IP is already associated with another instance.")
+
+ client.associate_address(AllocationId=address_info["AllocationId"], InstanceId=self.instance_id)
+ self.sync()
+
+
+get_permission_query_conditions = get_permission_query_conditions_for_doctype("Virtual Machine")
+
+
+def sync_virtual_machines_hetzner():
+ for machine in frappe.get_all(
+ "Virtual Machine",
+ {"status": ("not in", ("Draft", "Terminated")), "cloud_provider": "Hetzner"},
+ pluck="name",
+ ):
+ if has_job_timeout_exceeded():
+ return
+ try:
+ VirtualMachine("Virtual Machine", machine).sync()
+ frappe.db.commit() # release lock
+ except Exception:
+ log_error(title="Virtual Machine Sync Error", virtual_machine=machine)
+ frappe.db.rollback()
+
+
+@frappe.whitelist()
def sync_virtual_machines():
+ VirtualMachine.bulk_sync_aws()
+ VirtualMachine.bulk_sync_oci()
+ sync_virtual_machines_hetzner()
+
+
+def snapshot_oci_virtual_machines():
machines = frappe.get_all(
- "Virtual Machine", {"status": ("not in", ("Terminated", "Draft"))}
+ "Virtual Machine", {"status": "Running", "skip_automated_snapshot": 0, "cloud_provider": "OCI"}
)
for machine in machines:
- frappe.enqueue_doc("Virtual Machine", machine.name, "sync")
+ # Skip if a snapshot has already been created today
+ if frappe.get_all(
+ "Virtual Disk Snapshot",
+ {
+ "virtual_machine": machine.name,
+ "physical_backup": 0,
+ "rolling_snapshot": 0,
+ "creation": (">=", frappe.utils.today()),
+ },
+ limit=1,
+ ):
+ continue
+ try:
+ frappe.get_doc("Virtual Machine", machine.name).create_snapshots()
+ frappe.db.commit()
+ except Exception:
+ frappe.db.rollback()
+ log_error(title="Virtual Machine Snapshot Error", virtual_machine=machine.name)
-def snapshot_virtual_machines():
- machines = frappe.get_all("Virtual Machine", {"status": "Running"})
+def snapshot_hetzner_virtual_machines():
+ machines = frappe.get_all(
+ "Virtual Machine", {"status": "Running", "skip_automated_snapshot": 0, "cloud_provider": "Hetzner"}
+ )
for machine in machines:
+ # Skip if a snapshot has already been created today
+ if frappe.get_all(
+ "Virtual Disk Snapshot",
+ {
+ "virtual_machine": machine.name,
+ "physical_backup": 0,
+ "rolling_snapshot": 0,
+ "creation": (">=", frappe.utils.today()),
+ },
+ limit=1,
+ ):
+ continue
try:
frappe.get_doc("Virtual Machine", machine.name).create_snapshots()
frappe.db.commit()
except Exception:
frappe.db.rollback()
log_error(title="Virtual Machine Snapshot Error", virtual_machine=machine.name)
+
+
+def snapshot_aws_internal_virtual_machines():
+ machines = frappe.get_all(
+ "Virtual Machine",
+ {
+ "status": "Running",
+ "skip_automated_snapshot": 0,
+ "cloud_provider": "AWS EC2",
+ "series": ("not in", ["f", "m"]),
+ },
+ pluck="name",
+ )
+ server_snapshot_disabled_vms = frappe.get_all(
+ "Virtual Machine",
+ {
+ "status": "Running",
+ "skip_automated_snapshot": 0,
+ "cloud_provider": "AWS EC2",
+ "disable_server_snapshot": 1,
+ "series": ("in", ["f", "m"]),
+ },
+ pluck="name",
+ )
+ machines.extend(server_snapshot_disabled_vms)
+
+ for machine in machines:
+ # Skip if a snapshot has already been created today
+ if frappe.get_all(
+ "Virtual Disk Snapshot",
+ {
+ "virtual_machine": machine,
+ "physical_backup": 0,
+ "rolling_snapshot": 0,
+ "creation": (">=", frappe.utils.today()),
+ },
+ limit=1,
+ ):
+ continue
+ try:
+ frappe.get_doc("Virtual Machine", machine).create_snapshots()
+ frappe.db.commit()
+ except Exception:
+ frappe.db.rollback()
+ log_error(title="Virtual Machine Snapshot Error", virtual_machine=machine)
+
+
+def snapshot_aws_servers():
+ servers_with_snapshot = frappe.get_all(
+ "Server Snapshot",
+ {
+ "status": ["in", ["Pending", "Processing", "Completed"]],
+ "consistent": 0,
+ "free": 1,
+ "creation": (">=", frappe.utils.today()),
+ },
+ pluck="app_server",
+ )
+ vms_with_snapshot = frappe.get_all(
+ "Server", {"name": ("in", servers_with_snapshot)}, pluck="virtual_machine"
+ )
+ machines = frappe.get_all(
+ "Virtual Machine",
+ {
+ "name": ("not in", vms_with_snapshot),
+ "status": "Running",
+ "skip_automated_snapshot": 0,
+ "cloud_provider": "AWS EC2",
+ "series": "f",
+ "disable_server_snapshot": 0,
+ },
+ order_by="RAND()",
+ pluck="name",
+ limit_page_length=50,
+ )
+ for machine in machines:
+ if has_job_timeout_exceeded():
+ return
+ app_server = frappe.get_value("Server", {"virtual_machine": machine}, "name")
+ try:
+ server: "Server" = frappe.get_doc("Server", app_server)
+ servers = [
+ ["Server", server.name],
+ ["Database Server", server.database_server],
+ ]
+ # Check if any press job is running on the server or the db server
+ is_press_job_running = False
+ for server_type, name in servers:
+ if (
+ frappe.db.count(
+ "Press Job",
+ filters={
+ "status": ("in", ["Pending", "Running"]),
+ "server_type": server_type,
+ "server": name,
+ },
+ )
+ > 0
+ ):
+ is_press_job_running = True
+ break
+
+ # Also skip if the server was created within the last hour
+ # to avoid snapshotting a blank server that is still being set up
+ if is_press_job_running or server.creation > frappe.utils.add_to_date(None, hours=-1):
+ continue
+
+ server._create_snapshot(consistent=False, expire_at=frappe.utils.add_days(None, 2), free=True)
+ frappe.db.commit()
+ except Exception:
+ frappe.db.rollback()
+ log_error(title="Server Snapshot Error", virtual_machine=machine)
+
+
+def rolling_snapshot_database_server_virtual_machines():
+ # For now, restrict this to database servers with physical backup enabled
+ virtual_machines = frappe.get_all(
+ "Database Server",
+ filters={
+ "status": "Active",
+ "enable_physical_backup": 1,
+ },
+ pluck="name",
+ )
+
+ # Find virtual machines with automated snapshots explicitly skipped
+ ignorable_virtual_machines = set(
+ frappe.get_all("Virtual Machine", {"skip_automated_snapshot": 1}, pluck="name")
+ )
+
+ start_time = time.time()
+ for virtual_machine_name in virtual_machines:
+ if has_job_timeout_exceeded():
+ return
+
+ # Don't spend more than 15 minutes in snapshotting
+ if time.time() - start_time > 900:
+ break
+
+ if virtual_machine_name in ignorable_virtual_machines:
+ continue
+
+ # Skip if a valid snapshot already exists from the last 2 hours
+ if frappe.get_all(
+ "Virtual Disk Snapshot",
+ {
+ "status": [
+ "in",
+ ["Pending", "Completed"],
+ ],
+ "virtual_machine": virtual_machine_name,
+ "physical_backup": 0,
+ "rolling_snapshot": 1,
+ "creation": (">=", frappe.utils.add_to_date(None, hours=-2)),
+ },
+ limit=1,
+ ):
+ continue
+
+ try:
+ # If the VM has multiple volumes, exclude the boot volume
+ frappe.get_doc("Virtual Machine", virtual_machine_name).create_snapshots(
+ exclude_boot_volume=True, rolling_snapshot=True
+ )
+ frappe.db.commit()
+ except Exception:
+ frappe.db.rollback()
+
+
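+# Map of AWS region -> EC2 Serial Console endpoint and the SSH host key
+# fingerprint used to verify that endpoint when connecting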
+AWS_SERIAL_CONSOLE_ENDPOINT_MAP = {
+ "us-east-2": {
+ "endpoint": "serial-console.ec2-instance-connect.us-east-2.aws",
+ "fingerprint": "SHA256:EhwPkTzRtTY7TRSzz26XbB0/HvV9jRM7mCZN0xw/d/0",
+ },
+ "us-east-1": {
+ "endpoint": "serial-console.ec2-instance-connect.us-east-1.aws",
+ "fingerprint": "SHA256:dXwn5ma/xadVMeBZGEru5l2gx+yI5LDiJaLUcz0FMmw",
+ },
+ "us-west-1": {
+ "endpoint": "serial-console.ec2-instance-connect.us-west-1.aws",
+ "fingerprint": "SHA256:OHldlcMET8u7QLSX3jmRTRAPFHVtqbyoLZBMUCqiH3Y",
+ },
+ "us-west-2": {
+ "endpoint": "serial-console.ec2-instance-connect.us-west-2.aws",
+ "fingerprint": "SHA256:EMCIe23TqKaBI6yGHainqZcMwqNkDhhAVHa1O2JxVUc",
+ },
+ "af-south-1": {
+ "endpoint": "ec2-serial-console.af-south-1.api.aws",
+ "fingerprint": "SHA256:RMWWZ2fVePeJUqzjO5jL2KIgXsczoHlz21Ed00biiWI",
+ },
+ "ap-east-1": {
+ "endpoint": "ec2-serial-console.ap-east-1.api.aws",
+ "fingerprint": "SHA256:T0Q1lpiXxChoZHplnAkjbP7tkm2xXViC9bJFsjYnifk",
+ },
+ "ap-south-2": {
+ "endpoint": "ec2-serial-console.ap-south-2.api.aws",
+ "fingerprint": "SHA256:WJgPBSwV4/shN+OPITValoewAuYj15DVW845JEhDKRs",
+ },
+ "ap-southeast-3": {
+ "endpoint": "ec2-serial-console.ap-southeast-3.api.aws",
+ "fingerprint": "SHA256:5ZwgrCh+lfns32XITqL/4O0zIfbx4bZgsYFqy3o8mIk",
+ },
+ "ap-southeast-4": {
+ "endpoint": "ec2-serial-console.ap-southeast-4.api.aws",
+ "fingerprint": "SHA256:Avaq27hFgLvjn5gTSShZ0oV7h90p0GG46wfOeT6ZJvM",
+ },
+ "ap-south-1": {
+ "endpoint": "serial-console.ec2-instance-connect.ap-south-1.aws",
+ "fingerprint": "SHA256:oBLXcYmklqHHEbliARxEgH8IsO51rezTPiSM35BsU40",
+ },
+ "ap-northeast-3": {
+ "endpoint": "ec2-serial-console.ap-northeast-3.api.aws",
+ "fingerprint": "SHA256:Am0/jiBKBnBuFnHr9aXsgEV3G8Tu/vVHFXE/3UcyjsQ",
+ },
+ "ap-northeast-2": {
+ "endpoint": "serial-console.ec2-instance-connect.ap-northeast-2.aws",
+ "fingerprint": "SHA256:FoqWXNX+DZ++GuNTztg9PK49WYMqBX+FrcZM2dSrqrI",
+ },
+ "ap-southeast-1": {
+ "endpoint": "serial-console.ec2-instance-connect.ap-southeast-1.aws",
+ "fingerprint": "SHA256:PLFNn7WnCQDHx3qmwLu1Gy/O8TUX7LQgZuaC6L45CoY",
+ },
+ "ap-southeast-2": {
+ "endpoint": "serial-console.ec2-instance-connect.ap-southeast-2.aws",
+ "fingerprint": "SHA256:yFvMwUK9lEUQjQTRoXXzuN+cW9/VSe9W984Cf5Tgzo4",
+ },
+ "ap-northeast-1": {
+ "endpoint": "serial-console.ec2-instance-connect.ap-northeast-2.aws",
+ "fingerprint": "SHA256:RQfsDCZTOfQawewTRDV1t9Em/HMrFQe+CRlIOT5um4k",
+ },
+ "ca-central-1": {
+ "endpoint": "serial-console.ec2-instance-connect.ca-central-1.aws",
+ "fingerprint": "SHA256:P2O2jOZwmpMwkpO6YW738FIOTHdUTyEv2gczYMMO7s4",
+ },
+ "cn-north-1": {
+ "endpoint": "ec2-serial-console.cn-north-1.api.amazonwebservices.com.cn",
+ "fingerprint": "SHA256:2gHVFy4H7uU3+WaFUxD28v/ggMeqjvSlgngpgLgGT+Y",
+ },
+ "cn-northwest-1": {
+ "endpoint": "ec2-serial-console.cn-northwest-1.api.amazonwebservices.com.cn",
+ "fingerprint": "SHA256:TdgrNZkiQOdVfYEBUhO4SzUA09VWI5rYOZGTogpwmiM",
+ },
+ "eu-central-1": {
+ "endpoint": "serial-console.ec2-instance-connect.eu-central-1.aws",
+ "fingerprint": "SHA256:aCMFS/yIcOdOlkXvOl8AmZ1Toe+bBnrJJ3Fy0k0De2c",
+ },
+ "eu-west-1": {
+ "endpoint": "serial-console.ec2-instance-connect.eu-west-1.aws",
+ "fingerprint": "SHA256:h2AaGAWO4Hathhtm6ezs3Bj7udgUxi2qTrHjZAwCW6E",
+ },
+ "eu-west-2": {
+ "endpoint": "serial-console.ec2-instance-connect.eu-west-2.aws",
+ "fingerprint": "SHA256:a69rd5CE/AEG4Amm53I6lkD1ZPvS/BCV3tTPW2RnJg8",
+ },
+ "eu-south-1": {
+ "endpoint": "ec2-serial-console.eu-south-1.api.aws",
+ "fingerprint": "SHA256:lC0kOVJnpgFyBVrxn0A7n99ecLbXSX95cuuS7X7QK30",
+ },
+ "eu-west-3": {
+ "endpoint": "serial-console.ec2-instance-connect.eu-west-3.aws",
+ "fingerprint": "SHA256:q8ldnAf9pymeNe8BnFVngY3RPAr/kxswJUzfrlxeEWs",
+ },
+ "eu-south-2": {
+ "endpoint": "ec2-serial-console.eu-south-2.api.aws",
+ "fingerprint": "SHA256:GoCW2DFRlu669QNxqFxEcsR6fZUz/4F4n7T45ZcwoEc",
+ },
+ "eu-north-1": {
+ "endpoint": "serial-console.ec2-instance-connect.eu-north-1.aws",
+ "fingerprint": "SHA256:tkGFFUVUDvocDiGSS3Cu8Gdl6w2uI32EPNpKFKLwX84",
+ },
+ "eu-central-2": {
+ "endpoint": "ec2-serial-console.eu-central-2.api.aws",
+ "fingerprint": "SHA256:8Ppx2mBMf6WdCw0NUlzKfwM4/IfRz4OaXFutQXWp6mk",
+ },
+ "me-south-1": {
+ "endpoint": "ec2-serial-console.me-south-1.api.aws",
+ "fingerprint": "SHA256:nPjLLKHu2QnLdUq2kVArsoK5xvPJOMRJKCBzCDqC3k8",
+ },
+ "me-central-1": {
+ "endpoint": "ec2-serial-console.me-central-1.api.aws",
+ "fingerprint": "SHA256:zpb5duKiBZ+l0dFwPeyykB4MPBYhI/XzXNeFSDKBvLE",
+ },
+ "sa-east-1": {
+ "endpoint": "ec2-serial-console.sa-east-1.api.aws",
+ "fingerprint": "SHA256:rd2+/32Ognjew1yVIemENaQzC+Botbih62OqAPDq1dI",
+ },
+ "us-gov-east-1": {
+ "endpoint": "serial-console.ec2-instance-connect.us-gov-east-1.amazonaws.com",
+ "fingerprint": "SHA256:tIwe19GWsoyLClrtvu38YEEh+DHIkqnDcZnmtebvF28",
+ },
+ "us-gov-west-1": {
+ "endpoint": "serial-console.ec2-instance-connect.us-gov-west-1.amazonaws.com",
+ "fingerprint": "SHA256:kfOFRWLaOZfB+utbd3bRf8OlPf8nGO2YZLqXZiIw5DQ",
+ },
+}
diff --git a/press/press/doctype/virtual_machine/virtual_machine_list.js b/press/press/doctype/virtual_machine/virtual_machine_list.js
new file mode 100644
index 00000000000..333f8f63110
--- /dev/null
+++ b/press/press/doctype/virtual_machine/virtual_machine_list.js
@@ -0,0 +1,13 @@
+frappe.listview_settings['Virtual Machine'] = {
+ onload: function (list) {
+ list.page.add_menu_item(__('Sync'), function () {
+ frappe.call({
+ method:
+ 'press.press.doctype.virtual_machine.virtual_machine.sync_virtual_machines',
+ callback: function () {
+ list.refresh();
+ },
+ });
+ });
+ },
+};
diff --git a/press/press/doctype/virtual_machine_image/patches/rename_aws_fields.py b/press/press/doctype/virtual_machine_image/patches/rename_aws_fields.py
new file mode 100644
index 00000000000..751d2627738
--- /dev/null
+++ b/press/press/doctype/virtual_machine_image/patches/rename_aws_fields.py
@@ -0,0 +1,12 @@
+# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and Contributors
+# For license information, please see license.txt
+
+import frappe
+from frappe.model.utils.rename_field import rename_field
+
+
+def execute():
+ frappe.reload_doctype("Virtual Machine Image")
+ rename_field("Virtual Machine Image", "aws_instance_id", "instance_id")
+ rename_field("Virtual Machine Image", "aws_ami_id", "image_id")
+ rename_field("Virtual Machine Image", "aws_snapshot_id", "snapshot_id")
diff --git a/press/press/doctype/virtual_machine_image/patches/set_root_size.py b/press/press/doctype/virtual_machine_image/patches/set_root_size.py
new file mode 100644
index 00000000000..d8855fe0037
--- /dev/null
+++ b/press/press/doctype/virtual_machine_image/patches/set_root_size.py
@@ -0,0 +1,32 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+from __future__ import annotations
+
+import frappe
+
+
+def execute():
+ # Set `root_size` to `size`
+ frappe.db.sql("UPDATE `tabVirtual Machine Image` SET `root_size` = `size`")
+
+ # Set `size` and `root_size` on images with multiple volumes
+ multi_volume_images = frappe.db.sql(
+ """
+ SELECT image.name
+ FROM `tabVirtual Machine Image` image
+ LEFT JOIN `tabVirtual Machine Image Volume` volume
+ ON volume.parent = image.name
+ WHERE image.status = 'Available'
+ GROUP BY image.name
+ HAVING COUNT(volume.name) > 1
+ """,
+ as_dict=True,
+ )
+ for row in multi_volume_images:
+ image = frappe.get_doc("Virtual Machine Image", row.name)
+ image.has_data_volume = True
+ image.save()
+ size = image.get_data_volume().size
+ root_size = image.get_root_volume().size
+ frappe.db.set_value("Virtual Machine Image", image.name, "size", size)
+ frappe.db.set_value("Virtual Machine Image", image.name, "root_size", root_size)
diff --git a/press/press/doctype/virtual_machine_image/test_virtual_machine_image.py b/press/press/doctype/virtual_machine_image/test_virtual_machine_image.py
index 40464031b74..f28e766ad0a 100644
--- a/press/press/doctype/virtual_machine_image/test_virtual_machine_image.py
+++ b/press/press/doctype/virtual_machine_image/test_virtual_machine_image.py
@@ -1,24 +1,26 @@
# Copyright (c) 2022, Frappe and Contributors
# See license.txt
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from unittest.mock import MagicMock, patch
import frappe
from frappe.tests.utils import FrappeTestCase
-from press.press.doctype.cluster.cluster import Cluster
-from press.press.doctype.cluster.test_cluster import create_test_cluster
+from press.press.doctype.cluster.test_cluster import create_test_cluster
from press.press.doctype.virtual_machine_image.virtual_machine_image import (
VirtualMachineImage,
)
-from unittest.mock import patch, MagicMock
+if TYPE_CHECKING:
+ from press.press.doctype.cluster.cluster import Cluster
@patch.object(VirtualMachineImage, "client", new=MagicMock())
@patch.object(VirtualMachineImage, "after_insert", new=MagicMock())
def create_test_virtual_machine_image(
- ip: str = None,
- cluster: Cluster = None,
- series: str = "m",
+ ip: str | None = None, cluster: Cluster | None = None, series: str = "m", platform: str = "x86_64"
) -> VirtualMachineImage:
"""Create test Virtual Machine Image doc"""
if not ip:
@@ -29,13 +31,17 @@ def create_test_virtual_machine_image(
create_test_virtual_machine,
)
+ vm = create_test_virtual_machine(cluster=cluster, series=series, platform=platform)
+
return frappe.get_doc(
{
"doctype": "Virtual Machine Image",
- "virtual_machine": create_test_virtual_machine(cluster=cluster, series=series).name,
+ "virtual_machine": vm.name,
+ "region": vm.region,
"status": "Available",
- "aws_ami_id": "ami-1234567890",
+ "image_id": "ami-1234567890",
"mariadb_root_password": "password",
+ "platform": platform,
}
).insert(ignore_if_duplicate=True)
diff --git a/press/press/doctype/virtual_machine_image/virtual_machine_image.js b/press/press/doctype/virtual_machine_image/virtual_machine_image.js
index c4efd7aea57..c7547d1c6dc 100644
--- a/press/press/doctype/virtual_machine_image/virtual_machine_image.js
+++ b/press/press/doctype/virtual_machine_image/virtual_machine_image.js
@@ -28,10 +28,7 @@ frappe.ui.form.on('Virtual Machine Image', {
fieldname: 'cluster',
get_query: () => {
return {
- filters: [
- ['name', '!=', frm.doc.cluster],
- ['cloud_provider', '=', 'AWS EC2'],
- ],
+ filters: [['name', '!=', frm.doc.cluster]],
};
},
},
@@ -52,9 +49,9 @@ frappe.ui.form.on('Virtual Machine Image', {
},
__('Actions'),
);
- if (frm.doc.aws_ami_id) {
+ if (frm.doc.image_id) {
frm.add_web_link(
- `https://${frm.doc.region}.console.aws.amazon.com/ec2/v2/home?region=${frm.doc.region}#ImageDetails:imageId=${frm.doc.aws_ami_id}`,
+ `https://${frm.doc.region}.console.aws.amazon.com/ec2/v2/home?region=${frm.doc.region}#ImageDetails:imageId=${frm.doc.image_id}`,
__('Visit AWS Dashboard'),
);
}
diff --git a/press/press/doctype/virtual_machine_image/virtual_machine_image.json b/press/press/doctype/virtual_machine_image/virtual_machine_image.json
index b33f1670820..71e10709aae 100644
--- a/press/press/doctype/virtual_machine_image/virtual_machine_image.json
+++ b/press/press/doctype/virtual_machine_image/virtual_machine_image.json
@@ -7,19 +7,28 @@
"engine": "InnoDB",
"field_order": [
"virtual_machine",
- "aws_instance_id",
- "aws_ami_id",
- "aws_snapshot_id",
+ "instance_id",
+ "image_id",
+ "snapshot_id",
"status",
"size",
+ "root_size",
"column_break_5",
"cluster",
"region",
"platform",
+ "cloud_provider",
"series",
"copied_from",
+ "public",
+ "object_storage_uri",
"credentials_section",
- "mariadb_root_password"
+ "mariadb_root_password",
+ "section_break_acrc",
+ "has_data_volume",
+ "volumes",
+ "digital_ocean_action_section",
+ "action_id"
],
"fields": [
{
@@ -32,41 +41,20 @@
"set_only_once": 1
},
{
- "fetch_from": "virtual_machine.aws_instance_id",
- "fieldname": "aws_instance_id",
- "fieldtype": "Data",
- "in_list_view": 1,
- "label": "AWS Instance ID",
- "read_only": 1,
- "reqd": 1
- },
- {
- "fieldname": "aws_ami_id",
- "fieldtype": "Data",
- "label": "AWS AMI ID",
- "read_only": 1
- },
- {
+ "fetch_from": "virtual_machine.platform",
+ "fetch_if_empty": 1,
"fieldname": "platform",
"fieldtype": "Data",
"label": "Platform",
- "read_only": 1
- },
- {
- "fetch_from": "virtual_machine.cluster",
- "fetch_if_empty": 1,
- "fieldname": "cluster",
- "fieldtype": "Link",
- "label": "Cluster",
- "options": "Cluster",
- "read_only": 1,
- "reqd": 1
+ "reqd": 1,
+ "set_only_once": 1
},
{
"fetch_from": "cluster.region",
"fieldname": "region",
- "fieldtype": "Data",
+ "fieldtype": "Link",
"label": "Region",
+ "options": "Cloud Region",
"read_only": 1,
"reqd": 1
},
@@ -88,7 +76,7 @@
"fieldname": "series",
"fieldtype": "Select",
"label": "Series",
- "options": "n\nf\nm",
+ "options": "n\nf\nm\nc\np\ne\nr\nnfs\nfs\nu",
"read_only": 1,
"reqd": 1
},
@@ -109,23 +97,107 @@
"label": "MariaDB Root Password",
"read_only": 1
},
- {
- "fieldname": "aws_snapshot_id",
- "fieldtype": "Data",
- "label": "AWS Snapshot ID",
- "read_only": 1
- },
{
"fieldname": "copied_from",
"fieldtype": "Link",
"label": "Copied From",
"options": "Virtual Machine Image",
"read_only": 1
+ },
+ {
+ "fetch_from": "virtual_machine.cluster",
+ "fetch_if_empty": 1,
+ "fieldname": "cluster",
+ "fieldtype": "Link",
+ "in_standard_filter": 1,
+ "label": "Cluster",
+ "options": "Cluster",
+ "reqd": 1
+ },
+ {
+ "fetch_from": "virtual_machine.instance_id",
+ "fieldname": "instance_id",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Instance ID",
+ "read_only": 1,
+ "reqd": 1
+ },
+ {
+ "fieldname": "image_id",
+ "fieldtype": "Data",
+ "in_standard_filter": 1,
+ "label": "Image ID",
+ "read_only": 1
+ },
+ {
+ "fieldname": "snapshot_id",
+ "fieldtype": "Data",
+ "label": "Snapshot ID",
+ "read_only": 1
+ },
+ {
+ "default": "1",
+ "fieldname": "public",
+ "fieldtype": "Check",
+ "label": "Public"
+ },
+ {
+ "fieldname": "section_break_acrc",
+ "fieldtype": "Section Break",
+ "label": "Volumes"
+ },
+ {
+ "fieldname": "volumes",
+ "fieldtype": "Table",
+ "label": "Volumes",
+ "options": "Virtual Machine Image Volume",
+ "read_only": 1
+ },
+ {
+ "fieldname": "root_size",
+ "fieldtype": "Int",
+ "label": "Root Size",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "fetch_from": "virtual_machine.has_data_volume",
+ "fieldname": "has_data_volume",
+ "fieldtype": "Check",
+ "label": "Has Data Volume",
+ "read_only": 1
+ },
+ {
+ "fieldname": "object_storage_uri",
+ "fieldtype": "Small Text",
+ "label": "Object Storage URI"
+ },
+ {
+ "fetch_from": "virtual_machine.cloud_provider",
+ "fetch_if_empty": 1,
+ "fieldname": "cloud_provider",
+ "fieldtype": "Data",
+ "label": "Cloud Provider",
+ "reqd": 1
+ },
+ {
+ "collapsible": 1,
+ "depends_on": "eval: doc.cloud_provider == \"DigitalOcean\"",
+ "fieldname": "digital_ocean_action_section",
+ "fieldtype": "Section Break",
+ "label": "Digital Ocean Action"
+ },
+ {
+ "fieldname": "action_id",
+ "fieldtype": "Data",
+ "label": "Action Id"
}
],
+ "grid_page_length": 50,
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2022-11-11 10:33:39.581634",
+ "modified": "2026-01-18 15:54:34.633890",
"modified_by": "Administrator",
"module": "Press",
"name": "Virtual Machine Image",
@@ -144,8 +216,11 @@
"write": 1
}
],
+ "row_format": "Dynamic",
+ "rows_threshold_for_grid_search": 20,
+ "show_title_field_in_link": 1,
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"title_field": "virtual_machine"
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/virtual_machine_image/virtual_machine_image.py b/press/press/doctype/virtual_machine_image/virtual_machine_image.py
index d88d1fd8537..680850ed0fd 100644
--- a/press/press/doctype/virtual_machine_image/virtual_machine_image.py
+++ b/press/press/doctype/virtual_machine_image/virtual_machine_image.py
@@ -1,88 +1,417 @@
# Copyright (c) 2022, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
-import frappe
-from frappe.model.document import Document
-from frappe.core.utils import find
+import math
import boto3
+import frappe
+import pydo
+from frappe.core.utils import find
+from frappe.model.document import Document
+from hcloud import APIException, Client
+from oci.core import ComputeClient
+from oci.core.models import (
+ CreateImageDetails,
+)
+from oci.core.models.image_source_via_object_storage_uri_details import (
+ ImageSourceViaObjectStorageUriDetails,
+)
+from tenacity import retry, stop_after_attempt, wait_fixed
+from tenacity.retry import retry_if_result
class VirtualMachineImage(Document):
- def after_insert(self):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.virtual_machine_image_volume.virtual_machine_image_volume import (
+ VirtualMachineImageVolume,
+ )
+
+ action_id: DF.Data | None
+ cloud_provider: DF.Data
+ cluster: DF.Link
+ copied_from: DF.Link | None
+ has_data_volume: DF.Check
+ image_id: DF.Data | None
+ instance_id: DF.Data
+ mariadb_root_password: DF.Password | None
+ object_storage_uri: DF.SmallText | None
+ platform: DF.Data
+ public: DF.Check
+ region: DF.Link
+ root_size: DF.Int
+ series: DF.Literal["n", "f", "m", "c", "p", "e", "r", "nfs", "fs", "u"]
+ size: DF.Int
+ snapshot_id: DF.Data | None
+ status: DF.Literal["Pending", "Available", "Unavailable"]
+ virtual_machine: DF.Link
+ volumes: DF.Table[VirtualMachineImageVolume]
+ # end: auto-generated types
+
+ DOCTYPE = "Virtual Machine Image"
+
+ def before_insert(self):
self.set_credentials()
+ if (
+ self.cloud_provider == "Hetzner" or self.cloud_provider == "DigitalOcean"
+ ) and self.has_data_volume:
+ frappe.throw("Hetzner Virtual Machine Images cannot have data volumes.")
+
+ if self.cloud_provider == "DigitalOcean":
+ snapshots = self.client.droplets.list_snapshots(self.instance_id)
+ if snapshots.get("snapshots", []):
+ frappe.throw(
+ "A snapshot already exists, please delete the existing snapshot before creating a new image."
+ )
+
+ def after_insert(self):
if self.copied_from:
self.create_image_from_copy()
else:
self.create_image()
def create_image(self):
- response = self.client.create_image(
- InstanceId=self.aws_instance_id,
- Name=f"Frappe Cloud {self.name} - {self.virtual_machine}",
- )
- self.aws_ami_id = response["ImageId"]
+ cluster = frappe.get_doc("Cluster", self.cluster)
+
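+ # Each provider exposes a different image primitive: AMIs on AWS EC2, custom
+ # images on OCI (optionally imported from object storage), and snapshots on
+ # Hetzner and DigitalOcean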
+ if cluster.cloud_provider == "AWS EC2":
+ volumes = self.get_volumes_from_virtual_machine()
+ response = self.client.create_image(
+ InstanceId=self.instance_id,
+ Name=f"Frappe Cloud {self.name} - {self.virtual_machine}",
+ BlockDeviceMappings=volumes,
+ )
+ self.image_id = response["ImageId"]
+
+ elif cluster.cloud_provider == "OCI":
+ object_storage_details = {}
+ instance_details = {}
+ if self.object_storage_uri:
+ object_storage_details = {
+ "image_source_details": ImageSourceViaObjectStorageUriDetails(
+ source_uri=self.object_storage_uri
+ )
+ }
+ else:
+ instance_details = {
+ "instance_id": self.instance_id,
+ }
+ image = self.client.create_image(
+ CreateImageDetails(
+ compartment_id=cluster.oci_tenancy,
+ display_name=f"Frappe Cloud {self.name} - {self.virtual_machine}",
+ **instance_details,
+ **object_storage_details,
+ )
+ ).data
+ self.image_id = image.id
+
+ elif cluster.cloud_provider == "Hetzner":
+ from hcloud.servers.domain import Server
+
+ response = self.client.servers.create_image(
+ server=Server(id=self.instance_id),
+ description=f"Frappe Cloud VMI {self.name} - {self.virtual_machine} ",
+ labels={
+ "environment": "prod" if not frappe.conf.developer_mode else "local",
+ "instance-id": str(self.instance_id),
+ "virtual-machine": self.virtual_machine,
+ },
+ type="snapshot",
+ )
+ self.image_id = response.image.id
+ self.snapshot_id = response.image.id
+
+ elif cluster.cloud_provider == "DigitalOcean":
+ action = self.client.droplet_actions.post(
+ self.instance_id,
+ {"type": "snapshot", "name": f"Frappe Cloud {self.name} - {self.virtual_machine}"},
+ )
+ action = action["action"]
+ self.action_id = action["id"]
self.sync()
def create_image_from_copy(self):
- source = frappe.get_doc("Virtual Machine Image", self.copied_from)
- response = self.client.copy_image(
- Name=f"Frappe Cloud {self.name} - {self.virtual_machine}",
- SourceImageId=source.aws_ami_id,
- SourceRegion=source.region,
- )
- self.aws_ami_id = response["ImageId"]
- self.sync()
+ if self.cloud_provider == "AWS EC2":
+ source = frappe.get_doc("Virtual Machine Image", self.copied_from)
+ response = self.client.copy_image(
+ Name=f"Frappe Cloud {self.name} - {self.virtual_machine}",
+ SourceImageId=source.image_id,
+ SourceRegion=source.region,
+ )
+ self.image_id = response["ImageId"]
+ self.sync()
+
+ elif self.cloud_provider == "DigitalOcean":
+ action = self.client.image_actions.post(
+ self.image_id,
+ {"type": "transfer", "region": frappe.db.get_value("Cluster", self.cluster, "region")},
+ )
+ action = action["action"]
+ self.action_id = action["id"]
+ self.sync()
+
+ else:
+ raise NotImplementedError("Copying images is only supported for AWS EC2 and DigitalOcean.")
def set_credentials(self):
- if self.series == "m" and frappe.db.exists("Database Server", self.virtual_machine):
- self.mariadb_root_password = frappe.get_doc(
- "Database Server", self.virtual_machine
- ).get_password("mariadb_root_password")
+ if (self.series == "m" or self.series == "u") and frappe.db.exists(
+ "Database Server", self.virtual_machine
+ ):
+ self.mariadb_root_password = frappe.get_doc("Database Server", self.virtual_machine).get_password(
+ "mariadb_root_password"
+ )
@frappe.whitelist()
- def sync(self):
- images = self.client.describe_images(ImageIds=[self.aws_ami_id])["Images"]
- if images:
- image = images[0]
- self.status = self.get_status_map(image["State"])
- self.platform = image["Architecture"]
- volume = find(image["BlockDeviceMappings"], lambda x: "Ebs" in x.keys())
- if volume and "VolumeSize" in volume["Ebs"]:
- self.size = volume["Ebs"]["VolumeSize"]
- if volume and "SnapshotId" in volume["Ebs"]:
- self.aws_snapshot_id = volume["Ebs"]["SnapshotId"]
- else:
- self.status = "Unavailable"
+ def sync(self): # noqa: C901
+ cluster = frappe.get_doc("Cluster", self.cluster)
+ if cluster.cloud_provider == "AWS EC2":
+ images = self.client.describe_images(ImageIds=[self.image_id])["Images"]
+ if images:
+ image = images[0]
+ self.status = self.get_aws_status_map(image["State"])
+ self.platform = image["Architecture"]
+ volume = find(image["BlockDeviceMappings"], lambda x: "Ebs" in x)
+ # The top-level snapshot_id (from the first EBS mapping) is not accurate for images created from multiple volumes
+ attached_snapshots = []
+ if volume and "SnapshotId" in volume["Ebs"]:
+ self.snapshot_id = volume["Ebs"]["SnapshotId"]
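+ # Reconcile the volumes child table with the image's block device mappings:
+ # update rows matched by snapshot ID, append new ones, and drop stale ones below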
+ for volume in image["BlockDeviceMappings"]:
+ if "Ebs" not in volume:
+ # We don't care about non-EBS (instance store) volumes
+ continue
+ snapshot_id = volume["Ebs"].get("SnapshotId")
+ if not snapshot_id:
+ # We don't care about volumes without snapshots
+ continue
+ attached_snapshots.append(snapshot_id)
+ existing = find(self.volumes, lambda x: x.snapshot_id == snapshot_id)
+ device = volume["DeviceName"]
+ volume_type = volume["Ebs"]["VolumeType"]
+ size = volume["Ebs"]["VolumeSize"]
+ if existing:
+ existing.device = device
+ existing.volume_type = volume_type
+ existing.size = size
+ else:
+ self.append(
+ "volumes",
+ {
+ "snapshot_id": snapshot_id,
+ "device": device,
+ "volume_type": volume_type,
+ "size": size,
+ },
+ )
+ for volume in list(self.volumes):
+ if volume.snapshot_id not in attached_snapshots:
+ self.remove(volume)
+
+ self.size = self.get_data_volume().size
+ self.root_size = self.get_root_volume().size
+ else:
+ self.status = "Unavailable"
+ elif cluster.cloud_provider == "OCI":
+ image = self.client.get_image(self.image_id).data
+ self.status = self.get_oci_status_map(image.lifecycle_state)
+ if image.size_in_mbs:
+ self.size = image.size_in_mbs // 1024
+ elif cluster.cloud_provider == "Hetzner":
+ try:
+ image = self.client.images.get_by_id(self.image_id)
+ self.status = self.get_hetzner_status_map(image.status)
+ self.size = math.ceil(image.disk_size)
+ self.root_size = self.size
+ except APIException as e:
+ if e.code == "not_found":
+ self.status = "Unavailable"
+ else:
+ raise e
+ elif cluster.cloud_provider == "DigitalOcean":
+ if self.copied_from:
+ action_status = self.client.image_actions.get(
+ action_id=self.action_id,
+ image_id=frappe.db.get_value("Virtual Machine Image", self.copied_from, "image_id"),
+ )
+ else:
+ action_status = self.client.droplet_actions.get(
+ droplet_id=self.instance_id, action_id=self.action_id
+ )
+
+ status = action_status["action"]["status"]
+ self.status = self.get_digital_ocean_status_map(status)
+
+ if self.status == "Available":
+ if self.copied_from:
+ images = self.client.snapshots.get(action_status["action"]["resource_id"]).get("snapshot")
+ else:
+ virtual_machine_status = frappe.db.get_value(
+ "Virtual Machine", self.virtual_machine, "status"
+ )
+ if virtual_machine_status == "Terminated":
+ images = self.client.snapshots.get(self.image_id).get("snapshot")
+ else:
+ # We need this since the image ID might not be ready immediately after creation
+ images = self.client.droplets.list_snapshots(self.instance_id).get("snapshots")
+
+ image = images[0] if isinstance(images, list) else images
+ self.image_id = image["id"]
+ self.size = image["min_disk_size"]
+ self.root_size = image["min_disk_size"]
+
self.save()
+ return self.status
+
+ @retry(
+ retry=retry_if_result(lambda result: result != "Available"),
+ wait=wait_fixed(60),
+ stop=stop_after_attempt(10),
+ )
+ def wait_for_availability(self):
+ """Retries sync until the image is available"""
+ return self.sync()
@frappe.whitelist()
- def copy_image(self, cluster):
+ def copy_image(self, cluster: str):
image = frappe.copy_doc(self)
image.copied_from = self.name
image.cluster = cluster
- image.insert()
- return image.name
+ return image.insert()
@frappe.whitelist()
def delete_image(self):
- self.client.deregister_image(ImageId=self.aws_ami_id)
- if self.aws_snapshot_id:
- self.client.delete_snapshot(SnapshotId=self.aws_snapshot_id)
+ cluster = frappe.get_doc("Cluster", self.cluster)
+ if cluster.cloud_provider == "AWS EC2":
+ self.client.deregister_image(ImageId=self.image_id)
+ if self.snapshot_id:
+ self.client.delete_snapshot(SnapshotId=self.snapshot_id)
+ elif cluster.cloud_provider == "OCI":
+ self.client.delete_image(self.image_id)
+ elif cluster.cloud_provider == "Hetzner":
+ from hcloud.images.domain import Image
+
+ self.client.images.delete(Image(self.image_id))
self.sync()
- def get_status_map(self, status):
+ def get_aws_status_map(self, status):
return {
"pending": "Pending",
"available": "Available",
}.get(status, "Unavailable")
+ def get_hetzner_status_map(self, status):
+ return {
+ "creating": "Pending",
+ "available": "Available",
+ }.get(status, "Unavailable")
+
+ def get_digital_ocean_status_map(self, status: str):
+ return {
+ "in-progress": "Pending",
+ "completed": "Available",
+ }.get(status, "Unavailable")
+
+ def get_oci_status_map(self, status):
+ return {
+ "PROVISIONING": "Pending",
+ "IMPORTING": "Pending",
+ "AVAILABLE": "Available",
+ "EXPORTING": "Pending",
+ "DISABLED": "Unavailable",
+ "DELETED": "Unavailable",
+ }.get(status, "Unavailable")
+
+ def get_volumes_from_virtual_machine(self):
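+ # Build EC2 BlockDeviceMappings from the machine's volumes so the AMI
+ # captures every attached disk, not just the root volume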
+ machine = frappe.get_doc("Virtual Machine", self.virtual_machine)
+ volumes = []
+ for volume in machine.volumes:
+ volumes.append(
+ {
+ "DeviceName": volume.device,
+ "Ebs": {
+ "DeleteOnTermination": True,
+ "VolumeSize": volume.size,
+ "VolumeType": volume.volume_type,
+ },
+ }
+ )
+ return volumes
+
@property
def client(self):
cluster = frappe.get_doc("Cluster", self.cluster)
- return boto3.client(
- "ec2",
- region_name=self.region,
- aws_access_key_id=cluster.aws_access_key_id,
- aws_secret_access_key=cluster.get_password("aws_secret_access_key"),
+ if cluster.cloud_provider == "AWS EC2":
+ return boto3.client(
+ "ec2",
+ region_name=self.region,
+ aws_access_key_id=cluster.aws_access_key_id,
+ aws_secret_access_key=cluster.get_password("aws_secret_access_key"),
+ )
+ if cluster.cloud_provider == "OCI":
+ return ComputeClient(cluster.get_oci_config())
+ if cluster.cloud_provider == "Hetzner":
+ api_token = cluster.get_password("hetzner_api_token")
+ return Client(token=api_token)
+ if cluster.cloud_provider == "DigitalOcean":
+ return pydo.Client(token=cluster.get_password("digital_ocean_api_token"))
+ return None
+
+ @classmethod
+ def get_available_for_series(
+ cls,
+ series: str,
+ region: str | None = None,
+ platform: str | None = None,
+ cloud_provider: str | None = None,
+ ) -> str | None:
+ images = frappe.qb.DocType(cls.DOCTYPE)
+ get_available_images = (
+ frappe.qb.from_(images)
+ .select(images.name)
+ .where(images.status == "Available")
+ .where(images.public == 1)
+ .where(
+ images.series == series,
+ )
+ .orderby(images.creation, order=frappe.qb.desc)
)
+ if region:
+ get_available_images = get_available_images.where(images.region == region)
+ if platform:
+ get_available_images = get_available_images.where(images.platform == platform)
+ if cloud_provider:
+ get_available_images = get_available_images.where(images.cloud_provider == cloud_provider)
+
+ available_images = get_available_images.run(as_dict=True)
+ if not available_images:
+ return None
+ return available_images[0].name
+
+ def get_root_volume(self):
+ # This only works for AWS
+ if len(self.volumes) == 1:
+ return self.volumes[0]
+
+ volume = find(self.volumes, lambda v: v.device == "/dev/sda1")
+ if volume:
+ return volume
+ return frappe._dict({"size": 0})
+
+ def get_data_volume(self):
+ if not self.has_data_volume:
+ return self.get_root_volume()
+
+ # This only works for AWS
+ if len(self.volumes) == 1:
+ return self.volumes[0]
+
+ volume = find(self.volumes, lambda v: v.device != "/dev/sda1")
+ if volume:
+ return volume
+ return frappe._dict({"size": 0})
diff --git a/press/press/doctype/virtual_machine_image_volume/__init__.py b/press/press/doctype/virtual_machine_image_volume/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/virtual_machine_image_volume/virtual_machine_image_volume.json b/press/press/doctype/virtual_machine_image_volume/virtual_machine_image_volume.json
new file mode 100644
index 00000000000..0f08f784e4c
--- /dev/null
+++ b/press/press/doctype/virtual_machine_image_volume/virtual_machine_image_volume.json
@@ -0,0 +1,63 @@
+{
+ "actions": [],
+ "autoname": "autoincrement",
+ "creation": "2024-09-25 14:07:29.049839",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "snapshot_id",
+ "device",
+ "column_break_ygbk",
+ "volume_type",
+ "size"
+ ],
+ "fields": [
+ {
+ "fieldname": "volume_type",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Volume Type",
+ "options": "gp3\ngp2",
+ "read_only": 1
+ },
+ {
+ "fieldname": "size",
+ "fieldtype": "Int",
+ "in_list_view": 1,
+ "label": "Size",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_ygbk",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "device",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Device",
+ "read_only": 1
+ },
+ {
+ "fieldname": "snapshot_id",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Snapshot ID",
+ "read_only": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2024-09-25 14:23:33.386098",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Virtual Machine Image Volume",
+ "naming_rule": "Autoincrement",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/virtual_machine_image_volume/virtual_machine_image_volume.py b/press/press/doctype/virtual_machine_image_volume/virtual_machine_image_volume.py
new file mode 100644
index 00000000000..5c4b6a4d55c
--- /dev/null
+++ b/press/press/doctype/virtual_machine_image_volume/virtual_machine_image_volume.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from __future__ import annotations
+
+from frappe.model.document import Document
+
+
+class VirtualMachineImageVolume(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ device: DF.Data | None
+ name: DF.Int | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ size: DF.Int
+ snapshot_id: DF.Data | None
+ volume_type: DF.Literal["gp3", "gp2"]
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/virtual_machine_temporary_volume/__init__.py b/press/press/doctype/virtual_machine_temporary_volume/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/virtual_machine_temporary_volume/virtual_machine_temporary_volume.json b/press/press/doctype/virtual_machine_temporary_volume/virtual_machine_temporary_volume.json
new file mode 100644
index 00000000000..52c8b2a2ab2
--- /dev/null
+++ b/press/press/doctype/virtual_machine_temporary_volume/virtual_machine_temporary_volume.json
@@ -0,0 +1,32 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-01-15 11:46:59.223545",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "device"
+ ],
+ "fields": [
+ {
+ "fieldname": "device",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Device",
+ "reqd": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2025-01-15 11:48:44.056676",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Virtual Machine Temporary Volume",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/press/doctype/virtual_machine_temporary_volume/virtual_machine_temporary_volume.py b/press/press/doctype/virtual_machine_temporary_volume/virtual_machine_temporary_volume.py
new file mode 100644
index 00000000000..c4928a222c0
--- /dev/null
+++ b/press/press/doctype/virtual_machine_temporary_volume/virtual_machine_temporary_volume.py
@@ -0,0 +1,23 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class VirtualMachineTemporaryVolume(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ device: DF.Data
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ # end: auto-generated types
+
+ pass
diff --git a/press/press/doctype/virtual_machine_volume/patches/rename_aws_fields.py b/press/press/doctype/virtual_machine_volume/patches/rename_aws_fields.py
new file mode 100644
index 00000000000..f2a42657da1
--- /dev/null
+++ b/press/press/doctype/virtual_machine_volume/patches/rename_aws_fields.py
@@ -0,0 +1,10 @@
+# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and Contributors
+# For license information, please see license.txt
+
+import frappe
+from frappe.model.utils.rename_field import rename_field
+
+
+def execute():
+ frappe.reload_doctype("Virtual Machine Volume")
+ rename_field("Virtual Machine Volume", "aws_volume_id", "volume_id")
diff --git a/press/press/doctype/virtual_machine_volume/virtual_machine_volume.json b/press/press/doctype/virtual_machine_volume/virtual_machine_volume.json
index 83bcbb3c296..a8efff46b0b 100644
--- a/press/press/doctype/virtual_machine_volume/virtual_machine_volume.json
+++ b/press/press/doctype/virtual_machine_volume/virtual_machine_volume.json
@@ -6,61 +6,123 @@
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
- "aws_volume_id",
+ "volume_id",
"volume_type",
"size",
+ "column_break_ygbk",
"iops",
- "throughput"
+ "throughput",
+ "last_updated_at",
+ "section_break_frlu",
+ "device",
+ "column_break_buwy",
+ "detach",
+ "delete_volume",
+ "increase_disk_size",
+ "update_ebs_performance"
],
"fields": [
{
+ "columns": 1,
"fieldname": "volume_type",
"fieldtype": "Select",
"in_list_view": 1,
"label": "Volume Type",
"options": "gp3\ngp2",
- "read_only": 1
- },
- {
- "fieldname": "aws_volume_id",
- "fieldtype": "Data",
- "in_list_view": 1,
- "label": "AWS Volume ID",
- "read_only": 1
+ "read_only_depends_on": "eval: doc.volume_type"
},
{
+ "columns": 1,
"fieldname": "size",
"fieldtype": "Int",
"in_list_view": 1,
"label": "Size",
- "read_only": 1
+ "read_only_depends_on": "eval: doc.size"
},
{
+ "columns": 1,
"fieldname": "iops",
"fieldtype": "Int",
"in_list_view": 1,
"label": "IOPS",
- "read_only": 1
+ "read_only_depends_on": "eval: doc.iops"
},
{
+ "columns": 1,
"fieldname": "throughput",
"fieldtype": "Int",
"in_list_view": 1,
"label": "Throughput",
+ "read_only_depends_on": "eval: doc.throughput"
+ },
+ {
+ "columns": 4,
+ "fieldname": "volume_id",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Volume ID",
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "last_updated_at",
+ "fieldtype": "Datetime",
+ "label": "Last updated at",
"read_only": 1
+ },
+ {
+ "fieldname": "column_break_ygbk",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "section_break_frlu",
+ "fieldtype": "Section Break"
+ },
+ {
+ "columns": 2,
+ "fieldname": "device",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Device",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_buwy",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "detach",
+ "fieldtype": "Button",
+ "label": "Detach"
+ },
+ {
+ "fieldname": "increase_disk_size",
+ "fieldtype": "Button",
+ "label": "Increase Disk Size"
+ },
+ {
+ "fieldname": "delete_volume",
+ "fieldtype": "Button",
+ "label": "Delete Volume"
+ },
+ {
+ "fieldname": "update_ebs_performance",
+ "fieldtype": "Button",
+ "label": "Update EBS Performance"
}
],
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
- "modified": "2022-06-28 22:17:27.201263",
+ "modified": "2025-10-16 13:11:45.102040",
"modified_by": "Administrator",
"module": "Press",
"name": "Virtual Machine Volume",
"naming_rule": "Autoincrement",
"owner": "Administrator",
"permissions": [],
+ "row_format": "Dynamic",
"sort_field": "modified",
"sort_order": "DESC",
"states": []
-}
\ No newline at end of file
+}
diff --git a/press/press/doctype/virtual_machine_volume/virtual_machine_volume.py b/press/press/doctype/virtual_machine_volume/virtual_machine_volume.py
index 28bff28a818..5135e784bdc 100644
--- a/press/press/doctype/virtual_machine_volume/virtual_machine_volume.py
+++ b/press/press/doctype/virtual_machine_volume/virtual_machine_volume.py
@@ -2,8 +2,31 @@
# For license information, please see license.txt
# import frappe
+from __future__ import annotations
+
from frappe.model.document import Document
class VirtualMachineVolume(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ device: DF.Data | None
+ iops: DF.Int
+ last_updated_at: DF.Datetime | None
+ name: DF.Int | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ size: DF.Int
+ throughput: DF.Int
+ volume_id: DF.Data | None
+ volume_type: DF.Literal["gp3", "gp2"]
+ # end: auto-generated types
+
pass
diff --git a/press/press/doctype/wireguard_peer/__init__.py b/press/press/doctype/wireguard_peer/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/doctype/wireguard_peer/templates/wg0.conf b/press/press/doctype/wireguard_peer/templates/wg0.conf
new file mode 100644
index 00000000000..4606f2ca7a7
--- /dev/null
+++ b/press/press/doctype/wireguard_peer/templates/wg0.conf
@@ -0,0 +1,10 @@
+[Interface]
+Address = {{ wireguard_network }}
+PrivateKey = {{ wireguard_private_key }}
+{% if peers %}{% for peer in peers %}
+[Peer]
+PublicKey = {{ peer.public_key }}
+Endpoint = {{ peer.endpoint }}
+AllowedIPs = {{ peer.allowed_ips }}
+PersistentKeepalive = 25
+{% endfor %}{% endif %}
\ No newline at end of file
diff --git a/press/press/doctype/wireguard_peer/test_wireguard_peer.py b/press/press/doctype/wireguard_peer/test_wireguard_peer.py
new file mode 100644
index 00000000000..94bb9608def
--- /dev/null
+++ b/press/press/doctype/wireguard_peer/test_wireguard_peer.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2023, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class TestWireguardPeer(FrappeTestCase):
+ pass
diff --git a/press/press/doctype/wireguard_peer/wireguard_peer.js b/press/press/doctype/wireguard_peer/wireguard_peer.js
new file mode 100644
index 00000000000..018be50b579
--- /dev/null
+++ b/press/press/doctype/wireguard_peer/wireguard_peer.js
@@ -0,0 +1,48 @@
+// Copyright (c) 2023, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('Wireguard Peer', {
+ refresh: function (frm) {
+ frm.add_fetch('server_name', 'ip', 'ip');
+ frm.add_fetch('server_name', 'private_ip', 'private_ip');
+ frm.add_fetch('server_name', 'title', 'peer_name');
+
+ [
+ [__('Setup Wireguard'), 'setup_wireguard', false],
+ [__('Ping Peer'), 'ping_peer', false],
+ [__('Fetch Private Network'), 'fetch_peer_private_network', false],
+ [__('Generate Config'), 'generate_config', false],
+ [__('Generate QR'), 'generate_qr_code', false],
+ ].forEach(([label, method, confirm, condition]) => {
+ if (typeof condition === 'undefined' || condition) {
+ frm.add_custom_button(
+ label,
+ () => {
+ if (confirm) {
+ frappe.confirm(
+ `Are you sure you want to ${label.toLowerCase()}?`,
+ () =>
+ frm.call(method).then((r) => {
+ if (r.message) {
+ frappe.msgprint(r.message);
+ } else {
+ frm.refresh();
+ }
+ }),
+ );
+ } else {
+ frm.call(method).then((r) => {
+ if (r.message) {
+ frappe.msgprint(r.message);
+ } else {
+ frm.refresh();
+ }
+ });
+ }
+ },
+ __('Actions'),
+ );
+ }
+ });
+ },
+});
diff --git a/press/press/doctype/wireguard_peer/wireguard_peer.json b/press/press/doctype/wireguard_peer/wireguard_peer.json
new file mode 100644
index 00000000000..cb311afa609
--- /dev/null
+++ b/press/press/doctype/wireguard_peer/wireguard_peer.json
@@ -0,0 +1,178 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2023-08-07 17:54:33.377864",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "server_section",
+ "server_type",
+ "server_name",
+ "peer_name",
+ "upstream_proxy",
+ "column_break_bsvs",
+ "status",
+ "ip",
+ "private_ip",
+ "peer_private_network",
+ "wireguard_section",
+ "wireguard_network",
+ "peer_ip",
+ "allowed_ips",
+ "column_break_aqot",
+ "is_wireguard_setup",
+ "private_key",
+ "public_key",
+ "config_tab",
+ "peer_config"
+ ],
+ "fields": [
+ {
+ "fieldname": "peer_name",
+ "fieldtype": "Data",
+ "label": "Peer Name",
+ "reqd": 1,
+ "unique": 1
+ },
+ {
+ "default": "Active",
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "label": "Status",
+ "options": "Active\nBroken\nArchived"
+ },
+ {
+ "fieldname": "peer_ip",
+ "fieldtype": "Data",
+ "label": "Peer IP",
+ "read_only": 1,
+ "unique": 1
+ },
+ {
+ "fieldname": "column_break_bsvs",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "private_key",
+ "fieldtype": "Password",
+ "label": "Private Key"
+ },
+ {
+ "fieldname": "public_key",
+ "fieldtype": "Data",
+ "label": "Public Key"
+ },
+ {
+ "description": "Comma Seperated CIDR blocks. EG: 10.122.0.0/20,10.7.0.1/32",
+ "fieldname": "allowed_ips",
+ "fieldtype": "Data",
+ "label": "Allowed IPs",
+ "read_only": 1
+ },
+ {
+ "fieldname": "peer_private_network",
+ "fieldtype": "Data",
+ "label": "Peer Private Network"
+ },
+ {
+ "fieldname": "upstream_proxy",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Upstream Proxy",
+ "mandatory_depends_on": "eval:doc.setup_wireguard",
+ "options": "Proxy Server",
+ "reqd": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "is_wireguard_setup",
+ "fieldtype": "Check",
+ "label": "Is Wireguard Setup"
+ },
+ {
+ "fieldname": "private_ip",
+ "fieldtype": "Data",
+ "label": "Private IP"
+ },
+ {
+ "fetch_from": "upstream_proxy.wireguard_network",
+ "fieldname": "wireguard_network",
+ "fieldtype": "Data",
+ "label": "Wireguard Network",
+ "read_only": 1
+ },
+ {
+ "fieldname": "ip",
+ "fieldtype": "Data",
+ "label": "Public IP"
+ },
+ {
+ "fieldname": "config_tab",
+ "fieldtype": "Tab Break",
+ "label": "Config"
+ },
+ {
+ "fieldname": "peer_config",
+ "fieldtype": "Code",
+ "label": "Peer Config",
+ "options": "conf",
+ "read_only": 1
+ },
+ {
+ "fieldname": "server_type",
+ "fieldtype": "Select",
+ "label": "Server Type",
+ "options": "Server\nDatabase Server",
+ "reqd": 1
+ },
+ {
+ "fieldname": "server_name",
+ "fieldtype": "Dynamic Link",
+ "in_list_view": 1,
+ "label": "Server Name",
+ "options": "server_type",
+ "reqd": 1
+ },
+ {
+ "fieldname": "server_section",
+ "fieldtype": "Section Break",
+ "label": "Server"
+ },
+ {
+ "fieldname": "wireguard_section",
+ "fieldtype": "Section Break",
+ "label": "Wireguard"
+ },
+ {
+ "fieldname": "column_break_aqot",
+ "fieldtype": "Column Break"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2024-03-11 18:18:39.689700",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Wireguard Peer",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "search_fields": "peer_name,status",
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": [],
+ "title_field": "peer_name"
+}
\ No newline at end of file
diff --git a/press/press/doctype/wireguard_peer/wireguard_peer.py b/press/press/doctype/wireguard_peer/wireguard_peer.py
new file mode 100644
index 00000000000..0b446bfc5c4
--- /dev/null
+++ b/press/press/doctype/wireguard_peer/wireguard_peer.py
@@ -0,0 +1,192 @@
+# Copyright (c) 2023, Frappe and contributors
+# For license information, please see license.txt
+
+import ipaddress
+import json
+import subprocess
+
+import frappe
+from frappe.model.document import Document
+
+from press.runner import Ansible
+from press.utils import log_error
+
+
+class WireguardPeer(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ allowed_ips: DF.Data | None
+ ip: DF.Data | None
+ is_wireguard_setup: DF.Check
+ peer_config: DF.Code | None
+ peer_ip: DF.Data | None
+ peer_name: DF.Data
+ peer_private_network: DF.Data | None
+ private_ip: DF.Data | None
+ private_key: DF.Password | None
+ public_key: DF.Data | None
+ server_name: DF.DynamicLink
+ server_type: DF.Literal["Server", "Database Server"]
+ status: DF.Literal["Active", "Broken", "Archived"]
+ upstream_proxy: DF.Link
+ wireguard_network: DF.Data | None
+ # end: auto-generated types
+
+ def validate(self):
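+ # allowed_ips controls what the proxy routes to this peer: the peer's
+ # tunnel IP, plus its private network once that has been discovered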
+ self.next_ip_address()
+ if not self.private_ip:
+ self.allowed_ips = self.peer_ip
+ else:
+ self.allowed_ips = f"{self.peer_ip},{self.peer_private_network}"
+
+ def next_ip_address(self):
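+ # Allocate the first free tunnel IP: start from the network's third address,
+ # then hand out one past the highest address already assigned to a peer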
+ try:
+ if self.is_new() and not self.peer_ip:
+ network_address = ipaddress.ip_network(self.wireguard_network)
+ ips = frappe.get_list(
+ "Wireguard Peer",
+ filters={"wireguard_network": self.wireguard_network},
+ pluck="peer_ip",
+ fields=["peer_ip"],
+ )
+ if not ips:
+ self.peer_ip = str(network_address[2])
+ return
+ # Compare as IP addresses, not strings, so 10.7.0.10 sorts after 10.7.0.9
+ last_ip_address = max(ipaddress.ip_address(ip) for ip in ips)
+ next_ip_addr = last_ip_address + 1
+ while next_ip_addr not in network_address:
+ next_ip_addr += 1
+ self.peer_ip = str(next_ip_addr)
+ except Exception:
+ log_error("Wireguard Peer IP Exception", server=self.as_dict())
+ frappe.throw("Invalid Wireguard Network")
+
+ @frappe.whitelist()
+ def setup_wireguard(self):
+ frappe.enqueue_doc("Wireguard Peer", self.name, "_setup_peer_wg")
+
+ @frappe.whitelist()
+ def ping_peer(self):
+ try:
+ ansible = Ansible(
+ playbook="ping.yml",
+ server=self,
+ )
+ play = ansible.run()
+ if play.status == "Success":
+ if not self.peer_private_network:
+ self.fetch_peer_private_network(play)
+ except Exception:
+ log_error("Server Ping Exception", server=self.as_dict())
+
+ @frappe.whitelist()
+ def fetch_peer_private_network(self, play=None):
+ if not play:
+ play = frappe.get_last_doc(
+ "Ansible Play", {"status": "Success", "server": self.name, "play": "Ping Server"}
+ )
+ res = frappe.get_last_doc(
+ "Ansible Task", {"status": "Success", "play": play.name, "task": "Gather Facts"}
+ ).result
+ facts = json.loads(res)["ansible_facts"]
+ self.private_ip = facts["eth1"]["ipv4"]["address"]
+ self.peer_private_network = str(
+ ipaddress.IPv4Network(
+ f'{facts["eth1"]["ipv4"]["address"]}/{facts["eth1"]["ipv4"]["netmask"]}',
+ strict=False,
+ )
+ )
+ self.save()
+
+ def _setup_peer_wg(self):
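+ # Run the wireguard.yml playbook against the peer, then store the generated
+ # keypair and discovered private network and reload the upstream proxy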
+ proxy = frappe.get_doc("Proxy Server", self.upstream_proxy)
+ try:
+ ansible = Ansible(
+ playbook="wireguard.yml",
+ server=self,
+ variables={
+ "wireguard_port": proxy.wireguard_port,
+ "interface_id": proxy.private_ip_interface_id,
+ "wireguard_network": self.peer_ip + "/" + self.wireguard_network.split("/")[1],
+ "wireguard_private_key": self.get_password("private_key")
+ if self.private_key
+ else False,
+ "wireguard_public_key": self.get_password("public_key")
+ if self.public_key
+ else False,
+ "peers": json.dumps(
+ [
+ {
+ "public_key": proxy.get_password("wireguard_public_key"),
+ "allowed_ips": self.wireguard_network,
+ "peer_ip": proxy.name,
+ }
+ ]
+ ),
+ },
+ )
+ play = ansible.run()
+ if play.status == "Success":
+ self.reload()
+ self.is_wireguard_setup = True
+ try:
+ if not self.private_key and not self.public_key:
+ self.private_key = frappe.get_doc(
+ "Ansible Task", {"play": play.name, "task": "Generate Wireguard Private Key"}
+ ).output
+ self.public_key = frappe.get_doc(
+ "Ansible Task", {"play": play.name, "task": "Generate Wireguard Public Key"}
+ ).output
+ except Exception:
+ log_error("Wireguard Key Save error", server=self.as_dict())
+ if not self.peer_private_network:
+ self.peer_private_network = frappe.get_doc(
+ "Ansible Task", {"play": play.name, "task": "Get Subnet Mask of eth1"}
+ ).output
+ self.save()
+ proxy.reload_wireguard()
+ except Exception:
+ log_error("Wireguard Setup Exception", server=self.as_dict())
+
+ @frappe.whitelist()
+ def generate_config(self):
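+ # Generate a keypair locally with the wg CLI if the peer has none, render
+ # the wg0.conf template into peer_config, and reload the upstream proxy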
+ if not self.private_key or not self.public_key:
+ self.private_key = subprocess.check_output(["wg", "genkey"]).decode().strip()
+ self.public_key = (
+ subprocess.check_output([f"echo '{self.private_key}' | wg pubkey"], shell=True)
+ .decode()
+ .strip()
+ )
+ self.save()
+ proxy = frappe.get_doc("Proxy Server", self.upstream_proxy)
+ variables = {
+ "wireguard_network": self.peer_ip + "/" + self.wireguard_network.split("/")[1],
+ "wireguard_private_key": self.get_password("private_key"),
+ "wireguard_port": proxy.wireguard_port,
+ "peers": [
+ {
+ "public_key": proxy.get_password("wireguard_public_key"),
+ "endpoint": proxy.name + ":" + str(proxy.wireguard_port),
+ "allowed_ips": self.wireguard_network,
+ "peer_ip": proxy.name,
+ }
+ ],
+ }
+ output_text = frappe.render_template(
+ "press/doctype/wireguard_peer/templates/wg0.conf", variables, is_path=True
+ )
+ self.peer_config = output_text
+ self.save()
+ proxy.reload_wireguard()
+
+ @frappe.whitelist()
+ def download_config(self):
+ frappe.local.response.filename = f"{self.name}.conf"
+ frappe.local.response.filecontent = self.peer_config
+ frappe.local.response.type = "download"
diff --git a/press/press/doctype/wireguard_peer/wireguard_peer_dashboard.py b/press/press/doctype/wireguard_peer/wireguard_peer_dashboard.py
new file mode 100644
index 00000000000..6b907bf7ba3
--- /dev/null
+++ b/press/press/doctype/wireguard_peer/wireguard_peer_dashboard.py
@@ -0,0 +1,11 @@
+from frappe import _
+
+
+def get_data():
+ return {
+ "fieldname": "Server",
+ "non_standard_fieldnames": {"Server": "Wireguard Peer"},
+ "transactions": [
+ {"label": _("Logs"), "items": ["Ansible Play"]},
+ ],
+ }
diff --git a/press/press/report/agent_versions/agent_versions.js b/press/press/report/agent_versions/agent_versions.js
index 50bb6a0b57d..bfdd39c2737 100644
--- a/press/press/report/agent_versions/agent_versions.js
+++ b/press/press/report/agent_versions/agent_versions.js
@@ -5,15 +5,19 @@
frappe.query_reports['Agent Versions'] = {
onload: function (report) {
report.page.add_button(__('Update Agent'), () => {
+ let filters = {
+ server_type: frappe.query_report.get_filter_value('server_type'),
+ exclude_self_hosted: frappe.query_report.get_filter_value(
+ 'exclude_self_hosted',
+ ),
+ };
+ let team = frappe.query_report.get_filter_value('team');
+ if (team) {
+ filters['team'] = team;
+ }
frappe
.call('press.press.report.agent_versions.agent_versions.update_agent', {
- filters: {
- team: frappe.query_report.get_filter_value('team'),
- server_type: frappe.query_report.get_filter_value('server_type'),
- exclude_self_hosted: frappe.query_report.get_filter_value(
- 'exclude_self_hosted',
- ),
- },
+ filters: filters,
})
.then((r) => {
frappe.query_report.refresh();
diff --git a/press/press/report/agent_versions/agent_versions.json b/press/press/report/agent_versions/agent_versions.json
index 9b9748f7287..5a6209a60b6 100644
--- a/press/press/report/agent_versions/agent_versions.json
+++ b/press/press/report/agent_versions/agent_versions.json
@@ -6,6 +6,22 @@
"docstatus": 0,
"doctype": "Report",
"filters": [
+ {
+ "fieldname": "server_type",
+ "fieldtype": "Select",
+ "label": "Server Type",
+ "mandatory": 0,
+ "options": "\nServer\nDatabase Server\nProxy Server\nLog Server\nMonitor Server\nRegistry Server\nTrace Server\nAnalytics Server",
+ "wildcard_filter": 0
+ },
+ {
+ "fieldname": "server_name",
+ "fieldtype": "Data",
+ "label": "Server Name",
+ "mandatory": 0,
+ "options": "",
+ "wildcard_filter": 0
+ },
{
"fieldname": "team",
"fieldtype": "Link",
@@ -15,24 +31,25 @@
"wildcard_filter": 1
},
{
- "fieldname": "server_type",
- "fieldtype": "Select",
- "label": "Server Type",
+ "fieldname": "exclude_self_hosted",
+ "fieldtype": "Check",
+ "label": "Exclude Self Hosted",
"mandatory": 0,
- "options": "\nServer\nDatabase Server\nProxy Server\nLog Server\nMonitor Server\nRegistry Server\nTrace Server\nAnalytics Server",
"wildcard_filter": 0
},
{
- "fieldname": "exclude_self_hosted",
- "fieldtype": "Check",
- "label": "Exclude Self Hosted",
+ "fieldname": "cluster",
+ "fieldtype": "Link",
+ "label": "Cluster",
"mandatory": 0,
+ "options": "Cluster",
"wildcard_filter": 0
}
],
"idx": 0,
"is_standard": "Yes",
- "modified": "2023-05-17 18:31:33.157701",
+ "letterhead": null,
+ "modified": "2025-01-17 10:49:27.237566",
"modified_by": "Administrator",
"module": "Press",
"name": "Agent Versions",
@@ -51,5 +68,6 @@
{
"role": "Press Admin"
}
- ]
+ ],
+ "timeout": 0
}
\ No newline at end of file
diff --git a/press/press/report/agent_versions/agent_versions.py b/press/press/report/agent_versions/agent_versions.py
index 12a25f5b5e4..0adc85302b2 100644
--- a/press/press/report/agent_versions/agent_versions.py
+++ b/press/press/report/agent_versions/agent_versions.py
@@ -4,6 +4,7 @@
import json
import frappe
+
from press.agent import Agent
from press.press.report.server_stats.server_stats import get_servers
@@ -49,6 +50,12 @@ def execute(filters=None):
"fieldtype": "Long Text",
"width": 100,
},
+ {
+ "fieldname": "python",
+ "label": frappe._("Python Version"),
+ "fieldtype": "Data",
+ "width": 100,
+ },
]
data = get_data(filters)
diff --git a/press/press/report/aws_instance_pricing/aws_instance_pricing.json b/press/press/report/aws_instance_pricing/aws_instance_pricing.json
index 646d2a87179..46ddf3aab13 100644
--- a/press/press/report/aws_instance_pricing/aws_instance_pricing.json
+++ b/press/press/report/aws_instance_pricing/aws_instance_pricing.json
@@ -1,6 +1,13 @@
{
"add_total_row": 0,
"columns": [
+ {
+ "fieldname": "cluster",
+ "fieldtype": "Link",
+ "label": "Cluster",
+ "options": "Cluster",
+ "width": 0
+ },
{
"fieldname": "instance",
"fieldtype": "Data",
@@ -25,6 +32,42 @@
"label": "Memory",
"width": 0
},
+ {
+ "fieldname": "processor",
+ "fieldtype": "Data",
+ "label": "Processor",
+ "width": 0
+ },
+ {
+ "fieldname": "family",
+ "fieldtype": "Data",
+ "label": "Family",
+ "width": 0
+ },
+ {
+ "fieldname": "generation",
+ "fieldtype": "Int",
+ "label": "Generation",
+ "width": 0
+ },
+ {
+ "fieldname": "is_latest_generation",
+ "fieldtype": "Check",
+ "label": "Is Latest Generation?",
+ "width": 0
+ },
+ {
+ "fieldname": "size",
+ "fieldtype": "Data",
+ "label": "Size",
+ "width": 0
+ },
+ {
+ "fieldname": "size_multiplier",
+ "fieldtype": "Float",
+ "label": "Size Multiplier",
+ "width": 0
+ },
{
"fieldname": "on_demand",
"fieldtype": "Float",
@@ -57,7 +100,6 @@
}
],
"creation": "2022-09-19 17:12:10.701432",
- "disable_prepared_report": 0,
"disabled": 0,
"docstatus": 0,
"doctype": "Report",
@@ -66,7 +108,7 @@
"fieldname": "cluster",
"fieldtype": "Link",
"label": "Cluster",
- "mandatory": 1,
+ "mandatory": 0,
"options": "Cluster",
"wildcard_filter": 0
},
@@ -75,10 +117,11 @@
"fieldtype": "Select",
"label": "Instance Family",
"mandatory": 0,
- "options": "General Purpose\nCompute Optimized\nMemory Optimized\n",
+ "options": "\nGeneral Purpose\nCompute Optimized\nMemory Optimized",
"wildcard_filter": 0
},
{
+ "default": "Intel",
"fieldname": "processor",
"fieldtype": "Select",
"label": "Processor",
@@ -87,16 +130,9 @@
"wildcard_filter": 0
},
{
- "fieldname": "enhanced_networking",
- "fieldtype": "Check",
- "label": "Enhanced Networking",
- "mandatory": 0,
- "wildcard_filter": 0
- },
- {
- "fieldname": "instance_store",
+ "fieldname": "latest_generation_only",
"fieldtype": "Check",
- "label": "Instance Store",
+ "label": "Latest Generation Only",
"mandatory": 0,
"wildcard_filter": 0
}
@@ -104,7 +140,8 @@
"idx": 0,
"is_standard": "Yes",
"json": "{}",
- "modified": "2022-09-20 01:34:57.237617",
+ "letterhead": null,
+ "modified": "2023-11-06 19:08:45.728372",
"modified_by": "Administrator",
"module": "Press",
"name": "AWS Instance Pricing",
diff --git a/press/press/report/aws_instance_pricing/aws_instance_pricing.py b/press/press/report/aws_instance_pricing/aws_instance_pricing.py
index 9fee281562b..fad0434b0b6 100644
--- a/press/press/report/aws_instance_pricing/aws_instance_pricing.py
+++ b/press/press/report/aws_instance_pricing/aws_instance_pricing.py
@@ -1,11 +1,12 @@
# Copyright (c) 2022, Frappe and contributors
# For license information, please see license.txt
-import frappe
-from frappe.utils import flt, cint
-from frappe.core.utils import find
import json
+
import boto3
+import frappe
+from frappe.core.utils import find
+from frappe.utils import cint, flt
def execute(filters=None):
@@ -16,7 +17,20 @@ def execute(filters=None):
def get_data(filters):
- cluster = frappe.get_doc("Cluster", filters.cluster)
+ if filters.cluster:
+ clusters = [filters.cluster]
+ else:
+ clusters = frappe.get_all(
+ "Cluster", filters={"public": 1, "cloud_provider": "AWS EC2"}, pluck="name"
+ )
+ data = []
+ for cluster in clusters:
+ data.extend(get_cluster_data(filters, cluster))
+ return data
+
+
+def get_cluster_data(filters, cluster_name):
+ cluster = frappe.get_doc("Cluster", cluster_name)
client = boto3.client(
"pricing",
region_name="ap-south-1",
@@ -44,11 +58,6 @@ def get_data(filters):
}
)
- if not filters.instance_store:
- product_filters.append(
- {"Type": "TERM_MATCH", "Field": "storage", "Value": "EBS only"}
- )
-
response_iterator = paginator.paginate(
ServiceCode="AmazonEC2", Filters=product_filters, PaginationConfig={"PageSize": 100}
)
@@ -56,22 +65,12 @@ def get_data(filters):
for response in response_iterator:
for item in response["PriceList"]:
product = json.loads(item)
- if (
- filters.enhanced_networking
- and "n." not in product["product"]["attributes"]["instanceType"]
- ):
- continue
- if (
- not filters.enhanced_networking
- and "n." in product["product"]["attributes"]["instanceType"]
- ):
- continue
-
if filters.processor:
if filters.processor not in product["product"]["attributes"]["physicalProcessor"]:
continue
row = {
+ "cluster": cluster.name,
"instance_type": product["product"]["attributes"]["instanceType"].split(".")[0],
"instance": product["product"]["attributes"]["instanceType"],
"vcpu": cint(product["product"]["attributes"]["vcpu"], 0),
@@ -81,8 +80,31 @@ def get_data(filters):
row["on_demand"] = (
flt(list(term["priceDimensions"].values())[0]["pricePerUnit"]["USD"]) * 750
)
+ instance_type = parse_instance_type(row["instance"])
+ if not instance_type:
+ continue
+
+ family, generation, processor, size = instance_type
+
+ row.update(
+ {
+ "family": family,
+ "generation": generation,
+ "processor": processor,
+ "size": size,
+ "size_multiplier": parse_size_multiplier(size),
+ }
+ )
rows.append(row)
+ latest_generation = max((row["generation"] for row in rows), default=0)
+ for row in rows:
+ if row["generation"] == latest_generation:
+ row["is_latest_generation"] = True
+
+ if filters.latest_generation_only:
+ rows = [row for row in rows if row.get("is_latest_generation")]
+
client = boto3.client(
"savingsplans",
aws_access_key_id=cluster.aws_access_key_id,
@@ -114,3 +136,75 @@ def get_data(filters):
rows.sort(key=lambda x: (x["instance_type"], x["vcpu"], x["memory"]))
return rows
+
+
+FAMILIES = [
+ "c",
+ "d",
+ "f",
+ "g",
+ "hpc",
+ "inf",
+ "i",
+ "mac",
+ "m",
+ "p",
+ "r",
+ "trn",
+ "t",
+ "u",
+ "vt",
+ "x",
+]
+PREFERRED_FAMILIES = [
+ "c",
+ "m",
+ "r",
+]
+PROCESSORS = ["a", "g", "i"]
+
+
+def parse_instance_type(instance_type):
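+ # e.g. "m7g.2xlarge" -> ("m", 7, "g", "2xlarge"); returns None for
+ # metal instances and families outside PREFERRED_FAMILIES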
+ instance_type, size = instance_type.split(".")
+ # Skip metal instances
+ if "metal" in size:
+ return
+
+ family = None
+ for ff in FAMILIES:
+ if instance_type.startswith(ff):
+ family = ff
+ break
+
+ # Ignore other instance families
+ if family not in PREFERRED_FAMILIES:
+ return
+
+ rest = instance_type.removeprefix(family)
+ generation = int(rest[0])
+ rest = rest[1:]
+
+ # If the processor isn't mentioned, assume Intel
+ if rest and rest[0] in PROCESSORS:
+ processor = rest[0]
+ rest = rest[1:]
+ else:
+ processor = "i"
+
+ if rest:
+ return
+
+ return family, generation, processor, size
+
+
+def parse_size_multiplier(size):
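+ # Size relative to xlarge: "large" -> 0.5, "xlarge" -> 1.0, "2xlarge" -> 2.0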
+ SIZES = {
+ "medium": 1 / 4,
+ "large": 1 / 2,
+ "xlarge": 1,
+ }
+ if size in SIZES:
+ return SIZES[size]
+ else:
+ size = size.removesuffix("xlarge")
+ return float(size)
diff --git a/press/press/report/aws_rightsizing_recommendation/__init__.py b/press/press/report/aws_rightsizing_recommendation/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/report/aws_rightsizing_recommendation/aws_rightsizing_recommendation.js b/press/press/report/aws_rightsizing_recommendation/aws_rightsizing_recommendation.js
new file mode 100644
index 00000000000..9c3a997eeea
--- /dev/null
+++ b/press/press/report/aws_rightsizing_recommendation/aws_rightsizing_recommendation.js
@@ -0,0 +1,23 @@
+// Copyright (c) 2024, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.query_reports['AWS Rightsizing Recommendation'] = {
+ onload: function (report) {
+ report.page.add_button(__('Rightsize'), () => {
+ frappe
+ .call(
+ 'press.press.report.aws_rightsizing_recommendation.aws_rightsizing_recommendation.rightsize',
+ {
+ filters: {
+ resource_type:
+ frappe.query_report.get_filter_value('resource_type'),
+ action_type: frappe.query_report.get_filter_value('action_type'),
+ },
+ },
+ )
+ .then((r) => {
+ frappe.query_report.refresh();
+ });
+ });
+ },
+};
diff --git a/press/press/report/aws_rightsizing_recommendation/aws_rightsizing_recommendation.json b/press/press/report/aws_rightsizing_recommendation/aws_rightsizing_recommendation.json
new file mode 100644
index 00000000000..b4ca072a974
--- /dev/null
+++ b/press/press/report/aws_rightsizing_recommendation/aws_rightsizing_recommendation.json
@@ -0,0 +1,175 @@
+{
+ "add_total_row": 1,
+ "columns": [
+ {
+ "fieldname": "virtual_machine",
+ "fieldtype": "Link",
+ "label": "Virtual Machine",
+ "options": "Virtual Machine",
+ "width": 0
+ },
+ {
+ "fieldname": "resource_type",
+ "fieldtype": "Data",
+ "label": "Resource Type",
+ "options": "",
+ "width": 0
+ },
+ {
+ "fieldname": "estimated_cost",
+ "fieldtype": "Currency",
+ "label": "Estimated Cost",
+ "options": "currency",
+ "width": 0
+ },
+ {
+ "fieldname": "estimated_savings",
+ "fieldtype": "Currency",
+ "label": "Estimated Savings",
+ "options": "currency",
+ "width": 0
+ },
+ {
+ "fieldname": "estimated_savings_percentage",
+ "fieldtype": "Int",
+ "label": "Estimated Savings Percentage",
+ "width": 0
+ },
+ {
+ "fieldname": "current_iops",
+ "fieldtype": "Int",
+ "label": "Current IOPS",
+ "width": 0
+ },
+ {
+ "fieldname": "current_throughput",
+ "fieldtype": "Int",
+ "label": "Current Throughput",
+ "width": 0
+ },
+ {
+ "fieldname": "recommended_iops",
+ "fieldtype": "Int",
+ "label": "Recommended IOPS",
+ "width": 0
+ },
+ {
+ "fieldname": "recommended_throughput",
+ "fieldtype": "Int",
+ "label": "Recommended Throughput",
+ "width": 0
+ },
+ {
+ "fieldname": "current_instance_type",
+ "fieldtype": "Data",
+ "label": "Current Instance Type",
+ "width": 0
+ },
+ {
+ "fieldname": "recommended_instance_type",
+ "fieldtype": "Data",
+ "label": "Recommended Instance Type",
+ "width": 0
+ },
+ {
+ "fieldname": "current_usage",
+ "fieldtype": "Data",
+ "label": "Current Usage",
+ "width": 0
+ },
+ {
+ "fieldname": "recommended_usage",
+ "fieldtype": "Data",
+ "label": "Recommended Usage",
+ "width": 0
+ },
+ {
+ "fieldname": "volume_id",
+ "fieldtype": "Data",
+ "label": "Volume ID",
+ "width": 0
+ },
+ {
+ "fieldname": "region",
+ "fieldtype": "Data",
+ "label": "Region",
+ "width": 0
+ },
+ {
+ "fieldname": "server_type",
+ "fieldtype": "Link",
+ "label": "Server Type",
+ "options": "DocType",
+ "width": 0
+ },
+ {
+ "fieldname": "server",
+ "fieldtype": "Dynamic Link",
+ "label": "Server",
+ "options": "server_type",
+ "width": 0
+ },
+ {
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "label": "Team",
+ "options": "Team",
+ "width": 0
+ },
+ {
+ "fieldname": "public",
+ "fieldtype": "Check",
+ "label": "Public",
+ "width": 0
+ },
+ {
+ "fieldname": "currency",
+ "fieldtype": "Link",
+ "label": "Currency",
+ "options": "Currency",
+ "width": 0
+ }
+ ],
+ "creation": "2024-09-10 15:22:38.545636",
+ "disabled": 0,
+ "docstatus": 0,
+ "doctype": "Report",
+ "filters": [
+ {
+ "default": "Compute",
+ "fieldname": "resource_type",
+ "fieldtype": "Select",
+ "label": "Resource Type",
+ "mandatory": 0,
+ "options": "\nCompute\nStorage",
+ "wildcard_filter": 0
+ },
+ {
+ "default": "Rightsize",
+ "fieldname": "action_type",
+ "fieldtype": "Select",
+ "label": "Action Type",
+ "mandatory": 1,
+ "options": "Rightsize\nMigrate to Graviton",
+ "wildcard_filter": 0
+ }
+ ],
+ "idx": 0,
+ "is_standard": "Yes",
+ "json": "",
+ "letterhead": null,
+ "modified": "2024-09-17 15:45:42.606684",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "AWS Rightsizing Recommendation",
+ "owner": "Administrator",
+ "prepared_report": 0,
+ "ref_doctype": "Virtual Machine",
+ "report_name": "AWS Rightsizing Recommendation",
+ "report_type": "Script Report",
+ "roles": [
+ {
+ "role": "System Manager"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/press/press/report/aws_rightsizing_recommendation/aws_rightsizing_recommendation.py b/press/press/report/aws_rightsizing_recommendation/aws_rightsizing_recommendation.py
new file mode 100644
index 00000000000..8cb86a9d6aa
--- /dev/null
+++ b/press/press/report/aws_rightsizing_recommendation/aws_rightsizing_recommendation.py
@@ -0,0 +1,170 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+import json
+
+import boto3
+import frappe
+from frappe.core.utils import find
+from frappe.utils import cint
+
+
+def execute(filters=None):
+ frappe.only_for("System Manager")
+ columns = frappe.get_doc("Report", "AWS Rightsizing Recommendation").get_columns()
+ resource_type = filters.get("resource_type")
+ columns_to_remove = []
+ if resource_type == "Compute":
+ columns_to_remove = [
+ "volume_id",
+ "current_iops",
+ "recommended_iops",
+ "current_throughput",
+ "recommended_throughput",
+ ]
+ elif resource_type == "Storage":
+ columns_to_remove = ["current_instance_type", "recommended_instance_type"]
+ columns = [column for column in columns if column.fieldname not in columns_to_remove]
+ data = get_data(resource_type, filters.get("action_type"))
+ return columns, data
+
+
+def get_data(resource_type, action_type): # noqa: C901
+ settings = frappe.get_single("Press Settings")
+ client = boto3.client(
+ "cost-optimization-hub",
+ region_name="us-east-1",
+ aws_access_key_id=settings.aws_access_key_id,
+ aws_secret_access_key=settings.get_password("aws_secret_access_key"),
+ )
+
+ resource_types = {
+ "Compute": ["Ec2Instance"],
+ "Storage": ["EbsVolume"],
+ }.get(resource_type, ["Ec2Instance", "EbsVolume"])
+
+ action_types = {
+ "Rightsize": ["Rightsize"],
+ "Migrate to Graviton": ["MigrateToGraviton"],
+ }.get(action_type)
+
+ paginator = client.get_paginator("list_recommendations")
+ response_iterator = paginator.paginate(
+ filter={
+ "resourceTypes": resource_types,
+ "actionTypes": action_types,
+ },
+ )
+
+ results = []
+ for response in response_iterator:
+ for row in response["items"]:
+ resource_type = {
+ "Ec2Instance": "Virtual Machine",
+ "EbsVolume": "Virtual Machine Volume",
+ }[row["currentResourceType"]]
+
+ if resource_type == "Virtual Machine":
+ virtual_machine = frappe.get_all(
+ resource_type, {"instance_id": row["resourceId"]}, pluck="name"
+ )
+ elif resource_type == "Virtual Machine Volume":
+ virtual_machine = frappe.get_all(
+ resource_type, {"volume_id": row["resourceId"]}, pluck="parent"
+ )
+
+ if not virtual_machine:
+ # This resource is not managed by Press. Ignore
+ continue
+ virtual_machine = virtual_machine[0]
+
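+ # The virtual machine name series encodes the server role
+ # (f = app server, m = database server, n = proxy)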
+ server_type = {
+ "f": "Server",
+ "m": "Database Server",
+ "n": "Proxy Server",
+ }[frappe.db.get_value("Virtual Machine", virtual_machine, "series")]
+
+ server = frappe.db.get_value(
+ server_type,
+ {"virtual_machine": virtual_machine},
+ ["name", "team", "public"],
+ as_dict=True,
+ )
+
+ if not server:
+ continue
+
+ data = {
+ "resource_type": resource_type,
+ "virtual_machine": virtual_machine,
+ "server_type": server_type,
+ "server": server.name,
+ "team": server.team,
+ "public": server.public,
+ "region": row["region"],
+ "estimated_cost": row["estimatedMonthlyCost"],
+ "estimated_savings": row["estimatedMonthlySavings"],
+ "estimated_savings_percentage": row["estimatedSavingsPercentage"],
+ "current_usage": row["currentResourceSummary"],
+ "recommended_usage": row["recommendedResourceSummary"],
+ "currency": "USD",
+ }
+
+ if resource_type == "Virtual Machine":
+ data["current_instance_type"] = row["currentResourceSummary"]
+ data["recommended_instance_type"] = row["recommendedResourceSummary"]
+ elif resource_type == "Virtual Machine Volume":
+ data["volume_id"] = row["resourceId"]
+ # Splits "99.0 GB Storage/3000.0 IOPS/125.0 MB/s Throughput" into
+ # ["99.0 GB Storage", "3000.0 IOPS", "125.0 MB", "s Throughput"]
+ _, iops, throughput, _ = row["currentResourceSummary"].split("/")
+ data["current_iops"] = cint(iops.split()[0])
+ data["current_throughput"] = cint(throughput.split()[0])
+
+ _, iops, throughput, _ = row["recommendedResourceSummary"].split("/")
+ data["recommended_iops"] = cint(iops.split()[0])
+ data["recommended_throughput"] = cint(throughput.split()[0])
+
+ results.append(data)
+ results.sort(key=lambda x: x["estimated_savings"], reverse=True)
+ return results
+
+
+@frappe.whitelist()
+def rightsize(filters):
+ filters = frappe._dict(json.loads(filters))
+ if filters.resource_type == "Storage":
+ frappe.enqueue(
+ "press.press.report.aws_rightsizing_recommendation.aws_rightsizing_recommendation.rightsize_volumes",
+ filters=filters,
+ queue="long",
+ )
+
+
+def rightsize_volumes(filters):
+ for row in execute(filters)[1]:
+ row = frappe._dict(row)
+
+ machine = frappe.get_doc("Virtual Machine", row.virtual_machine)
+ volume = find(machine.volumes, lambda v: v.volume_id == row.volume_id)
+
+ if not volume:
+ # This volume is not managed by Press. Ignore
+ continue
+
+ # Always downgrade performance
+ iops = min(row.recommended_iops, volume.iops)
+ throughput = min(row.recommended_throughput, volume.throughput)
+
+ # Already at recommended performance. Ignore
+ if volume.iops == iops and volume.throughput == throughput:
+ continue
+
+ try:
+ machine.update_ebs_performance(volume.volume_id, iops, throughput)
+ machine.add_comment(
+ "Comment",
+ f"Rightsized EBS volume {volume.volume_id} from {volume.iops} IOPS and {volume.throughput} MB/s to {iops} IOPS and {throughput} MB/s",
+ )
+ frappe.db.commit()
+ except Exception:
+ frappe.db.rollback()
diff --git a/press/press/report/bench_memory_limits/__init__.py b/press/press/report/bench_memory_limits/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/report/bench_memory_limits/bench_memory_limits.js b/press/press/report/bench_memory_limits/bench_memory_limits.js
new file mode 100644
index 00000000000..d8f84795ced
--- /dev/null
+++ b/press/press/report/bench_memory_limits/bench_memory_limits.js
@@ -0,0 +1,4 @@
+// Copyright (c) 2023, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.query_reports['Bench Memory Limits'] = {};
diff --git a/press/press/report/bench_memory_limits/bench_memory_limits.json b/press/press/report/bench_memory_limits/bench_memory_limits.json
new file mode 100644
index 00000000000..df322bf8e72
--- /dev/null
+++ b/press/press/report/bench_memory_limits/bench_memory_limits.json
@@ -0,0 +1,47 @@
+{
+ "add_total_row": 1,
+ "columns": [],
+ "creation": "2023-10-17 19:23:39.391050",
+ "disabled": 0,
+ "docstatus": 0,
+ "doctype": "Report",
+ "filters": [
+ {
+ "fieldname": "server",
+ "fieldtype": "Link",
+ "label": "Server",
+ "mandatory": 1,
+ "options": "Server",
+ "wildcard_filter": 0
+ }
+ ],
+ "idx": 0,
+ "is_standard": "Yes",
+ "letter_head": "",
+ "letterhead": null,
+ "modified": "2023-10-20 11:26:37.053554",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Bench Memory Limits",
+ "owner": "Administrator",
+ "prepared_report": 0,
+ "query": "\t\tbench_workloads = {}\n\t\tbenches = frappe.get_all(\n\t\t\t\"Bench\",\n\t\t\tfilters={\"server\": self.name, \"status\": \"Active\", \"auto_scale_workers\": True},\n\t\t\tpluck=\"name\",\n\t\t)\n\t\tfor bench_name in benches:\n\t\t\tbench = frappe.get_doc(\"Bench\", bench_name)\n\t\t\tbench_workloads[bench_name] = bench.work_load\n\n\t\ttotal_workload = sum(bench_workloads.values())\n",
+ "ref_doctype": "Bench",
+ "report_name": "Bench Memory Limits",
+ "report_script": "",
+ "report_type": "Script Report",
+ "roles": [
+ {
+ "role": "Press Admin"
+ },
+ {
+ "role": "System Manager"
+ },
+ {
+ "role": "Press Member"
+ },
+ {
+ "role": "Site Manager"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/press/press/report/bench_memory_limits/bench_memory_limits.py b/press/press/report/bench_memory_limits/bench_memory_limits.py
new file mode 100644
index 00000000000..3cf5d02fad3
--- /dev/null
+++ b/press/press/report/bench_memory_limits/bench_memory_limits.py
@@ -0,0 +1,119 @@
+# Copyright (c) 2023, Frappe and contributors
+# For license information, please see license.txt
+
+import frappe
+
+from press.api.server import prometheus_query
+
+
+def execute(filters=None):
+ columns = [
+ {
+ "fieldname": "bench",
+ "label": frappe._("Bench"),
+ "fieldtype": "Link",
+ "options": "Bench",
+ "width": 200,
+ },
+ {
+ "fieldname": "workload",
+ "label": frappe._("Workload"),
+ "fieldtype": "Data",
+ "width": 200,
+ },
+ {
+ "fieldname": "allocated_ram",
+ "label": frappe._("Allocated RAM (based on current workers)"),
+ "fieldtype": "Float",
+ "width": 200,
+ },
+ {
+ "fieldname": "5m_avg_server_ram",
+ "label": frappe._("5m average RAM"),
+ "fieldtype": "Float",
+ "width": 200,
+ },
+ {
+ "fieldname": "6h_avg_server_ram",
+ "label": frappe._("6h average RAM"),
+ "fieldtype": "Float",
+ "width": 200,
+ },
+ {
+ "fieldname": "max_server_ram",
+ "label": frappe._("6h max RAM"),
+ "fieldtype": "Float",
+ "width": 200,
+ },
+ ]
+
+ return columns, get_data(filters)
+
+
+def get_data(filters):
+ server_name = filters.get("server")
+ benches = frappe.get_all(
+ "Bench",
+ filters={
+ "server": server_name,
+ "status": "Active",
+ "auto_scale_workers": True,
+ },
+ pluck="name",
+ )
+ server = frappe.get_doc("Server", server_name)
+ result = []
+ for bench_name in benches:
+ bench = frappe.get_doc("Bench", bench_name)
+
+ gn, bg = bench.allocate_workers(
+ server.workload, server.max_gunicorn_workers, server.max_bg_workers
+ )
+ result.append(
+ {
+ "bench": bench_name,
+ "workload": bench.workload,
+ "allocated_ram": gn * 150 + bg * (3 * 80),
+ }
+ )
+
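+ # container_memory_rss is grouped by container name, which matches the
+ # bench name, so each series can be matched back to a bench row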
+ prom_res = prometheus_query(
+ f'sum(avg_over_time(container_memory_rss{{instance="{server_name}", name=~".+"}}[5m])) by (name)',
+ lambda x: x,
+ "Asia/Kolkata",
+ 60,
+ 60,
+ )["datasets"]
+ for row in result:
+ for prom_row in prom_res:
+ if row["bench"] == prom_row["name"]["name"]:
+ row["5m_avg_server_ram"] = prom_row["values"][-1] / 1024 / 1024
+ break
+
+ prom_res = prometheus_query(
+ f'sum(avg_over_time(container_memory_rss{{instance="{server_name}", name=~".+"}}[6h])) by (name)',
+ lambda x: x,
+ "Asia/Kolkata",
+ 6 * 3600,
+ 60,
+ )["datasets"]
+ for row in result:
+ for prom_row in prom_res:
+ if row["bench"] == prom_row["name"]["name"]:
+ row["6h_avg_server_ram"] = prom_row["values"][-1] / 1024 / 1024
+ break
+
+ prom_res = prometheus_query(
+ f'sum(max_over_time(container_memory_rss{{instance="{server_name}", name=~".+"}}[6h])) by (name)',
+ lambda x: x,
+ "Asia/Kolkata",
+ 6 * 3600,
+ 60,
+ )["datasets"]
+ for row in result:
+ for prom_row in prom_res:
+ if row["bench"] == prom_row["name"]["name"]:
+ row["max_server_ram"] = prom_row["values"][-1] / 1024 / 1024
+ break
+
+ return result
diff --git a/press/press/report/binary_log_browser/binary_log_browser.py b/press/press/report/binary_log_browser/binary_log_browser.py
index b8974ce750c..427e7937942 100644
--- a/press/press/report/binary_log_browser/binary_log_browser.py
+++ b/press/press/report/binary_log_browser/binary_log_browser.py
@@ -1,16 +1,18 @@
# Copyright (c) 2021, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
import frappe
import pytz
import sqlparse
-from press.agent import Agent
+from frappe.core.doctype.access_log.access_log import make_access_log
from frappe.utils import (
get_datetime,
get_datetime_str,
get_system_timezone,
)
-from frappe.core.doctype.access_log.access_log import make_access_log
+
+from press.agent import Agent
try:
from frappe.utils import convert_utc_to_user_timezone
@@ -20,9 +22,7 @@
def execute(filters=None):
frappe.only_for(["System Manager", "Site Manager"])
- filters.database = frappe.get_doc("Site", filters.site).fetch_info()["config"][
- "db_name"
- ]
+ filters.database = frappe.get_doc("Site", filters.site).fetch_info()["config"]["db_name"]
make_access_log(
doctype="Site",
@@ -66,21 +66,15 @@ def get_data(filters):
files = agent.get("database/binary/logs")
- files_in_timespan = get_files_in_timespan(
- files, data["start_datetime"], data["stop_datetime"]
- )
+ files_in_timespan = get_files_in_timespan(files, data["start_datetime"], data["stop_datetime"])
results = []
for file in files_in_timespan:
rows = agent.post(f"database/binary/logs/{file}", data=data)
- for row in rows:
+ for row in rows or []:
if filters.format_queries:
- row["query"] = sqlparse.format(
- row["query"].strip(), keyword_case="upper", reindent=True
- )
- row["timestamp"] = get_datetime_str(
- convert_utc_to_user_timezone(get_datetime(row["timestamp"]))
- )
+ row["query"] = sqlparse.format(row["query"].strip(), keyword_case="upper", reindent=True)
+ row["timestamp"] = get_datetime_str(convert_utc_to_user_timezone(get_datetime(row["timestamp"])))
results.append(row)
if len(results) > data["max_lines"]:
@@ -89,14 +83,9 @@ def get_data(filters):
return results
-def get_files_in_timespan(
- files: list[dict[str, str]], start: str, stop: str
-) -> list[str]:
+def get_files_in_timespan(files: list[dict[str, str]], start: str, stop: str) -> list[str]:
files.sort(key=lambda f: f["modified"])
- start = convert_user_timezone_to_utc(start)
- stop = convert_user_timezone_to_utc(stop)
-
files_in_timespan = []
for file in files:
diff --git a/press/press/report/mariadb_deadlock_browser/mariadb_deadlock_browser.js b/press/press/report/mariadb_deadlock_browser/mariadb_deadlock_browser.js
index 0fbf06feca5..3ceec0ec330 100644
--- a/press/press/report/mariadb_deadlock_browser/mariadb_deadlock_browser.js
+++ b/press/press/report/mariadb_deadlock_browser/mariadb_deadlock_browser.js
@@ -24,10 +24,10 @@ frappe.query_reports['MariaDB Deadlock Browser'] = {
reqd: 1,
},
{
- fieldname: 'max_lines',
- label: __('Max Lines'),
+ fieldname: 'max_log_size',
+ label: __('Max Log Size'),
fieldtype: 'Int',
- default: 4000,
+ default: 500,
},
],
};
diff --git a/press/press/report/mariadb_deadlock_browser/mariadb_deadlock_browser.py b/press/press/report/mariadb_deadlock_browser/mariadb_deadlock_browser.py
index d360374e1ef..3c58a3e5ac3 100644
--- a/press/press/report/mariadb_deadlock_browser/mariadb_deadlock_browser.py
+++ b/press/press/report/mariadb_deadlock_browser/mariadb_deadlock_browser.py
@@ -1,13 +1,224 @@
# Copyright (c) 2023, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
+
+import contextlib
+import re
+from typing import TYPE_CHECKING
+
import frappe
-import pytz
+from elasticsearch import Elasticsearch
from frappe.core.doctype.access_log.access_log import make_access_log
-from frappe.utils import convert_utc_to_timezone, get_datetime, get_system_timezone
+from frappe.utils import get_datetime
+from frappe.utils.password import get_decrypted_password
+
+if TYPE_CHECKING:
+ from datetime import datetime
+
+
+def fetch_mariadb_error_logs(
+ site: str, start_datetime: datetime, end_datetime: datetime, log_size: int
+) -> list[tuple[str, str]]:
+ server = frappe.get_value("Site", site, "server")
+ database_server = frappe.get_value("Server", server, "database_server")
+ log_server = frappe.db.get_single_value("Press Settings", "log_server")
+ if not log_server:
+ return []
+
+ query = {
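+ # Filebeat query: mysql.error entries at log level "Note" containing
+ # "InnoDB:", from this site's database server, within the time range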
+ "bool": {
+ "filter": [
+ {
+ "bool": {
+ "filter": [
+ {
+ "bool": {
+ "minimum_should_match": 1,
+ "should": [{"term": {"host.name": {"value": database_server}}}],
+ }
+ },
+ {
+ "bool": {
+ "minimum_should_match": 1,
+ "should": [{"term": {"event.dataset": {"value": "mysql.error"}}}],
+ }
+ },
+ {
+ "bool": {
+ "minimum_should_match": 1,
+ "should": [{"term": {"log.level": {"value": "Note"}}}],
+ }
+ },
+ {
+ "bool": {
+ "minimum_should_match": 1,
+ "should": [{"match_phrase": {"message": "InnoDB:"}}],
+ }
+ },
+ ]
+ }
+ },
+ {
+ "range": {
+ "@timestamp": {
+ "gte": int(start_datetime.timestamp() * 1000),
+ "lte": int(end_datetime.timestamp() * 1000),
+ }
+ }
+ },
+ ],
+ "must": [],
+ "must_not": [],
+ "should": [],
+ }
+ }
+
+ url = f"https://{log_server}/elasticsearch/"
+ password = get_decrypted_password("Log Server", log_server, "kibana_password")
+ client = Elasticsearch(url, basic_auth=("frappe", password))
+
+ data = client.search(
+ size=log_size,
+ index="filebeat-*",
+ query=query,
+ )
+
+ if not data:
+ return []
+
+ # Group messages by MariaDB thread id; each thread's messages are later
+ # merged in offset order into one full deadlock report
+ log_map = {}
+ log_timestamp = {}
+
+ for record in data.get("hits", {}).get("hits", []):
+ if record["_source"]["mysql"] and record["_source"]["mysql"]["thread_id"]:
+ thread_id = record["_source"]["mysql"]["thread_id"]
+ if thread_id not in log_map:
+ log_map[thread_id] = []
+ log_timestamp[thread_id] = record["_source"]["@timestamp"]
+ # Strip `InnoDB: ` -> 8 characters
+ log_map[thread_id].append((record["_source"]["log"]["offset"], record["_source"]["message"][8:]))
+
+ # merge logs
+ logs = [] # list of tuples (timestamp, log)
+
+ for thread_id in log_map:
+ # sort in order of offset
+ records = sorted(log_map[thread_id], key=lambda x: x[0])
+ records = [x[1] for x in records]
+ logs.append((log_timestamp[thread_id], "".join(records)))
+
+ return logs
+
+
+# Regex for parsing database logs
+# *** (1) TRANSACTION:
+transaction_pattern = re.compile(r"^\*\*\* \(\d+\) TRANSACTION:")
+# TRANSACTION 988653582, ACTIVE 6 sec starting index read
+transaction_id_pattern = re.compile(r"TRANSACTION (\d+),")
+query_pattern = re.compile(r"MariaDB thread id .*\n([\s\S]*)\*\*\* WAITING FOR THIS LOCK TO BE GRANTED")
+actual_transaction_pattern = re.compile(r"\*\*\* WAITING FOR THIS LOCK TO BE GRANTED:\nRECORD LOCKS (.*)\n")
+conflicted_transaction_pattern = re.compile(r"\*\*\* CONFLICTING WITH:\nRECORD LOCKS (.*)\n")
+trx_id_pattern = re.compile(r"trx id (\d+)")
+db_table_pattern = re.compile(r"table `([^`]+)`.`([^`]+)`")
+
+
+class DatabaseTransactionLog:
+ @staticmethod
+ def parse(data: str, database: str):
+ transaction_info = actual_transaction_pattern.search(data).group(1)
+ found_database = db_table_pattern.search(transaction_info).group(1)
+ if database != found_database:
+ return None
+
+ return DatabaseTransactionLog(data)
+
+ def __init__(self, data: str):
+ self.transaction_id = transaction_id_pattern.search(data).group(1)
+ actual_transaction_info = actual_transaction_pattern.search(data).group(1)
+ db_table_info = db_table_pattern.search(actual_transaction_info)
+ self.database = db_table_info.group(1)
+ self.table = db_table_info.group(2)
+ self.query = query_pattern.search(data).group(1)
+
+ conflicted_transaction_info = conflicted_transaction_pattern.search(data).group(1)
+ self.conflicted_transaction_id = trx_id_pattern.search(conflicted_transaction_info).group(1)
+ conflicted_db_table = db_table_pattern.search(conflicted_transaction_info)
+ self.conflicted_table = conflicted_db_table.group(2)
+
+
+def parse_log(log: str, database: str) -> list[DatabaseTransactionLog]:
+ log_lines = log.split("\n")
+ log_lines = [line.strip() for line in log_lines]
+ log_lines = [line for line in log_lines if line != ""]
+ transactions_content = []
+
+ started_transaction_index = None
+ for index, line in enumerate(log_lines):
+ if transaction_pattern.match(line):
+ if started_transaction_index is not None:
+ transactions_content.append("\n".join(log_lines[started_transaction_index:index]))
+ started_transaction_index = index
+
+ if started_transaction_index is not None:
+ transactions_content.append("\n".join(log_lines[started_transaction_index:]))
+
+ transactions = []
+ for transaction_content in transactions_content:
+ with contextlib.suppress(Exception):
+ trx = DatabaseTransactionLog.parse(transaction_content, database)
+ if trx is not None:
+ transactions.append(trx)
-from press.agent import Agent
+ return transactions
+
+def deadlock_summary(transactions: list[DatabaseTransactionLog]) -> list[dict]:
+ transaction_map: dict[str, DatabaseTransactionLog] = {}
+ for transaction in transactions:
+ transaction_map[transaction.transaction_id] = transaction
+
+ deadlock_transaction_ids = {}
+
+ for transaction in transactions:
+ # A deadlock usually produces two records: one for query A blocked by
+ # query B, and another for query B blocked by query A,
+ # so we record only one instance of each deadlock pair
+ if (
+ transaction.conflicted_transaction_id
+ and (
+ transaction.conflicted_transaction_id not in deadlock_transaction_ids
+ or deadlock_transaction_ids[transaction.conflicted_transaction_id]
+ != transaction.transaction_id
+ )
+ and transaction.transaction_id != transaction.conflicted_transaction_id
+ ):
+ deadlock_transaction_ids[transaction.transaction_id] = transaction.conflicted_transaction_id
+
+ deadlock_infos = []
+ for transaction_id in deadlock_transaction_ids:
+ if transaction_id not in transaction_map:
+ continue
+ # Resolve the transaction before checking for its conflicting counterpart
+ transaction = transaction_map[transaction_id]
+ if transaction.conflicted_transaction_id not in transaction_map:
+ continue
+ conflicted_transaction = transaction_map[transaction.conflicted_transaction_id]
+ deadlock_infos.append(
+ {
+ "txn_id": transaction.transaction_id,
+ "table": transaction.table,
+ "conflicted_txn_id": transaction.conflicted_transaction_id,
+ "conflicted_table": transaction.conflicted_table,
+ "query": transaction.query,
+ "conflicted_query": conflicted_transaction.query,
+ }
+ )
+ return deadlock_infos
+
+
+# Report
COLUMNS = [
{
"fieldname": "timestamp",
@@ -15,18 +226,34 @@
"fieldtype": "Datetime",
"width": 160,
},
+ {
+ "fieldname": "table",
+ "label": "Table",
+ "fieldtype": "Data",
+ "width": 180,
+ },
+ {
+ "fieldname": "transaction_id",
+ "label": "Transaction",
+ "fieldtype": "Data",
+ "width": 120,
+ },
{
"fieldname": "query",
"label": "Query",
"fieldtype": "Data",
- "width": 1200,
+ "width": 1400,
},
]
def execute(filters=None):
- frappe.only_for(["System Manager", "Site Manager"])
+ frappe.only_for(["System Manager", "Site Manager", "Press Admin", "Press Member"])
filters.database = frappe.db.get_value("Site", filters.site, "database_name")
+ if not filters.database:
+ frappe.throw(
+ f"Database name not found for site {filters.site}\nRun `Sync Info` from Site doctype actions to set the database name.\nThen retry again."
+ )
make_access_log(
doctype="Site",
@@ -35,77 +262,35 @@ def execute(filters=None):
report_name="MariaDB Deadlock Browser",
filters=filters,
)
- data = get_data(filters)
- return COLUMNS, data
-
-
-def convert_user_timezone_to_utc(datetime_obj):
- timezone = pytz.timezone(get_system_timezone())
- datetime_obj = get_datetime(datetime_obj)
- return timezone.localize(datetime_obj).astimezone(pytz.utc).isoformat()
-
-
-def get_data(filters):
- application_server = frappe.db.get_value("Site", filters.site, "server")
- database_server_name = frappe.db.get_value(
- "Server", application_server, "database_server"
+ records = fetch_mariadb_error_logs(
+ filters.site,
+ get_datetime(filters.start_datetime),
+ get_datetime(filters.stop_datetime),
+ filters.max_log_size,
)
+ data = []
- database_server = frappe.get_doc("Database Server", database_server_name)
-
- agent = Agent(database_server.name, "Database Server")
-
- data = {
- "private_ip": database_server.private_ip,
- "mariadb_root_password": database_server.get_password("mariadb_root_password"),
- "database": filters.database,
- "start_datetime": convert_user_timezone_to_utc(filters.start_datetime),
- "stop_datetime": convert_user_timezone_to_utc(filters.stop_datetime),
- "max_lines": filters.max_lines or 1000,
- }
-
- results = agent.post("database/deadlocks", data=data)
-
- return post_process(results)
-
+ for record in records:
+ timestamp = record[0]
+ transactions = parse_log(record[1], filters.database)
+ summaries = deadlock_summary(transactions)
+ for summary in summaries:
+ data.append(
+ {
+ "timestamp": timestamp,
+ "table": summary["table"],
+ "transaction_id": summary["txn_id"],
+ "query": summary["query"],
+ }
+ )
+ data.append(
+ {
+ "timestamp": "",
+ "table": summary["conflicted_table"],
+ "transaction_id": summary["conflicted_txn_id"],
+ "query": summary["conflicted_query"],
+ }
+ )
+ data.append({}) # empty line to separate records
-def post_process(rows):
- results = []
-
- for idx, row in enumerate(rows):
- row["timestamp"] = convert_utc_to_timezone(
- frappe.utils.get_datetime(row["ts"]).replace(tzinfo=None), get_system_timezone()
- )
- results.append(row)
-
- # Two sequential queries are part of same "deadlock", so add empty line for readability
- if idx % 2:
- results.append({})
-
- return results
-
-
-"""
-Deadlock table schema
-
-+-----------+----------------------+------+-----+---------------------+-------+
-| Field | Type | Null | Key | Default | Extra |
-+-----------+----------------------+------+-----+---------------------+-------+
-| ts | timestamp | NO | PRI | current_timestamp() | |
-| user | char(16) | NO | | NULL | |
-| query | text | NO | | NULL | |
-| victim | tinyint(3) unsigned | NO | | NULL | |
-| idx | char(64) | NO | | NULL | |
-| lock_type | char(16) | NO | | NULL | |
-| lock_mode | char(1) | NO | | NULL | |
-| server | char(20) | NO | PRI | NULL | |
-| thread | int(10) unsigned | NO | PRI | NULL | |
-| txn_id | bigint(20) unsigned | NO | | NULL | |
-| txn_time | smallint(5) unsigned | NO | | NULL | |
-| hostname | char(20) | NO | | NULL | |
-| ip | char(15) | NO | | NULL | |
-| db | char(64) | NO | | NULL | |
-| tbl | char(64) | NO | | NULL | |
-| wait_hold | char(1) | NO | | NULL | |
-+-----------+----------------------+------+-----+---------------------+-------+
-"""
+ return COLUMNS, data
diff --git a/press/press/report/mariadb_locks_list/__init__.py b/press/press/report/mariadb_locks_list/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/report/mariadb_locks_list/mariadb_locks_list.js b/press/press/report/mariadb_locks_list/mariadb_locks_list.js
new file mode 100644
index 00000000000..03ff583fefb
--- /dev/null
+++ b/press/press/report/mariadb_locks_list/mariadb_locks_list.js
@@ -0,0 +1,21 @@
+// Copyright (c) 2024, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.query_reports['MariaDB Locks List'] = {
+ after_refresh(report) {
+ let should_poll = report.get_filter_value('poll');
+ if (!should_poll || report.polling_interval) return;
+
+ frappe.toast(
+ 'This report will auto-refresh every 5 seconds until a lock wait is found.',
+ );
+
+ report.polling_interval = setInterval(() => {
+ if (!report.data.length) {
+ report.refresh();
+ } else {
+ clearInterval(report.polling_interval);
+ }
+ }, 5000);
+ },
+};
diff --git a/press/press/report/mariadb_locks_list/mariadb_locks_list.json b/press/press/report/mariadb_locks_list/mariadb_locks_list.json
new file mode 100644
index 00000000000..e186acd5fc1
--- /dev/null
+++ b/press/press/report/mariadb_locks_list/mariadb_locks_list.json
@@ -0,0 +1,43 @@
+{
+ "add_total_row": 0,
+ "columns": [],
+ "creation": "2024-03-24 11:50:26.510957",
+ "disabled": 0,
+ "docstatus": 0,
+ "doctype": "Report",
+ "filters": [
+ {
+ "fieldname": "database_server",
+ "fieldtype": "Link",
+ "label": "Database Server",
+ "mandatory": 1,
+ "options": "Database Server",
+ "wildcard_filter": 0
+ },
+ {
+ "default": "0",
+ "fieldname": "poll",
+ "fieldtype": "Check",
+ "label": "Poll every 5 seconds",
+ "mandatory": 0,
+ "wildcard_filter": 0
+ }
+ ],
+ "idx": 0,
+ "is_standard": "Yes",
+ "letterhead": null,
+ "modified": "2024-03-24 12:46:56.826427",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "MariaDB Locks List",
+ "owner": "Administrator",
+ "prepared_report": 0,
+ "ref_doctype": "Site",
+ "report_name": "MariaDB Locks List",
+ "report_type": "Script Report",
+ "roles": [
+ {
+ "role": "System Manager"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/press/press/report/mariadb_locks_list/mariadb_locks_list.py b/press/press/report/mariadb_locks_list/mariadb_locks_list.py
new file mode 100644
index 00000000000..5f804d09262
--- /dev/null
+++ b/press/press/report/mariadb_locks_list/mariadb_locks_list.py
@@ -0,0 +1,98 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+import frappe
+
+from press.agent import Agent
+
+
+def execute(filters=None):
+ frappe.only_for(("System Manager", "Support Team"))
+ data = get_data(filters)
+ return get_columns(), data
+
+
+def get_data(filters):
+ server = frappe.get_doc("Database Server", filters.database_server)
+ agent = Agent(server.name, "Database Server")
+
+ data = {
+ "private_ip": server.private_ip,
+ "mariadb_root_password": server.get_password("mariadb_root_password"),
+ }
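+ # The agent's "database/locks" endpoint uses these credentials to query
+ # the target MariaDB server for current lock waits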
+ return agent.post("database/locks", data=data)
+
+
+def get_columns():
+ return [
+ {
+ "fieldname": "lock_id",
+ "label": "Lock ID",
+ "fieldtype": "Data",
+ "width": 150,
+ },
+ {
+ "fieldname": "trx_id",
+ "label": "Transaction ID",
+ "fieldtype": "Data",
+ "width": 70,
+ },
+ {
+ "fieldname": "trx_query",
+ "label": "Query",
+ "fieldtype": "Data",
+ "width": 500,
+ },
+ {
+ "fieldname": "lock_mode",
+ "label": "Lock Mode",
+ "fieldtype": "Data",
+ "width": 70,
+ },
+ {
+ "fieldname": "lock_type",
+ "label": "Lock Type",
+ "fieldtype": "Data",
+ "width": 150,
+ },
+ {
+ "fieldname": "lock_table",
+ "label": "Lock Table",
+ "fieldtype": "Data",
+ "width": 150,
+ },
+ {
+ "fieldname": "lock_index",
+ "label": "Lock Index",
+ "fieldtype": "Data",
+ "width": 150,
+ },
+ {
+ "fieldname": "trx_state",
+ "label": "Transaction State",
+ "fieldtype": "Data",
+ "width": 150,
+ },
+ {
+ "fieldname": "trx_operation_state",
+ "label": "Transaction Operation State",
+ "fieldtype": "Data",
+ "width": 150,
+ },
+ {
+ "fieldname": "trx_started",
+ "label": "Transaction Started At",
+ "fieldtype": "Data", # Avoid timezones, we only need to compare two txn
+ "width": 150,
+ },
+ {
+ "fieldname": "trx_rows_locked",
+ "label": "Rows Locked",
+ "fieldtype": "Int",
+ },
+ {
+ "fieldname": "trx_rows_modified",
+ "label": "Rows Modified",
+ "fieldtype": "Int",
+ },
+ ]
diff --git a/press/press/report/mariadb_process_list/mariadb_process_list.js b/press/press/report/mariadb_process_list/mariadb_process_list.js
index 1ab6ccda38c..1eb1d45738b 100644
--- a/press/press/report/mariadb_process_list/mariadb_process_list.js
+++ b/press/press/report/mariadb_process_list/mariadb_process_list.js
@@ -11,7 +11,7 @@ frappe.query_reports['MariaDB Process List'] = {
{
fieldtype: 'Int',
default: 120,
- label: __('Kill Proceeses Running Longer Than (Seconds)'),
+ label: __('Kill Processes Running Longer Than (Seconds)'),
fieldname: 'kill_threshold',
},
],
diff --git a/press/press/report/mariadb_process_list/mariadb_process_list.py b/press/press/report/mariadb_process_list/mariadb_process_list.py
index 1029fc02775..57416b91fcd 100644
--- a/press/press/report/mariadb_process_list/mariadb_process_list.py
+++ b/press/press/report/mariadb_process_list/mariadb_process_list.py
@@ -2,11 +2,11 @@
# For license information, please see license.txt
import frappe
-
import sqlparse
-from press.agent import Agent
from frappe.utils import cint
+from press.agent import Agent
+
def execute(filters=None):
frappe.only_for(["System Manager", "Site Manager"])
diff --git a/press/press/report/mariadb_slow_queries/mariadb_slow_queries.js b/press/press/report/mariadb_slow_queries/mariadb_slow_queries.js
index 88e83850a70..c2e60c43801 100644
--- a/press/press/report/mariadb_slow_queries/mariadb_slow_queries.js
+++ b/press/press/report/mariadb_slow_queries/mariadb_slow_queries.js
@@ -10,36 +10,74 @@ frappe.query_reports['MariaDB Slow Queries'] = {
fieldtype: 'Link',
options: 'Site',
reqd: 1,
+ get_query: function () {
+ return {
+ filters: { status: ["!=", "Archived"] },
+ };
+ },
},
{
fieldname: 'start_datetime',
label: __('Start From'),
fieldtype: 'Datetime',
+ default: frappe.datetime.add_days(frappe.datetime.now_datetime(), -1),
reqd: 1,
},
{
fieldname: 'stop_datetime',
label: __('End At'),
fieldtype: 'Datetime',
+ default: frappe.datetime.now_datetime(),
reqd: 1,
},
{
- fieldname: 'search_pattern',
- label: __('Search Pattern'),
- fieldtype: 'Data',
- default: '.*',
- reqd: 1,
- },
- {
- fieldname: 'format_queries',
- label: __('Format Queries'),
+ fieldname: 'normalize_queries',
+ label: __('Normalize Queries'),
fieldtype: 'Check',
+ default: 1,
},
{
fieldname: 'max_lines',
label: __('Max Lines'),
- default: 100,
+ default: 10000,
fieldtype: 'Int',
},
+ {
+ fieldname: 'search_pattern',
+ label: __('Search Pattern'),
+ fieldtype: 'Data',
+ default: '.*',
+ },
],
+ get_datatable_options(options) {
+ return Object.assign(options, {
+ checkboxColumn: true,
+ });
+ },
+
+ onload(report) {
+ report.page.add_inner_button(__('Add Selected Indexes'), () => {
+ let site = report.get_values().site;
+ let checked_rows =
+ frappe.query_report.datatable.rowmanager.getCheckedRows();
+ let indexes = checked_rows
+ .map((i) => frappe.query_report.data[i])
+ .map((row) => row.suggested_index)
+ .filter(Boolean);
+
+ if (!indexes.length) {
+ frappe.throw(__('Please select rows to create indexes'));
+ }
+
+ frappe.confirm('Are you sure you want to add these indexes?', () => {
+ frappe.xcall(
+ 'press.press.report.mariadb_slow_queries.mariadb_slow_queries.add_suggested_index',
+ {
+ indexes,
+ name: site,
+ },
+ );
+ });
+ });
+ },
};
diff --git a/press/press/report/mariadb_slow_queries/mariadb_slow_queries.json b/press/press/report/mariadb_slow_queries/mariadb_slow_queries.json
index 5897aaffd9d..d32316c08f3 100644
--- a/press/press/report/mariadb_slow_queries/mariadb_slow_queries.json
+++ b/press/press/report/mariadb_slow_queries/mariadb_slow_queries.json
@@ -1,15 +1,15 @@
{
"add_total_row": 0,
"columns": [],
- "creation": "2021-11-01 19:16:08.357082",
- "disable_prepared_report": 0,
+ "creation": "2024-12-23 11:36:40.301426",
"disabled": 0,
"docstatus": 0,
"doctype": "Report",
"filters": [],
"idx": 0,
"is_standard": "Yes",
- "modified": "2022-11-08 17:10:41.382656",
+ "letterhead": null,
+ "modified": "2024-12-23 11:36:40.301426",
"modified_by": "Administrator",
"module": "Press",
"name": "MariaDB Slow Queries",
@@ -25,5 +25,6 @@
{
"role": "Site Manager"
}
- ]
+ ],
+ "timeout": 0
}
\ No newline at end of file
diff --git a/press/press/report/mariadb_slow_queries/mariadb_slow_queries.py b/press/press/report/mariadb_slow_queries/mariadb_slow_queries.py
index 04f43fb139c..0210111c464 100644
--- a/press/press/report/mariadb_slow_queries/mariadb_slow_queries.py
+++ b/press/press/report/mariadb_slow_queries/mariadb_slow_queries.py
@@ -1,22 +1,21 @@
# Copyright (c) 2021, Frappe and contributors
# For license information, please see license.txt
+from __future__ import annotations
+
+import re
+from collections import defaultdict
+
import frappe
-import pytz
import requests
import sqlparse
-
-from frappe.utils import (
- get_datetime,
- convert_utc_to_timezone,
- get_system_timezone,
-)
from frappe.core.doctype.access_log.access_log import make_access_log
+from frappe.utils import convert_utc_to_timezone, get_system_timezone
from frappe.utils.password import get_decrypted_password
def execute(filters=None):
- frappe.only_for(["System Manager", "Site Manager"])
+ frappe.only_for(["System Manager", "Site Manager", "Press Admin", "Press Member"])
filters.database = frappe.db.get_value("Site", filters.site, "database_name")
make_access_log(
@@ -60,32 +59,51 @@ def execute(filters=None):
},
]
+ if filters.normalize_queries:
+ columns = [c for c in columns if c["fieldname"] not in ("timestamp",)]
+ columns.append(
+ {
+ "fieldname": "count",
+ "label": frappe._("Count"),
+ "fieldtype": "Int",
+ },
+ )
+ columns.append(
+ {
+ "fieldname": "example",
+ "label": frappe._("Example Query"),
+ "fieldtype": "Data",
+ "width": 1200,
+ },
+ )
+
data = get_data(filters)
return columns, data
def get_data(filters):
- def convert_user_timezone_to_utc(datetime_obj):
- timezone = pytz.timezone(get_system_timezone())
- datetime_obj = get_datetime(datetime_obj)
- return timezone.localize(datetime_obj).astimezone(pytz.utc).isoformat()
+ from press.utils import convert_user_timezone_to_utc
rows = get_slow_query_logs(
filters.database,
convert_user_timezone_to_utc(filters.start_datetime),
- convert_user_timezone_to_utc(filters.end_datetime),
+ convert_user_timezone_to_utc(filters.stop_datetime),
filters.search_pattern,
int(filters.max_lines) or 100,
)
for row in rows:
- if filters.format_queries:
- row["query"] = sqlparse.format(
- row["query"].strip(), keyword_case="upper", reindent=True
- )
row["timestamp"] = convert_utc_to_timezone(
frappe.utils.get_datetime(row["timestamp"]).replace(tzinfo=None),
get_system_timezone(),
)
+
+ # Keep only DML statements; drops `SET` and other administrative queries
+ dml_stmt = ("select", "update", "delete", "insert")
+ rows = [x for x in rows if x["query"].lower().lstrip().startswith(dml_stmt)]
+
+ if filters.normalize_queries:
+ rows = summarize_by_query(rows)
+
return rows
@@ -110,10 +128,8 @@ def get_slow_query_logs(database, start_datetime, end_datetime, search_pattern,
"size": size,
}
- if search_pattern:
- query["query"]["bool"]["filter"].append(
- {"regexp": {"mysql.slowlog.query": search_pattern}}
- )
+ if search_pattern and search_pattern != ".*":
+ query["query"]["bool"]["filter"].append({"regexp": {"mysql.slowlog.query": search_pattern}})
response = requests.post(url, json=query, auth=("frappe", password)).json()
@@ -124,3 +140,53 @@ def get_slow_query_logs(database, start_datetime, end_datetime, search_pattern,
data["duration"] = d["_source"].get("event", {}).get("duration", 0) / 1e9
out.append(data)
return out
+
+
+def normalize_query(query: str) -> str:
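+ # Replace literals with "?" so queries that differ only in their values
+ # collapse into one shape, e.g. WHERE name = 'x' LIMIT 5 -> WHERE name = ? LIMIT ?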
+ q = sqlparse.parse(query)[0]
+ for token in q.flatten():
+ token_type = str(token.ttype)
+ if "Token.Literal" in token_type or token_type == "Token.Keyword.Order":
+ token.value = "?"
+
+ # Format query consistently so identical queries can be matched
+ q = format_query(q, strip_comments=True)
+
+ # Transform IN parts like this: IN (?, ?, ?) -> IN (?)
+ return re.sub(r" IN \(\?[\s\n\?\,]*\)", " IN (?)", q, flags=re.IGNORECASE)
+
+
+def format_query(q, strip_comments=False):
+ return sqlparse.format(
+ str(q).strip(),
+ keyword_case="upper",
+ reindent=True,
+ strip_comments=strip_comments,
+ )
+
+
+def summarize_by_query(data):
+ queries = defaultdict(lambda: defaultdict(float))
+ for row in data:
+ query = row["query"]
+ if "SQL_NO_CACHE" in query and "WHERE" not in query:
+ # These are mysqldump queries; there's no real way to optimize them since they dump the entire table.
+ continue
+
+ normalized_query = normalize_query(query)
+ entry = queries[normalized_query]
+ entry["count"] += 1
+ entry["query"] = normalized_query
+ entry["duration"] += row["duration"]
+ entry["rows_examined"] += row["rows_examined"]
+ entry["rows_sent"] += row["rows_sent"]
+ entry["example"] = query
+
+ result = list(queries.values())
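+ # duration is already summed per query shape, so weighting by count
+ # again further ranks frequently repeated slow queries higher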
+ result.sort(key=lambda r: r["duration"] * r["count"], reverse=True)
+
+ return result
+
+
+def get_doctype_name(table_name: str) -> str:
+ return table_name.removeprefix("tab")
diff --git a/press/press/report/marketplace_app_repository_visibility/__init__.py b/press/press/report/marketplace_app_repository_visibility/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/report/marketplace_app_repository_visibility/marketplace_app_repository_visibility.js b/press/press/report/marketplace_app_repository_visibility/marketplace_app_repository_visibility.js
new file mode 100644
index 00000000000..1923e74ae3b
--- /dev/null
+++ b/press/press/report/marketplace_app_repository_visibility/marketplace_app_repository_visibility.js
@@ -0,0 +1,19 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.query_reports['Marketplace App Repository Visibility'] = {
+ filters: [],
+ onload: async function (report) {
+ report.page.add_inner_button(__('Send Email to Developers'), () => {
+ frappe.confirm('Are you sure you want to send out the e-mails?', () => {
+ frappe.xcall(
+ 'press.press.report.marketplace_app_repository_visibility.marketplace_app_repository_visibility.send_emails',
+ {
+ columns: JSON.stringify(report.columns),
+ data: JSON.stringify(report.data),
+ },
+ );
+ });
+ });
+ },
+};
diff --git a/press/press/report/marketplace_app_repository_visibility/marketplace_app_repository_visibility.json b/press/press/report/marketplace_app_repository_visibility/marketplace_app_repository_visibility.json
new file mode 100644
index 00000000000..13193bd6e8f
--- /dev/null
+++ b/press/press/report/marketplace_app_repository_visibility/marketplace_app_repository_visibility.json
@@ -0,0 +1,31 @@
+{
+ "add_total_row": 0,
+ "columns": [],
+ "creation": "2025-01-21 17:35:11.471086",
+ "disabled": 0,
+ "docstatus": 0,
+ "doctype": "Report",
+ "filters": [],
+ "idx": 0,
+ "is_standard": "Yes",
+ "letterhead": null,
+ "modified": "2025-01-21 20:53:27.388232",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Marketplace App Repository Visibility",
+ "owner": "Administrator",
+ "prepared_report": 1,
+ "query": "SELECT \n ma.name AS app_name,\n mav.version AS version,\n mav.source AS source,\n asrc.repository_url AS repository_url,\n asrc.branch AS branch\nFROM \n `tabMarketplace App` ma\nJOIN \n `tabMarketplace App Version` mav ON ma.name = mav.parent\nJOIN \n `tabApp Source` asrc ON mav.source = asrc.name\n",
+ "ref_doctype": "Marketplace App",
+ "report_name": "Marketplace App Repository Visibility",
+ "report_type": "Script Report",
+ "roles": [
+ {
+ "role": "System Manager"
+ },
+ {
+ "role": "Press Admin"
+ }
+ ],
+ "timeout": 3000
+}
diff --git a/press/press/report/marketplace_app_repository_visibility/marketplace_app_repository_visibility.py b/press/press/report/marketplace_app_repository_visibility/marketplace_app_repository_visibility.py
new file mode 100644
index 00000000000..5921d52f09b
--- /dev/null
+++ b/press/press/report/marketplace_app_repository_visibility/marketplace_app_repository_visibility.py
@@ -0,0 +1,115 @@
+import json
+
+import frappe
+import requests
+
+
+def send_developer_email(email, app_name, repository_url):
+ dev = frappe.get_doc("User", {"email": email})
+ developer_name = dev.full_name
+ email_args = {
+ "recipients": email,
+ "subject": "Frappe Cloud: Make your app's GitHub Repository Public",
+ "template": "marketplace_app_visibility",
+ "args": {
+ "developer_name": developer_name,
+ "app_name": app_name,
+ "repository_url": repository_url,
+ },
+ }
+ frappe.enqueue(method=frappe.sendmail, queue="short", timeout=300, **email_args)
+
+
+@frappe.whitelist()
+def send_emails(columns, data):
+ frappe.only_for("System Manager")
+ data = json.loads(data)
+ for row in data:
+ visibility = row.get("visibility")
+ if visibility != "Private":
+ continue
+ app_name = row.get("app_name")
+ repository_url = row.get("repository_url")
+ email = row.get("team")
+ send_developer_email(email, app_name, repository_url)
+
+
+def check_repository_visibility(repository_url, personal_access_token):
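+ # e.g. "https://github.com/frappe/press.git" -> owner "frappe", repo "press"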
+ try:
+ repo_parts = repository_url.split("github.com/")[1].removesuffix(".git").split("/")
+ owner = repo_parts[0]
+ repo_name = repo_parts[1]
+ except IndexError:
+ return "Error: Invalid repository URL format."
+
+ api_url = f"https://api.github.com/repos/{owner}/{repo_name}"
+
+ headers = {"Authorization": f"token {personal_access_token}"}
+
+ try:
+ response = requests.get(api_url, headers=headers)
+
+ if response.status_code == 200:
+ repo_data = response.json()
+ if repo_data.get("private"):
+ return "Private"
+ return "Public"
+ # 404 (or any other failure) means the token can't see the repo,
+ # so treat it as private
+ return "Private"
+ except Exception:
+ return "Error"
+
+
+def execute(filters=None):
+ frappe.only_for("System Manager")
+
+ columns = [
+ {"fieldname": "app_name", "label": "Application Name", "fieldtype": "Data", "width": 200},
+ {"fieldname": "team", "label": "Team", "fieldtype": "Data", "width": 200},
+ {"fieldname": "repository_url", "label": "Repository URL", "fieldtype": "Data", "width": 300},
+ {
+ "fieldname": "visibility",
+ "label": "Visibility",
+ "fieldtype": "Data",
+ "width": 100,
+ },
+ ]
+
+ data = frappe.db.sql(
+ """
+ SELECT
+ ma.name AS app_name,
+ t.user AS team,
+ asrc.repository_url AS repository_url
+ FROM
+ `tabMarketplace App` ma
+ JOIN
+ `tabMarketplace App Version` mav ON ma.name = mav.parent
+ JOIN
+ `tabApp Source` asrc ON mav.source = asrc.name
+ JOIN
+ `tabTeam` t ON ma.team = t.name
+ WHERE
+ asrc.enabled = 1 AND
+ ma.status = 'Published'
+ GROUP BY
+ repository_url
+ """,
+ as_dict=True,
+ )
+	personal_access_token = frappe.db.get_value("Press Settings", None, "github_pat_token")
+
+ visibility_cache = {}
+ for row in data:
+ repo_url = row["repository_url"]
+ # Check if the visibility status is already cached for this repository URL
+ if repo_url in visibility_cache:
+ row["visibility"] = visibility_cache[repo_url]
+ else:
+ # Check visibility status and cache it
+ visibility_status = check_repository_visibility(repo_url, personal_access_token)
+ row["visibility"] = visibility_status
+ # Store the result in the cache for future reference
+ visibility_cache[repo_url] = visibility_status
+ return columns, data
diff --git a/press/press/report/payment_partner/__init__.py b/press/press/report/payment_partner/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/report/payment_partner/payment_partner.js b/press/press/report/payment_partner/payment_partner.js
new file mode 100644
index 00000000000..3b5c1bc19ea
--- /dev/null
+++ b/press/press/report/payment_partner/payment_partner.js
@@ -0,0 +1,43 @@
+// Copyright (c) 2024, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.query_reports['Payment Partner'] = {
+ filters: [
+ {
+ fieldname: 'from_date',
+ label: __('From Date'),
+ fieldtype: 'Date',
+ default: frappe.datetime.add_days(frappe.datetime.get_today(), -30),
+ },
+ {
+ fieldname: 'to_date',
+ label: __('To Date'),
+ fieldtype: 'Date',
+ default: frappe.datetime.get_today(),
+ },
+ {
+ fieldname: 'team',
+ label: __('Team'),
+ fieldtype: 'Link',
+ options: 'Team',
+ },
+
+ {
+ fieldname: 'payment_partner',
+ label: __('Payment Partner'),
+ fieldtype: 'Link',
+ options: 'Team',
+ },
+ {
+ fieldname: 'payment_gateway',
+ label: __('Payment Gateway'),
+ fieldtype: 'Link',
+ options: 'Payment Gateway',
+ },
+ {
+ fieldname: 'submitted_to_frappe',
+ label: __('Submitted to Frappe'),
+ fieldtype: 'Check',
+ },
+ ],
+};
diff --git a/press/press/report/payment_partner/payment_partner.json b/press/press/report/payment_partner/payment_partner.json
new file mode 100644
index 00000000000..051dd7f3783
--- /dev/null
+++ b/press/press/report/payment_partner/payment_partner.json
@@ -0,0 +1,29 @@
+{
+ "add_total_row": 1,
+ "columns": [],
+ "creation": "2024-11-25 15:50:14.609170",
+ "disabled": 0,
+ "docstatus": 0,
+ "doctype": "Report",
+ "filters": [],
+ "idx": 0,
+ "is_standard": "Yes",
+ "letterhead": null,
+ "modified": "2024-11-25 15:50:14.609170",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Payment Partner",
+ "owner": "Administrator",
+ "prepared_report": 0,
+ "ref_doctype": "Payment Partner Transaction",
+ "report_name": "Payment Partner",
+ "report_type": "Script Report",
+ "roles": [
+ {
+ "role": "System Manager"
+ },
+ {
+ "role": "Guest"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/press/press/report/payment_partner/payment_partner.py b/press/press/report/payment_partner/payment_partner.py
new file mode 100644
index 00000000000..40989963908
--- /dev/null
+++ b/press/press/report/payment_partner/payment_partner.py
@@ -0,0 +1,126 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+import frappe
+from frappe import _
+
+
+def execute(filters=None):
+ columns = get_columns()
+ data = get_data(filters)
+ return columns, data
+
+
+def get_columns():
+ return [
+ {
+ "label": _("Transaction ID"),
+ "fieldname": "name",
+ "fieldtype": "Link",
+ "options": "Payment Partner Transaction",
+ "width": 100,
+ },
+ {"label": _("Team"), "fieldname": "team", "fieldtype": "Link", "options": "Team", "width": 150},
+ {"label": _("Posting Date"), "fieldname": "posting_date", "fieldtype": "Date", "width": 120},
+ {
+ "label": _("Payment Gateway"),
+ "fieldname": "payment_gateway",
+ "fieldtype": "Link",
+ "options": "Payment Gateway",
+ "width": 150,
+ },
+ {
+ "label": _("FC Amount"),
+ "fieldname": "amount",
+ "fieldtype": "Currency",
+ "options": "currency",
+ "width": 120,
+ },
+ {
+ "label": _("Actual Amount"),
+ "fieldname": "actual_amount",
+ "fieldtype": "Currency",
+ "options": "actual_currency",
+ "width": 120,
+ },
+ {"label": _("Exchange Rate"), "fieldname": "exchange_rate", "fieldtype": "Float", "width": 100},
+ {
+ "label": _("Payment Partner"),
+ "fieldname": "payment_partner",
+ "fieldtype": "Link",
+ "options": "Team",
+ "width": 150,
+ },
+ {
+ "label": _("Submitted To Frappe"),
+ "fieldname": "submitted_to_frappe",
+ "fieldtype": "Check",
+ "width": 150,
+ },
+ {
+ "label": _("Actual Currency"),
+ "fieldname": "actual_currency",
+ "fieldtype": "Link",
+ "options": "Currency",
+ "width": 100,
+ "hidden": 1,
+ },
+ {
+ "label": _("Currency"),
+ "fieldname": "currency",
+ "fieldtype": "Link",
+ "options": "Currency",
+ "width": 100,
+ "hidden": 1,
+ },
+ ]
+
+
+def get_data(filters):
+ if filters.from_date > filters.to_date:
+ frappe.throw(_("From Date cannot be after To Date"))
+
+ payment_record = frappe.qb.DocType("Payment Partner Transaction")
+
+ query = (
+ frappe.qb.from_(payment_record)
+ .select(
+ "name",
+ "team",
+ "payment_gateway",
+ "payment_partner",
+ "amount",
+ "actual_amount",
+ "submitted_to_frappe",
+ "posting_date",
+ "actual_currency",
+ "exchange_rate",
+ )
+ .where(payment_record.docstatus == 1)
+ )
+
+ query = apply_filters(query, filters, payment_record)
+ data = query.run(as_dict=True)
+ for record in data:
+ record["currency"] = "USD"
+
+ return data
+
+
+def apply_filters(query, filters, payment_record):
+ doc_status = {"Draft": 0, "Submitted": 1, "Cancelled": 2}
+ filter_map = {
+ "from_date": lambda q, v: q.where(payment_record.posting_date >= v),
+ "to_date": lambda q, v: q.where(payment_record.posting_date <= v),
+ "team": lambda q, v: q.where(payment_record.team == v),
+ "payment_partner": lambda q, v: q.where(payment_record.payment_partner == v),
+ "payment_gateway": lambda q, v: q.where(payment_record.payment_gateway == v),
+ "submitted_to_frappe": lambda q, v: q.where(payment_record.submitted_to_frappe == v),
+ "docstatus": lambda q, v: q.where(payment_record.docstatus == doc_status.get(v, 0)),
+ }
+
+ for key, value in filters.items():
+ if key in filter_map:
+ query = filter_map[key](query, value)
+
+ return query
diff --git a/press/press/report/press_endpoints_audit/__init__.py b/press/press/report/press_endpoints_audit/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/press/report/press_endpoints_audit/press_endpoints_audit.js b/press/press/report/press_endpoints_audit/press_endpoints_audit.js
new file mode 100644
index 00000000000..d0fdf6c560b
--- /dev/null
+++ b/press/press/report/press_endpoints_audit/press_endpoints_audit.js
@@ -0,0 +1,6 @@
+// Copyright (c) 2025, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.query_reports['Press Endpoints Audit'] = {
+ filters: [],
+};
diff --git a/press/press/report/press_endpoints_audit/press_endpoints_audit.json b/press/press/report/press_endpoints_audit/press_endpoints_audit.json
new file mode 100644
index 00000000000..31b55981bea
--- /dev/null
+++ b/press/press/report/press_endpoints_audit/press_endpoints_audit.json
@@ -0,0 +1,28 @@
+{
+ "add_total_row": 0,
+ "add_translate_data": 0,
+ "columns": [],
+ "creation": "2025-08-29 19:46:24.442646",
+ "disabled": 0,
+ "docstatus": 0,
+ "doctype": "Report",
+ "filters": [],
+ "idx": 0,
+ "is_standard": "Yes",
+ "letterhead": null,
+ "modified": "2025-08-29 19:46:33.884456",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Press Endpoints Audit",
+ "owner": "Administrator",
+ "prepared_report": 0,
+ "ref_doctype": "Site",
+ "report_name": "Press Endpoints Audit",
+ "report_type": "Script Report",
+ "roles": [
+ {
+ "role": "System Manager"
+ }
+ ],
+ "timeout": 0
+}
\ No newline at end of file
diff --git a/press/press/report/press_endpoints_audit/press_endpoints_audit.py b/press/press/report/press_endpoints_audit/press_endpoints_audit.py
new file mode 100644
index 00000000000..3835f315306
--- /dev/null
+++ b/press/press/report/press_endpoints_audit/press_endpoints_audit.py
@@ -0,0 +1,228 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+import ast
+import logging
+from collections.abc import Generator
+from dataclasses import dataclass, field
+from pathlib import Path
+from typing import Any
+
+import frappe
+import frappe.utils
+
+COLUMNS = [
+ {
+ "fieldname": "file_path",
+ "label": "Path",
+ "fieldtype": "Data",
+ },
+ {
+ "fieldname": "line_number",
+ "label": "Line",
+ "fieldtype": "Int",
+ },
+ {
+ "fieldname": "function_name",
+ "label": "Function",
+ "fieldtype": "Data",
+ },
+ {
+ "fieldname": "allow_guest",
+ "label": "Allow Guest",
+ "fieldtype": "Check",
+ },
+ {
+ "fieldname": "is_protected",
+ "label": "Protected",
+ "fieldtype": "Check",
+ },
+ {
+ "fieldname": "protected_doctypes",
+ "label": "Protected Doctypes",
+ "fieldtype": "Data",
+ },
+ {
+ "fieldname": "is_get_doc_with_input",
+ "label": "Uses get_doc with input",
+ "fieldtype": "Check",
+ },
+ {
+ "fieldname": "parameters",
+ "label": "Parameters",
+ "fieldtype": "Data",
+ },
+]
+
+
+@dataclass
+class FunctionAnalysis:
+ file_path: str
+ function_name: str
+ line_number: int
+ is_protected: bool = False
+ protected_doctypes: list[str] = field(default_factory=list)
+ has_get_doc_with_input: bool = False
+ parameters: list[str] = field(default_factory=list)
+ allow_guest: bool = False
+
+ def to_dict(self) -> dict[str, Any]:
+ return {
+ "file_path": self.file_path,
+ "function_name": self.function_name,
+ "line_number": self.line_number,
+ "is_protected": 1 if self.is_protected else 0,
+ "protected_doctypes": ", ".join(self.protected_doctypes),
+ "is_get_doc_with_input": 1 if self.has_get_doc_with_input else 0,
+ "parameters": ", ".join(self.parameters),
+ "allow_guest": 1 if self.allow_guest else 0,
+ }
+
+
+class ASTAnalyzer:
+ @staticmethod
+ def parse_file(file_path: str | Path) -> ast.AST | None:
+ try:
+ with open(file_path, "r", encoding="utf-8") as f:
+ content = f.read()
+ return ast.parse(content, filename=str(file_path))
+ except (OSError, SyntaxError, UnicodeDecodeError) as e:
+ logging.warning(f"Failed to parse file {file_path}: {e}")
+ return None
+
+ @staticmethod
+ def is_function_call_decorator(decorator: ast.AST) -> bool:
+ return isinstance(decorator, ast.Call) and isinstance(decorator.func, ast.Attribute)
+
+ @staticmethod
+ def is_whitelisted_function(node: ast.FunctionDef) -> bool:
+ for decorator in node.decorator_list:
+ if not ASTAnalyzer.is_function_call_decorator(decorator):
+ continue
+
+ func = decorator.func
+ if (
+ hasattr(func, "value")
+ and isinstance(func.value, ast.Name)
+ and func.value.id == "frappe"
+ and func.attr == "whitelist"
+ ):
+ return True
+ return False
+
+ @staticmethod
+ def get_protected_doctypes(node: ast.FunctionDef) -> list[str] | None:
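+		# Returns the doctype names passed to a `@protected(...)` decorator,
+		# an empty list for a bare `@protected`, and None when no such decorator exists.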
+ for decorator in node.decorator_list:
+ if isinstance(decorator, ast.Call):
+ func = decorator.func
+ if isinstance(func, ast.Name) and func.id == "protected":
+ if not decorator.args:
+ return []
+
+ arg = decorator.args[0]
+ if isinstance(arg, ast.Constant):
+ return [str(arg.value)]
+ if isinstance(arg, ast.List):
+ return [str(elt.value) for elt in arg.elts if isinstance(elt, ast.Constant)]
+ elif isinstance(decorator, ast.Name) and decorator.id == "protected":
+ return []
+
+ return None
+
+ @staticmethod
+ def get_allow_guest(node: ast.FunctionDef) -> bool:
+ for decorator in node.decorator_list:
+ if not ASTAnalyzer.is_function_call_decorator(decorator):
+ continue
+
+ func = decorator.func
+ if (
+ hasattr(func, "value")
+ and isinstance(func.value, ast.Name)
+ and func.value.id == "frappe"
+ and func.attr == "whitelist"
+ ):
+ for kw in getattr(decorator, "keywords", []):
+ if kw.arg == "allow_guest" and isinstance(kw.value, ast.Constant):
+ return bool(kw.value.value)
+ return False
+ return False
+
+ @staticmethod
+ def get_function_parameters(node: ast.FunctionDef) -> list[str]:
+ return [arg.arg for arg in node.args.args]
+
+ @staticmethod
+ def uses_get_doc_with_input(node: ast.AST, params: list[str]) -> bool:
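+		# Flags assignments of the form `doc = frappe.get_doc(...)` where any
+		# positional argument is one of the endpoint's own parameters.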
+ if not isinstance(node, ast.Assign) or not isinstance(node.value, ast.Call):
+ return False
+
+ call_node = node.value
+ if not (
+ isinstance(call_node.func, ast.Attribute)
+ and isinstance(call_node.func.value, ast.Name)
+ and call_node.func.value.id == "frappe"
+ and call_node.func.attr == "get_doc"
+ ):
+ return False
+ return any(isinstance(arg, ast.Name) and arg.id in params for arg in call_node.args)
+
+
+class EndpointAuditor:
+ def __init__(self, root_directory: str | Path):
+ self.root_directory = Path(root_directory).resolve()
+ self.analyzer = ASTAnalyzer()
+
+ def analyze_function(self, node: ast.FunctionDef, file_path: Path) -> FunctionAnalysis | None:
+ if not self.analyzer.is_whitelisted_function(node):
+ return None
+
+ parameters = self.analyzer.get_function_parameters(node)
+ protected_doctypes = self.analyzer.get_protected_doctypes(node)
+ allow_guest = self.analyzer.get_allow_guest(node)
+
+ has_get_doc_with_input = any(
+ self.analyzer.uses_get_doc_with_input(n, parameters) for n in ast.walk(node)
+ )
+
+ relative_path = str(file_path.relative_to(self.root_directory))
+
+ return FunctionAnalysis(
+ file_path=relative_path,
+ function_name=node.name,
+ line_number=node.lineno,
+ is_protected=protected_doctypes is not None,
+ protected_doctypes=protected_doctypes or [],
+ has_get_doc_with_input=has_get_doc_with_input,
+ parameters=parameters,
+ allow_guest=allow_guest,
+ )
+
+ def analyze_file(self, file_path: Path) -> Generator[FunctionAnalysis, None, None]:
+ tree = self.analyzer.parse_file(file_path)
+ if tree is None:
+ return
+
+ for node in ast.walk(tree):
+ if isinstance(node, ast.FunctionDef):
+ analysis = self.analyze_function(node, file_path)
+ if analysis is not None:
+ yield analysis
+
+ def audit_directory(self) -> Generator[FunctionAnalysis, None, None]:
+ try:
+ for file_path in self.root_directory.rglob("*.py"):
+ if file_path.is_file():
+ yield from self.analyze_file(file_path)
+ except OSError as e:
+ logging.error(f"Error walking directory {self.root_directory}: {e}")
+ frappe.throw(f"Failed to access directory: {e}")
+
+
+def execute(filters) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]:
+ bench_root = Path(frappe.utils.get_bench_path()).resolve()
+ audit_directory = bench_root.joinpath("apps", "press", "press")
+ auditor = EndpointAuditor(audit_directory)
+ analyses = list(auditor.audit_directory())
+ data = [analysis.to_dict() for analysis in analyses]
+ return COLUMNS, data
diff --git a/press/press/report/server_stats/server_stats.py b/press/press/report/server_stats/server_stats.py
index 6468d06acbd..f71c2e23f3b 100644
--- a/press/press/report/server_stats/server_stats.py
+++ b/press/press/report/server_stats/server_stats.py
@@ -2,9 +2,10 @@
# For license information, please see license.txt
import frappe
-from press.api.server import usage, total_resource, prometheus_query, calculate_swap
from frappe.utils import rounded
+from press.api.server import calculate_swap, prometheus_query, total_resource, usage
+
def execute(filters=None):
frappe.only_for("System Manager")
@@ -199,11 +200,16 @@ def get_data(filters):
def get_servers(filters):
- server_filters = {"status": "Active"}
- if filters.team:
- server_filters["team"] = filters.team
+ server_filters = {"status": "Active", **filters}
+
+ if filters.server_name:
+ server_filters["name"] = ("like", f"%{filters.server_name}%")
+ server_filters.pop("server_name", None)
+
if filters.exclude_self_hosted:
server_filters["is_self_hosted"] = False
+ server_filters.pop("exclude_self_hosted", None)
+
server_types = (
[filters.server_type]
if filters.server_type
@@ -220,7 +226,13 @@ def get_servers(filters):
)
servers = []
for server_type in server_types:
- for server in frappe.get_all(server_type, server_filters):
+ server_type_filters = server_filters.copy()
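+		# Keep only filters that exist as fields on this server doctype; "name" is always kept.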
+ for field in server_filters:
+ if field == "name":
+ continue
+ if not frappe.get_meta(server_type).has_field(field):
+ server_type_filters.pop(field, None)
+ for server in frappe.get_all(server_type, server_type_filters):
server.update({"server_type": server_type})
servers.append(server)
return servers
diff --git a/press/press/report/shared_app_server_stats/shared_app_server_stats.py b/press/press/report/shared_app_server_stats/shared_app_server_stats.py
index 7b1fe3d7a45..76b39cad6eb 100644
--- a/press/press/report/shared_app_server_stats/shared_app_server_stats.py
+++ b/press/press/report/shared_app_server_stats/shared_app_server_stats.py
@@ -2,9 +2,10 @@
# For license information, please see license.txt
import frappe
-from press.api.server import usage, total_resource, prometheus_query, calculate_swap
from frappe.utils import rounded
+from press.api.server import calculate_swap, prometheus_query, total_resource, usage
+
def execute(filters=None):
frappe.only_for("System Manager")
diff --git a/press/press/workspace/press/press.json b/press/press/workspace/press/press.json
index 567168fc9e9..e1015a67541 100644
--- a/press/press/workspace/press/press.json
+++ b/press/press/workspace/press/press.json
@@ -1,331 +1,225 @@
{
- "category": "Modules",
"charts": [],
- "creation": "2020-03-13 12:00:25.244955",
- "developer_mode_only": 0,
- "disable_user_customization": 0,
+ "content": "[{\"id\":\"zYUVFJZX-t\",\"type\":\"header\",\"data\":{\"text\":\"Frappe Cloud \",\"col\":12}},{\"id\":\"VFzeFzlnB5\",\"type\":\"spacer\",\"data\":{\"col\":12}},{\"id\":\"iNJ8WoS9kD\",\"type\":\"header\",\"data\":{\"text\":\"Sites \",\"col\":12}},{\"id\":\"EYeJrbXv1P\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"Active Sites\",\"col\":3}},{\"id\":\"7jAM0HBrde\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"Broken Sites\",\"col\":3}},{\"id\":\"ttO5vXfzL_\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"Pending Sites\",\"col\":3}},{\"id\":\"nvLoQ_N15n\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"Suspended Sites\",\"col\":3}},{\"id\":\"AVO8JCkksy\",\"type\":\"spacer\",\"data\":{\"col\":12}},{\"id\":\"aLqo1uNPYu\",\"type\":\"header\",\"data\":{\"text\":\"Benches \",\"col\":12}},{\"id\":\"BcJPVEkRcy\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"Active Benches\",\"col\":3}},{\"id\":\"PhHBf-f1ej\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"Broken Benches\",\"col\":3}},{\"id\":\"JJB6cOEiXy\",\"type\":\"spacer\",\"data\":{\"col\":12}},{\"id\":\"7a18ugroq8\",\"type\":\"header\",\"data\":{\"text\":\"Servers \",\"col\":12}},{\"id\":\"M3urSE6cor\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"Application Server\",\"col\":3}},{\"id\":\"5qOJXl1CkE\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"Database Server\",\"col\":3}},{\"id\":\"PWrCW7DruI\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"Proxy Server\",\"col\":3}},{\"id\":\"_GVOmg7C_U\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"Hybrid Servers\",\"col\":3}},{\"id\":\"XHWA0NCImO\",\"type\":\"spacer\",\"data\":{\"col\":12}},{\"id\":\"X1dteEUHoR\",\"type\":\"header\",\"data\":{\"text\":\"Settings \",\"col\":12}},{\"id\":\"eeEbYjVj_n\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"Press Settings\",\"col\":3}},{\"id\":\"EiWS2tWYwu\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"Root Domain\",\"col\":3}},{\"id\":\"EoLO5YYzdR\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"Tls Certificate\",\"col\":3}},{\"id\":\"-Jx1Irf-28\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"Team\",\"col\":3}},{\"id\":\"n-T0ehr2ca\",\"type\":\"spacer\",\"data\":{\"col\":12}},{\"id\":\"xZEclbenJQ\",\"type\":\"header\",\"data\":{\"text\":\"Masters \",\"col\":12}},{\"id\":\"P6nCUrJreH\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"App\",\"col\":3}},{\"id\":\"1kAyHtrIrJ\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"App Source\",\"col\":3}},{\"id\":\"-N-Xb5MVPV\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"App Release\",\"col\":3}},{\"id\":\"ND9oElFB7R\",\"type\":\"spacer\",\"data\":{\"col\":12}},{\"id\":\"0DYlbnEtOT\",\"type\":\"header\",\"data\":{\"text\":\"Subscription \",\"col\":12}},{\"id\":\"eddRzzqwSZ\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"Server Plan\",\"col\":3}},{\"id\":\"GYk3Frsy1L\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"Site Plan\",\"col\":3}},{\"id\":\"o_aDSbQlPf\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"Subscription\",\"col\":3}},{\"id\":\"VZSHmE1jzA\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"Invoice\",\"col\":3}},{\"id\":\"B8T-3IZ8Qg\",\"type\":\"spacer\",\"data\":{\"col\":12}},{\"id\":\"WDyzNHuOas\",\"type\":\"header\",\"data\":{\"text\":\"Operations \",\"col\":12}},{\"id\":\"Ziby8rOfsU\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"Agent Job\",\"col\":3}},{\"id\":\"Vm4Tn0dxiD\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"Ansible 
Play\",\"col\":3}},{\"id\":\"n6VE51hPkc\",\"type\":\"shortcut\",\"data\":{\"shortcut_name\":\"Press Job\",\"col\":3}}]",
+ "creation": "2024-07-30 18:43:18.421196",
+ "custom_blocks": [],
"docstatus": 0,
"doctype": "Workspace",
- "extends_another_page": 0,
+ "for_user": "",
"hide_custom": 0,
- "icon": "setting-gear",
+ "icon": "tool",
"idx": 0,
- "is_default": 0,
- "is_standard": 1,
+ "indicator_color": "",
+ "is_hidden": 1,
"label": "Press",
- "links": [
- {
- "hidden": 0,
- "is_query_report": 0,
- "label": "Site",
- "onboard": 0,
- "type": "Card Break"
- },
- {
- "hidden": 0,
- "is_query_report": 0,
- "label": "Site",
- "link_to": "Site",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
- },
- {
- "hidden": 0,
- "is_query_report": 0,
- "label": "Site Activity",
- "link_to": "Site Activity",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
- },
- {
- "hidden": 0,
- "is_query_report": 0,
- "label": "Site Update",
- "link_to": "Site Update",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
- },
+ "links": [],
+ "modified": "2024-08-02 16:33:40.351403",
+ "modified_by": "Administrator",
+ "module": "Press",
+ "name": "Press",
+ "number_cards": [],
+ "owner": "Administrator",
+ "parent_page": "",
+ "public": 1,
+ "quick_lists": [],
+ "roles": [],
+ "sequence_id": 28.0,
+ "shortcuts": [
{
- "hidden": 0,
- "is_query_report": 0,
- "label": "Site Backup",
- "link_to": "Site Backup",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
+ "color": "Green",
+ "doc_view": "List",
+ "label": "Root Domain",
+ "link_to": "Root Domain",
+ "stats_filter": "[]",
+ "type": "DocType"
},
{
- "hidden": 0,
- "is_query_report": 0,
- "label": "Site Domain",
- "link_to": "Site Domain",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
+ "color": "Grey",
+ "doc_view": "List",
+ "label": "Agent Job",
+ "link_to": "Agent Job",
+ "stats_filter": "[]",
+ "type": "DocType"
},
{
- "hidden": 0,
- "is_query_report": 0,
- "label": "Site Usage",
- "link_to": "Site Usage",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
+ "color": "Green",
+ "doc_view": "List",
+ "label": "Server Plan",
+ "link_to": "Server Plan",
+ "stats_filter": "[[\"Server Plan\",\"enabled\",\"=\",1,false]]",
+ "type": "DocType"
},
{
- "hidden": 0,
- "is_query_report": 0,
- "label": "Payments",
- "onboard": 0,
- "type": "Card Break"
+ "color": "Grey",
+ "doc_view": "List",
+ "label": "Ansible Play",
+ "link_to": "Ansible Play",
+ "stats_filter": "[]",
+ "type": "DocType"
},
{
- "hidden": 0,
- "is_query_report": 0,
- "label": "Invoice",
- "link_to": "Invoice",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
+ "color": "Green",
+ "doc_view": "List",
+ "label": "Team",
+ "link_to": "Team",
+ "stats_filter": "[[\"Team\",\"enabled\",\"=\",1,false]]",
+ "type": "DocType"
},
{
- "hidden": 0,
- "is_query_report": 0,
- "label": "Payment Ledger Entry",
- "link_to": "Payment Ledger Entry",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
+ "color": "Grey",
+ "doc_view": "List",
+ "label": "Press Job",
+ "link_to": "Press Job",
+ "stats_filter": "[]",
+ "type": "DocType"
},
{
- "hidden": 0,
- "is_query_report": 0,
- "label": "Plan",
- "link_to": "Plan",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
+ "color": "Green",
+ "doc_view": "List",
+ "format": "",
+ "label": "Active Sites",
+ "link_to": "Site",
+ "stats_filter": "[[\"Site\",\"status\",\"=\",\"Active\",false]]",
+ "type": "DocType"
},
{
- "hidden": 0,
- "is_query_report": 0,
- "label": "Infrastructure",
- "onboard": 0,
- "type": "Card Break"
+ "color": "Green",
+ "doc_view": "List",
+ "label": "Site Plan",
+ "link_to": "Site Plan",
+ "stats_filter": "[[\"Site Plan\",\"enabled\",\"=\",1,false]]",
+ "type": "DocType"
},
{
- "hidden": 0,
- "is_query_report": 0,
- "label": "Server",
- "link_to": "Server",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
+ "color": "Green",
+ "doc_view": "List",
+ "label": "Active Benches",
+ "link_to": "Bench",
+ "stats_filter": "[[\"Bench\",\"status\",\"=\",\"Active\",false]]",
+ "type": "DocType"
},
{
- "hidden": 0,
- "is_query_report": 0,
- "label": "Database Server",
- "link_to": "Database Server",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
+ "color": "Green",
+ "doc_view": "List",
+ "label": "Subscription",
+ "link_to": "Subscription",
+ "stats_filter": "[]",
+ "type": "DocType"
},
{
- "hidden": 0,
- "is_query_report": 0,
- "label": "Proxy Server",
- "link_to": "Proxy Server",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
+ "color": "Green",
+ "doc_view": "List",
+ "label": "Tls Certificate",
+ "link_to": "TLS Certificate",
+ "stats_filter": "[[\"TLS Certificate\",\"status\",\"=\",\"Active\",false]]",
+ "type": "DocType"
},
{
- "hidden": 0,
- "is_query_report": 0,
- "label": "Bench",
- "link_to": "Bench",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
+ "color": "Grey",
+ "doc_view": "List",
+ "label": "Invoice",
+ "link_to": "Invoice",
+ "stats_filter": "[]",
+ "type": "DocType"
},
{
- "hidden": 0,
- "is_query_report": 0,
- "label": "Site",
+ "color": "Red",
+ "doc_view": "List",
+ "format": "",
+ "label": "Broken Sites",
"link_to": "Site",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
+ "stats_filter": "[[\"Site\",\"status\",\"=\",\"Broken\",false]]",
+ "type": "DocType"
},
{
- "hidden": 0,
- "is_query_report": 0,
- "label": "Apps",
- "onboard": 0,
- "type": "Card Break"
+ "color": "Red",
+ "doc_view": "List",
+ "label": "Broken Benches",
+ "link_to": "Bench",
+ "stats_filter": "[[\"Bench\",\"status\",\"=\",\"Broken\",false]]",
+ "type": "DocType"
},
{
- "hidden": 0,
- "is_query_report": 0,
+ "color": "Grey",
+ "doc_view": "List",
"label": "App",
"link_to": "App",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
- },
- {
- "hidden": 0,
- "is_query_report": 0,
- "label": "Release Group",
- "link_to": "Release Group",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
- },
- {
- "hidden": 0,
- "is_query_report": 0,
- "label": "App Release",
- "link_to": "App Release",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
- },
- {
- "hidden": 0,
- "is_query_report": 0,
- "label": "App Tag",
- "link_to": "App Tag",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
- },
- {
- "hidden": 0,
- "is_query_report": 0,
- "label": "GitHub Webhook Log",
- "link_to": "GitHub Webhook Log",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
- },
- {
- "hidden": 0,
- "is_query_report": 0,
- "label": "Teams",
- "onboard": 0,
- "type": "Card Break"
- },
- {
- "hidden": 0,
- "is_query_report": 0,
- "label": "Team",
- "link_to": "Team",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
- },
- {
- "hidden": 0,
- "is_query_report": 0,
- "label": "Account Request",
- "link_to": "Account Request",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
- },
- {
- "hidden": 0,
- "is_query_report": 0,
- "label": "Updates",
- "onboard": 0,
- "type": "Card Break"
+ "stats_filter": "[]",
+ "type": "DocType"
},
{
- "hidden": 0,
- "is_query_report": 0,
- "label": "Deploy",
- "link_to": "Deploy",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
+ "color": "Yellow",
+ "doc_view": "List",
+ "format": "",
+ "label": "Pending Sites",
+ "link_to": "Site",
+ "stats_filter": "[[\"Site\",\"status\",\"=\",\"Pending\",false]]",
+ "type": "DocType"
},
{
- "hidden": 0,
- "is_query_report": 0,
- "label": "Deploy Candidate Difference",
- "link_to": "Deploy Candidate Difference",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
+ "color": "Green",
+ "doc_view": "List",
+ "label": "Application Server",
+ "link_to": "Server",
+ "stats_filter": "[[\"Server\",\"status\",\"=\",\"Active\",false]]",
+ "type": "DocType"
},
{
- "hidden": 0,
- "is_query_report": 0,
- "label": "Settings",
- "onboard": 0,
- "type": "Card Break"
+ "color": "Green",
+ "doc_view": "List",
+ "label": "App Source",
+ "link_to": "App Source",
+ "stats_filter": "[]",
+ "type": "DocType"
},
{
- "hidden": 0,
- "is_query_report": 0,
- "label": "Press Settings",
- "link_to": "Press Settings",
- "link_type": "DocType",
- "onboard": 0,
- "type": "Link"
- }
- ],
- "modified": "2021-05-24 12:44:58.237352",
- "modified_by": "Administrator",
- "module": "Press",
- "name": "Press",
- "owner": "Administrator",
- "pin_to_bottom": 0,
- "pin_to_top": 1,
- "shortcuts": [
- {
- "doc_view": "",
- "label": "Active Servers",
- "link_to": "Server",
- "stats_filter": "{\"status\": \"Active\"}",
+ "color": "Grey",
+ "doc_view": "List",
+ "format": "",
+ "label": "Suspended Sites",
+ "link_to": "Site",
+ "stats_filter": "[[\"Site\",\"status\",\"=\",\"Suspended\",false]]",
"type": "DocType"
},
{
- "doc_view": "",
- "label": "Database Servers",
+ "color": "Green",
+ "doc_view": "List",
+ "label": "Database Server",
"link_to": "Database Server",
- "stats_filter": "{\"status\": \"Active\"}",
+ "stats_filter": "[[\"Database Server\",\"status\",\"=\",\"Active\",false]]",
"type": "DocType"
},
{
- "doc_view": "",
- "label": "Proxy Servers",
- "link_to": "Proxy Server",
- "stats_filter": "{\"status\": \"Active\"}",
+ "color": "Grey",
+ "doc_view": "List",
+ "label": "App Release",
+ "link_to": "App Release",
+ "stats_filter": "[]",
"type": "DocType"
},
{
- "label": "Active Benches",
- "link_to": "Bench",
- "stats_filter": "{\"status\": \"Active\"}",
+ "color": "Green",
+ "doc_view": "List",
+ "label": "Proxy Server",
+ "link_to": "Proxy Server",
+ "stats_filter": "[[\"Proxy Server\",\"status\",\"=\",\"Active\",false]]",
"type": "DocType"
},
{
- "label": "Active Sites",
- "link_to": "Site",
- "stats_filter": "{\"status\": \"Active\"}",
+ "color": "Green",
+ "doc_view": "List",
+ "label": "Hybrid Servers",
+ "link_to": "Self Hosted Server",
+ "stats_filter": "[[\"Self Hosted Server\",\"status\",\"=\",\"Active\",false]]",
"type": "DocType"
},
{
- "label": "Pending Jobs",
- "link_to": "Agent Job",
- "stats_filter": "{\"status\": [\"in\", [\"Running\", \"Pending\"]]}",
+ "color": "Grey",
+ "doc_view": "List",
+ "label": "Press Settings",
+ "link_to": "Press Settings",
"type": "DocType"
}
- ]
+ ],
+ "title": "Press"
}
\ No newline at end of file
diff --git a/press/public/email/style.css b/press/public/email/style.css
index e90638475d4..02b6da1e451 100644
--- a/press/public/email/style.css
+++ b/press/public/email/style.css
@@ -72,7 +72,7 @@
}
a {
- @apply text-blue-600 underline;
+ @apply text-gray-800 underline;
}
}
@@ -81,15 +81,15 @@
}
.button-primary {
- @apply bg-blue-500 rounded-lg;
+ @apply bg-gray-900 rounded-lg;
}
.button a {
- @apply block px-4 py-2 text-base font-semibold leading-normal text-gray-900 no-underline bg-gray-200 rounded-lg;
+ @apply block px-2 py-1 text-base leading-normal text-gray-900 no-underline bg-gray-200 rounded-lg;
}
.button-primary a {
- @apply text-white bg-blue-500 rounded-lg;
+ @apply text-white bg-gray-900 rounded-lg;
}
/* purgecss end ignore */
diff --git a/press/public/images/frappe-cloud-logo.png b/press/public/images/frappe-cloud-logo.png
index 3534a6887f6..56a0d255899 100644
Binary files a/press/public/images/frappe-cloud-logo.png and b/press/public/images/frappe-cloud-logo.png differ
diff --git a/press/public/images/frappe-logo-black.png b/press/public/images/frappe-logo-black.png
new file mode 100644
index 00000000000..3d453917aa7
Binary files /dev/null and b/press/public/images/frappe-logo-black.png differ
diff --git a/press/public/images/frappecloud-logo.png b/press/public/images/frappecloud-logo.png
new file mode 100644
index 00000000000..ae5bae0a58d
Binary files /dev/null and b/press/public/images/frappecloud-logo.png differ
diff --git a/press/public/images/mpesa-logo.svg b/press/public/images/mpesa-logo.svg
new file mode 100644
index 00000000000..4345b346482
--- /dev/null
+++ b/press/public/images/mpesa-logo.svg
@@ -0,0 +1,1457 @@
+<!-- SVG markup (1,457 lines) omitted from this listing -->
\ No newline at end of file
diff --git a/press/public/images/razorpay-logo.svg b/press/public/images/razorpay-logo.svg
new file mode 100644
index 00000000000..96418b28fc4
--- /dev/null
+++ b/press/public/images/razorpay-logo.svg
@@ -0,0 +1,12 @@
+<!-- SVG markup omitted from this listing -->
diff --git a/press/public/images/stripe-logo.svg b/press/public/images/stripe-logo.svg
new file mode 100644
index 00000000000..37b894f9e83
--- /dev/null
+++ b/press/public/images/stripe-logo.svg
@@ -0,0 +1,24 @@
+<!-- SVG markup omitted from this listing -->
diff --git a/press/public/js/marketplace.bundle.js b/press/public/js/marketplace.bundle.js
deleted file mode 100644
index 3dfea2f4262..00000000000
--- a/press/public/js/marketplace.bundle.js
+++ /dev/null
@@ -1,112 +0,0 @@
-import Fuse from 'fuse.js';
-
-const allAppCardNodes = document.getElementsByClassName('app-card');
-const searchInput = document.getElementById('app-search-input');
-const noResultsMessage = document.getElementById('no-results-message');
-
-const appList = [];
-for (let node of allAppCardNodes) {
- appList.push({
- title: node.getAttribute('data-title'),
- description: node.getAttribute('data-description'),
- categories: node.getAttribute('data-categories'),
- name: node.id,
- });
-}
-
-// Initialize fuse.js
-const options = {
- keys: ['title'], // Can add description later if required
- includeScore: true,
- shouldSort: true,
- minMatchCharLength: 3,
-};
-const fuse = new Fuse(appList, options);
-
-searchInput.addEventListener('input', (e) => {
- // TODO: Debounce/Throttle
- const searchText = e.target.value;
- if (!searchText) {
- displayAllApps();
- return;
- }
-
- const results = fuse.search(searchText);
- updateAppList(results);
-});
-
-function updateAppList(results) {
- for (let node of allAppCardNodes) {
- node.style.display = 'none';
- }
-
- if (results.length === 0) {
- noResultsMessage.style.display = '';
- return;
- } else {
- noResultsMessage.style.display = 'none';
- }
-
- // For sorting according to score
- for (let result of results) {
- let app = document.getElementById(result.item.name);
- app.style.display = '';
- document.querySelector('#all-apps-list').appendChild(app);
- }
-}
-
-function displayAllApps() {
- noResultsMessage.style.display = 'none';
- for (let node of allAppCardNodes) {
- node.style.display = '';
- }
-}
-
-const btns = document.querySelectorAll('#category-button');
-btns.forEach((btn) => {
- btn.addEventListener('click', (e) => {
- const category = e.target.value;
- window.location.replace(
- location.origin + location.pathname + `?category=${category}`,
- );
- });
-});
-
-const removeCategoryBtn = document.getElementById('remove-category');
-removeCategoryBtn.addEventListener('click', (e) => {
- removeCategoryBtn.classList.add('hidden');
- window.location.replace(location.origin + location.pathname);
-});
-
-function updateCategories(category) {
- let set = 0;
- for (let node of allAppCardNodes) {
- node.style.display = 'none';
- }
-
- for (let app of allAppCardNodes) {
- if (app.dataset.categories.includes(category)) {
- app.style.display = '';
- set = 1;
- }
- }
-
- if (set == 0) {
- for (let node of allAppCardNodes) {
- node.style.display = '';
- }
- }
-
- var button = document.querySelector(`button[value="${category}"]`);
- button.classList.add('bg-gray-200');
-
- removeCategoryBtn.classList.remove('hidden');
- document.getElementById('remove-category-name').innerText = category;
-}
-
-var category = new URLSearchParams(window.location.search).get('category');
-if (category != null && category.length > 0) {
- updateCategories(category);
-} else if (category == null) {
- updateCategories('');
-}
diff --git a/press/public/js/press.bundle.js b/press/public/js/press.bundle.js
new file mode 100644
index 00000000000..919dc3b1fbb
--- /dev/null
+++ b/press/public/js/press.bundle.js
@@ -0,0 +1 @@
+import './utils';
diff --git a/press/public/js/utils.js b/press/public/js/utils.js
index 4fe214ec61e..4467f913a15 100644
--- a/press/public/js/utils.js
+++ b/press/public/js/utils.js
@@ -1,3 +1,28 @@
+frappe.provide('press');
+frappe.provide('press.utils');
+
+$.extend(press, {
+ set_hostname_abbreviation: function (frm) {
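+		// e.g. hostname "press-production-1" abbreviates to "press-p1"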
+ if (frm.doc.hostname) {
+ let parts = frm.doc.hostname.split('-');
+
+ let first_part = parts[0];
+
+ let sub_parts_abbr = $.map(parts.slice(1), function (p) {
+ return p ? p.substr(0, 1) : null;
+ }).join('');
+
+			let abbr;
+			if (sub_parts_abbr) {
+				abbr = first_part + '-' + sub_parts_abbr;
+			} else {
+				abbr = first_part;
+			}
+
+ frm.set_value('hostname_abbreviation', abbr);
+ }
+ },
+});
+
function clear_block(frm, block) {
clear_wrapper(frm.get_field(block).$wrapper);
}
diff --git a/press/public/marketplace/tailwind.config.js b/press/public/marketplace/tailwind.config.js
index 45f8a09b258..11f36e9acf0 100644
--- a/press/public/marketplace/tailwind.config.js
+++ b/press/public/marketplace/tailwind.config.js
@@ -1,6 +1,7 @@
const config = require('../../../dashboard/tailwind.config');
module.exports = {
+ presets: config.presets,
theme: config.theme,
plugins: config.plugins,
content: [
diff --git a/press/public/migrate b/press/public/migrate
index 5121a41df2b..5af6f6bba49 100755
--- a/press/public/migrate
+++ b/press/public/migrate
@@ -1,64 +1,52 @@
#!./env/bin/python
-# Fallback Script to run the easy migrate script in place of `bench --site {site} migrate-to frappecloud.com`
+# Fallback Script to run the easy migrate script in place of `bench migrate-to`
# This file was originally hosted on Frappe Cloud. For more information, contact us at frappecloud.com
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd.
import os
-import sys
-import tempfile
-
import click
import requests
from html2text import html2text
-import frappe
-
-
-def frappecloud_migrator(local_site):
- print("Retreiving Site Migrator...")
- remote_site = frappe.conf.frappecloud_url or "frappecloud.com"
- request_url = "https://{}/api/method/press.api.script".format(remote_site)
+def get_remote_script(remote_site):
+ print("Retrieving Site Migrator...")
+ request_url = f"https://{remote_site}/api/method/press.api.script"
request = requests.get(request_url)
if request.status_code / 100 != 2:
- print(
- "Request exitted with Status Code: {}\nPayload: {}".format(
- request.status_code, html2text(request.text)
- )
- )
+ print(f"Request exited with Status Code: {request.status_code}\nPayload: {html2text(request.text)}")
click.secho(
- "Some errors occurred while recovering the migration script. Please contact"
- " us @ Frappe Cloud if this issue persists",
+ "Some errors occurred while recovering the migration script. Please contact us @ Frappe Cloud if this issue persists",
fg="yellow",
)
return
- script_contents = request.json()["message"]
+ return request.json()["message"]
+
+
+def frappecloud_migrator():
+ remote_site_name = "frappecloud.com"
+ script_contents = get_remote_script(remote_site=remote_site_name)
+	import sys
+	import tempfile
py = sys.executable
script = tempfile.NamedTemporaryFile(mode="w")
script.write(script_contents)
- print("Site Migrator stored at {}".format(script.name))
- os.execv(py, [py, script.name, local_site])
+ script.flush()
+ print(f"Site Migrator stored at {script.name}")
+ os.execv(py, [py, script.name])
+
if __name__ == "__main__":
os.chdir("sites")
-
try:
- local_site = sys.argv[1]
- except Exception:
- local_site = input("Name of the site you want to migrate: ").strip()
-
- try:
- frappe.init(site=local_site)
- frappe.connect()
- frappecloud_migrator(local_site)
+ frappecloud_migrator()
except (KeyboardInterrupt, click.exceptions.Abort):
print("\nExitting...")
except Exception:
from frappe.utils import get_traceback
print(get_traceback())
-
- frappe.destroy()
diff --git a/press/redis-setup/Dockerfile b/press/redis-setup/Dockerfile
new file mode 100644
index 00000000000..5b40c0279bb
--- /dev/null
+++ b/press/redis-setup/Dockerfile
@@ -0,0 +1,68 @@
+FROM ubuntu:22.04
+
+# Prevent interactive prompts during package installation
+ENV DEBIAN_FRONTEND=noninteractive
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ redis-server \
+ supervisor \
+ python3 \
+ python3-pip \
+ python3-venv \
+ nodejs \
+ npm \
+ openssh-server \
+ git \
+ curl \
+ wget \
+ vim \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+# Create frappe user
+RUN useradd -m -s /bin/bash frappe
+
+# Create necessary directories
+RUN mkdir -p /home/frappe/frappe-bench/sites \
+ /home/frappe/frappe-bench/logs \
+ /home/frappe/frappe-bench/config \
+ /home/frappe/frappe-bench/config/ssh \
+ /home/frappe/frappe-bench/env \
+ /home/frappe/frappe-bench/apps
+
+# Set ownership
+RUN chown -R frappe:frappe /home/frappe
+
+# Copy supervisord configuration
+COPY supervisord.conf /etc/supervisor/supervisord.conf
+
+# Expose ports
+# Redis cache and redis queue ports
+EXPOSE 11000 13000
+
+# Switch to frappe user for remaining setup
+USER frappe
+WORKDIR /home/frappe/frappe-bench
+
+
+# Install bench in .local bin
+ENV PATH "$PATH:/home/frappe/.local/bin"
+RUN wget https://bootstrap.pypa.io/get-pip.py && python3.10 get-pip.py
+RUN pip install --upgrade frappe-bench=="5.25.1"
+RUN pip install Jinja2~=3.0.3
+RUN pip install --upgrade setuptools
+
+RUN git config --global advice.detachedHead false
+
+ENV PYTHONUNBUFFERED 1
+
+
+# Switch back to root for final setup
+USER root
+
+# Create volume mount points
+VOLUME ["/home/frappe/frappe-bench/sites", "/home/frappe/frappe-bench/logs", "/home/frappe/frappe-bench/config"]
+
+# Start supervisord
+CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/supervisord.conf"]
diff --git a/press/redis-setup/supervisord.conf b/press/redis-setup/supervisord.conf
new file mode 100644
index 00000000000..916a7760274
--- /dev/null
+++ b/press/redis-setup/supervisord.conf
@@ -0,0 +1,21 @@
+[unix_http_server]
+file=/tmp/supervisor.sock
+
+[supervisord]
+logfile=/tmp/supervisord.log
+logfile_maxbytes=50MB
+logfile_backups=10
+loglevel=info
+pidfile=/tmp/supervisord.pid
+nodaemon=true
+minfds=1024
+minprocs=200
+
+[rpcinterface:supervisor]
+supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
+
+[supervisorctl]
+serverurl=unix:///tmp/supervisor.sock
+
+[include]
+files = /home/frappe/frappe-bench/config/supervisor.conf
diff --git a/press/runner.py b/press/runner.py
index 41e839dcfd5..5a689be24df 100644
--- a/press/runner.py
+++ b/press/runner.py
@@ -1,8 +1,13 @@
import json
+import typing
+from collections.abc import Callable
+from dataclasses import dataclass
+from enum import Enum
+from typing import Literal
+import frappe
import wrapt
-from ansible import context
-from ansible import constants
+from ansible import constants, context
from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.executor.task_executor import TaskExecutor
from ansible.inventory.manager import InventoryManager
@@ -13,35 +18,40 @@
from ansible.plugins.callback import CallbackBase
from ansible.utils.display import Display
from ansible.vars.manager import VariableManager
-from pymysql.err import InterfaceError
-
-import frappe
+from frappe.model.document import Document
+from frappe.utils import cstr
from frappe.utils import now_datetime as now
+from press.press.doctype.ansible_play.ansible_play import AnsiblePlay
+
+if typing.TYPE_CHECKING:
+ from press.press.doctype.agent_job.agent_job import AgentJob
+ from press.press.doctype.virtual_machine.virtual_machine import VirtualMachine
+
def reconnect_on_failure():
@wrapt.decorator
def wrapper(wrapped, instance, args, kwargs):
try:
return wrapped(*args, **kwargs)
- except InterfaceError:
- frappe.db.connect()
- return wrapped(*args, **kwargs)
+ except Exception as e:
+ if frappe.db.is_interface_error(e):
+ frappe.db.connect()
+ return wrapped(*args, **kwargs)
+ raise
return wrapper
class AnsibleCallback(CallbackBase):
def __init__(self, *args, **kwargs):
- super(AnsibleCallback, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
@reconnect_on_failure()
def process_task_success(self, result):
result, action = frappe._dict(result._result), result._task.action
if action == "user":
- server_type, server = frappe.db.get_value(
- "Ansible Play", self.play, ["server_type", "server"]
- )
+ server_type, server = frappe.db.get_value("Ansible Play", self.play, ["server_type", "server"])
server = frappe.get_doc(server_type, server)
if result.name == "root":
server.root_public_key = result.ssh_public_key
@@ -76,7 +86,7 @@ def update_play(self, status=None, stats=None):
play = frappe.get_doc("Ansible Play", self.play)
if stats:
# Assume we're running on one host
- host = list(stats.processed.keys())[0]
+ host = next(iter(stats.processed.keys()))
play.update(stats.summarize(host))
if play.failures or play.unreachable:
play.status = "Failure"
@@ -116,8 +126,18 @@ def update_task(self, status, result=None, task=None):
else:
task.start = now()
task.save()
+ self.publish_play_progress(task.name)
frappe.db.commit()
+ def publish_play_progress(self, task):
+ frappe.publish_realtime(
+ "ansible_play_progress",
+ {"progress": self.task_list.index(task), "total": len(self.task_list), "play": self.play},
+ doctype="Ansible Play",
+ docname=self.play,
+ user=frappe.session.user,
+ )
+
def parse_result(self, result):
task = result._task.name
role = result._task._role.get_name()
@@ -134,9 +154,7 @@ def on_async_start(self, role, task, job_id):
@reconnect_on_failure()
def on_async_poll(self, result):
job_id = result["ansible_job_id"]
- task_name = frappe.get_value(
- "Ansible Task", {"play": self.play, "job_id": job_id}, "name"
- )
+ task_name = frappe.get_value("Ansible Task", {"play": self.play, "job_id": job_id}, "name")
task = frappe.get_doc("Ansible Task", task_name)
task.result = json.dumps(result, indent=4)
task.duration = now() - task.start
@@ -159,11 +177,12 @@ def __init__(self, server, playbook, user="root", variables=None, port=22):
check=False,
connection="ssh",
# This is the only way to pass variables that preserves newlines
- extra_vars=[f"{key}='{value}'" for key, value in self.variables.items()],
+ extra_vars=[f"{cstr(key)}='{cstr(value)}'" for key, value in self.variables.items()],
remote_user=user,
start_at_task=None,
syntax=False,
- verbosity=3,
+ verbosity=1,
+		ssh_common_args=self._get_ssh_proxy_command(server),
)
self.loader = DataLoader()
@@ -175,9 +194,25 @@ def __init__(self, server, playbook, user="root", variables=None, port=22):
self.callback = AnsibleCallback()
self.display = Display()
- self.display.verbosity = 3
+ self.display.verbosity = 1
self.create_ansible_play()
+	def _get_ssh_proxy_command(self, server):
+ # Note: ProxyCommand must be enclosed in double quotes
+ # because it contains spaces
+ # and the entire argument must be enclosed in single quotes
+ # because it is passed via the CLI
+ # See https://docs.ansible.com/ansible/latest/user_guide/connection_details.html#ssh-args
+ # and https://unix.stackexchange.com/a/303717
+ # for details
+ proxy_command = None
+		if hasattr(server, "bastion_host") and server.bastion_host:
+ proxy_command = f'-o ProxyCommand="ssh -W %h:%p \
+ {server.bastion_host.ssh_user}@{server.bastion_host.ip} \
+ -p {server.bastion_host.ssh_port}"'
+
+ return proxy_command
+
def patch(self):
def modified_action_module_run(*args, **kwargs):
result = self.action_module_run(*args, **kwargs)
@@ -202,7 +237,7 @@ def unpatch(self):
TaskExecutor._poll_async_result = self._poll_async_result
ActionModule.run = self.action_module_run
- def run(self):
+ def run(self) -> AnsiblePlay:
self.executor = PlaybookExecutor(
playbooks=[self.playbook_path],
inventory=self.inventory,
@@ -214,6 +249,7 @@ def run(self):
self.executor._tqm._stdout_callback = self.callback
self.callback.play = self.play
self.callback.tasks = self.tasks
+ self.callback.task_list = self.task_list
self.executor.run()
self.unpatch()
return frappe.get_doc("Ansible Play", self.play)
@@ -237,6 +273,7 @@ def create_ansible_play(self):
).insert()
self.play = play_doc.name
self.tasks = {}
+ self.task_list = []
for role in play.get_roles():
for block in role.get_task_blocks():
for task in block.block:
@@ -249,3 +286,170 @@ def create_ansible_play(self):
}
).insert()
self.tasks.setdefault(role.get_name(), {})[task.name] = task_doc.name
+ self.task_list.append(task_doc.name)
+
+
+class Status(str, Enum):
+ Pending = "Pending"
+ Running = "Running"
+ Success = "Success"
+ Skipped = "Skipped"
+ Failure = "Failure"
+
+ def __str__(self):
+ return self.value
+
+
+class GenericStep(Document):
+ attempt: int
+ job_type: Literal["Ansible Play", "Agent Job"]
+ job: str | None
+ status: Status
+ method_name: str
+
+
+@dataclass
+class StepHandler:
+ save: Callable
+ reload: Callable
+ doctype: str
+ name: str
+
+ def handle_vm_status_job(
+ self,
+ step: GenericStep,
+ virtual_machine: str,
+ expected_status: str,
+ ) -> None:
+ step.attempt = 1 if not step.attempt else step.attempt + 1
+
+ # Try to sync status in every attempt
+ try:
+ virtual_machine_doc: "VirtualMachine" = frappe.get_doc("Virtual Machine", virtual_machine)
+ virtual_machine_doc.sync()
+ except Exception:
+ pass
+
+ machine_status = frappe.db.get_value("Virtual Machine", virtual_machine, "status")
+ step.status = Status.Running if machine_status != expected_status else Status.Success
+ step.save()
+
+ def handle_agent_job(self, step: GenericStep, job: str, poll: bool = False) -> None:
+ if poll:
+ job_doc: AgentJob = frappe.get_doc("Agent Job", job)
+ job_doc.get_status()
+
+ job_status = frappe.db.get_value("Agent Job", job, "status")
+
+ status_map = {
+ "Delivery Failure": Status.Failure,
+ "Undelivered": Status.Pending,
+ }
+ job_status = status_map.get(job_status, job_status)
+ step.attempt = 1 if not step.attempt else step.attempt + 1
+
+ step.status = job_status
+ step.save()
+
+ if step.status == Status.Failure:
+			raise Exception(f"Agent Job {job} failed")
+
+ def handle_ansible_play(self, step: GenericStep, ansible: Ansible) -> None:
+ step.job_type = "Ansible Play"
+ step.job = ansible.play
+ step.save()
+ ansible_play = ansible.run()
+ step.status = ansible_play.status
+ step.save()
+
+ if step.status == Status.Failure:
+			raise Exception(f"Ansible Play {ansible.play} failed")
+
+ def _fail_ansible_step(
+ self,
+ step: GenericStep,
+ ansible: Ansible,
+ e: Exception | None = None,
+ ) -> None:
+ step.job = getattr(ansible, "play", None)
+ step.status = Status.Failure
+ step.output = str(e)
+ step.save()
+
+ def _fail_job_step(self, step: GenericStep, e: Exception | None = None) -> None:
+ step.status = Status.Failure
+ step.output = str(e)
+ step.save()
+
+ def fail(self):
+ self.status = Status.Failure
+ self.save()
+ frappe.db.commit()
+
+ def succeed(self):
+ self.status = Status.Success
+ self.save()
+ frappe.db.commit()
+
+ def handle_step_failure(self):
+ # can be implemented by the controller
+ pass
+
+ def get_steps(self, methods: list) -> list[dict]:
+		"""Generate a list of pending step rows for the given methods."""
+ return [
+ {
+ "step_name": method.__doc__,
+ "method_name": method.__name__,
+ "status": "Pending",
+ }
+ for method in methods
+ ]
+
+ def _get_method(self, method_name: str):
+ """Retrieve a method object by name."""
+ return getattr(self, method_name)
+
+ def next_step(self, steps: list[GenericStep]) -> GenericStep | None:
+ for step in steps:
+ if step.status not in (Status.Success, Status.Failure, Status.Skipped):
+ return step
+
+ return None
+
+ def _execute_steps(self, steps: list[GenericStep]):
+		"""Must be run via `enqueue_doc`, otherwise the first step executes in the web worker."""
+ self.status = Status.Running
+ self.save()
+ frappe.db.commit()
+
+ step = self.next_step(steps)
+ if not step:
+ self.succeed()
+ return
+
+ # Run a single step in this job
+ step = step.reload()
+ method = self._get_method(step.method_name)
+
+ try:
+ method(step)
+ frappe.db.commit()
+ except Exception:
+ self.reload()
+ self.fail()
+ self.handle_step_failure()
+ frappe.db.commit()
+ return
+
+ # After step completes, queue the next step
+ frappe.enqueue_doc(
+ self.doctype,
+ self.name,
+ "_execute_steps",
+ steps=steps,
+ timeout=18000,
+ at_front=True,
+ queue="long",
+ enqueue_after_commit=True,
+ )
diff --git a/press/saas/README.md b/press/saas/README.md
index e69de29bb2d..3764f5624a7 100644
--- a/press/saas/README.md
+++ b/press/saas/README.md
@@ -0,0 +1,68 @@
+### New SaaS Flow (Product Trial)
+
+It involves two doctypes.
+
+1. **Product Trial** - Holds the configuration for a specific product.
+2. **Product Trial Request** - Holds the record of a user's request for a specific product.
+
+#### How to know which site is available for allocation to a user?
+
+In the **Site** doctype, the `standby_for_product` field links a site to a product trial (e.g. erpnext, crm).
+If the `is_standby` field is checked, that site can be allocated to a user.
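+
+For illustration, the allocation lookup could look like this minimal sketch (standard Frappe ORM; the helper name and the `status` filter are assumptions, while the other field names are described above):
+
+```python
+import frappe
+
+
+def get_allocatable_site(product: str) -> str | None:
+    """Return the name of a standby site for `product`, if one is available."""
+    sites = frappe.get_all(
+        "Site",
+        filters={"is_standby": 1, "standby_for_product": product, "status": "Active"},
+        pluck="name",
+        limit=1,
+    )
+    return sites[0] if sites else None
+```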
+
+#### Configure a new Product Trial
+- Create a new record in `Product Trial` doctype
+- **Details Tab**
+  - **Name** - should be unique; it is used as an id in signup/login flows, e.g. `crm` for `Frappe CRM`
+ - **Published**, **Title**, **Logo**, **Domain**, **Release Group**, **Trial Duration (days)**, **Trial Plan** - as the name implies, all fields are mandatory.
+  - **Apps** - List of apps that will be installed on the site. `Frappe` should be the first app in the list.
+- **Pooling Tab**
+  - **Enable Pooling** - Checkbox to enable/disable pooling. With pooling enabled, standby sites are kept ready, so new sites can be provisioned quickly.
+ - **Standby Pool Size** - The total number of sites that will be maintained in the pool.
+ - **Standby Queue Size** - Number of standby sites that will be queued at a time.
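+    For example, with a **Standby Pool Size** of 10 and a **Standby Queue Size** of 2, standby site creation is queued two sites at a time until the pool holds 10 sites.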
+- **Sign-up Details Tab**
+  - **Sign-up Fields** - If you need some information from the user at sign-up time, configure it here. Check this field's description in the doctype.
+  - **E-mail Account** - If you want to use a specific e-mail account for the SaaS sign-up, you can configure it here.
+ - **E-mail Full Logo** - This logo will be sent in verification e-mails.
+ - **E-mail Subject** - Subject of verification e-mail. You can put `{otp}` to insert the value in subject. Example - `{otp} - OTP for CRM Registration`
+ - **E-mail Header Content** - Header part of e-mail.
+ ```html
+ You're almost done!
+ Just one quick step left to get you started with Frappe CRM!
+ ```
+- **Setup Wizard Tab**
+ - **Setup Wizard Completion Mode** -
+    - **auto** - the site's setup wizard is completed in the background; after signup and setup, the user gets direct access to the app's desk or portal
+    - **manual** - after signup, the user is logged in to the site and needs to complete the framework's setup wizard
+    - **Setup Wizard Payload Generator Script** [only for **auto** mode] - Check the field description in the doctype.
+
+ Sample Payload Script -
+ ```python
+ payload = {
+ "language":"English",
+ "country": team.country,
+ "timezone":"Asia/Kolkata",
+ "currency": team.currency,
+ "full_name": team.user.full_name,
+ "email": team.user.email,
+ "password": decrypt_password(signup_details.login_password)
+ }
+ ```
+ - **Create Additional System User** [only for **manual** mode] - If this is checked, we will add an additional system user with the team's information after creating a new site.
+  - **Redirect To After Login** - After SaaS signup/login, the user is logged in directly to their site. By default, the user is redirected to the site's desk; with this option, the redirect path can be configured. For example, for gameplan the path would be `/g`
+
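+A `Product Trial` record can also be created programmatically, e.g. from `bench console`. A minimal sketch; all field values below are illustrative placeholders:
+
+```python
+import frappe
+
+# Field names follow the Product Trial doctype; every value here is a placeholder.
+frappe.get_doc(
+    {
+        "doctype": "Product Trial",
+        "name": "crm",
+        "title": "Frappe CRM",
+        "domain": "frappe.cloud",
+        "release_group": "bench-crm-production",
+        "trial_plan": "Trial Plan",
+        "trial_days": 14,
+        "apps": [{"app": "frappe"}, {"app": "crm"}],
+        "email_subject": "{otp} - OTP for CRM Registration",
+        "email_header_content": "You're almost done!",
+        "redirect_to_after_login": "/crm",
+    }
+).insert()
+```
+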
+#### FC Dashboard
+- UI/UX - The pages are available in https://github.com/frappe/press/tree/master/dashboard/src2/pages/saas
+- The required APIs for these pages are available in https://github.com/frappe/press/blob/master/press/api/product_trial.py
+
+#### Billing APIs for Integration in Framework
+
+> [!CAUTION]
+> Changes in any of these APIs can cause disruption in on-site billing system.
+
+- All the APIs required for in-site billing are available at https://github.com/frappe/press/tree/master/press/saas/api
+- These APIs use a different authentication mechanism. Check this readme for more info: https://github.com/frappe/press/blob/master/press/saas/api/readme.md
+- Reference integrations in the framework:
+ - https://github.com/frappe/frappe/tree/develop/billing
+ - https://github.com/frappe/frappe/blob/develop/frappe/integrations/frappe_providers/frappecloud_billing.py
+
diff --git a/press/saas/api/__init__.py b/press/saas/api/__init__.py
new file mode 100644
index 00000000000..2311a1700f5
--- /dev/null
+++ b/press/saas/api/__init__.py
@@ -0,0 +1,87 @@
+# Copyright (c) 2020, Frappe and contributors
+# For license information, please see license.txt
+
+from contextlib import suppress
+
+import frappe
+
+
+def whitelist_saas_api(func): # noqa: C901
+ def whitelist_wrapper(fn):
+ return frappe.whitelist(allow_guest=True, methods=["POST"])(fn)
+
+ def auth_wrapper(*args, **kwargs):
+ headers = frappe.request.headers
+ site_access_token = headers.get("x-site-access-token")
+ site_user = headers.get("x-site-user")
+ site = None
+ site_token = None
+ # check when x-site-access-token is provided
+ if site_access_token:
+			parts = site_access_token.split(":")
+			if len(parts) != 2:
+				frappe.throw("Invalid x-site-access-token provided", frappe.AuthenticationError)
+			access_token_doc_name, token = parts
+			with suppress(frappe.DoesNotExistError):
+				record = frappe.get_doc("Site Access Token", access_token_doc_name)
+ if record.token != token:
+ frappe.throw("Invalid x-site-access-token provided", frappe.AuthenticationError)
+ # set site and site token from access token record
+ site = record.site
+ site_token = frappe.db.get_value("Site", site, "saas_communication_secret")
+ # check when x-site and x-site-token are provided
+ else:
+ # set site and site token from headers
+ site = headers.get("x-site")
+ site_token = headers.get("x-site-token")
+
+ # check for valid values
+ if not site or not site_token:
+ frappe.throw(
+ "(x-site and x-site-token) or x-site-access-token headers are mandatory",
+ frappe.AuthenticationError,
+ )
+
+ # validate site
+ site_record = frappe.get_value(
+ "Site",
+ site,
+ [
+ "name",
+ "team",
+ "is_standby",
+ "standby_for_product",
+ "saas_communication_secret",
+ ],
+ as_dict=True,
+ ignore=True,
+ )
+
+ if not site_record:
+ frappe.throw("Invalid x-site provided", frappe.AuthenticationError)
+
+ if site_record.saas_communication_secret != site_token:
+ frappe.throw("Invalid x-site-token provided", frappe.AuthenticationError)
+
+ if site_record.is_standby is None and site_record.standby_for_product is None:
+ frappe.throw("Sorry, this is not a SaaS site", frappe.AuthenticationError)
+
+ # set site and team name in context
+ frappe.local.site_name = site_record.name
+ frappe.local.team_name = site_record.team
+
+		# set the user from the x-site-user header as the current session user
+ frappe.set_user(site_user)
+
+ # set utility function to get team and site info
+ frappe.local.get_site = lambda: frappe.get_doc("Site", frappe.local.site_name)
+ frappe.local.get_team = lambda: frappe.get_doc("Team", frappe.local.team_name)
+
+		# remove cmd from kwargs before calling the wrapped function
+		kwargs_copy = kwargs.copy()
+		kwargs_copy.pop("cmd", None)
+
+		return func(*args, **kwargs_copy)
+
+ return whitelist_wrapper(auth_wrapper)
diff --git a/press/saas/api/auth.py b/press/saas/api/auth.py
new file mode 100644
index 00000000000..0f478280af7
--- /dev/null
+++ b/press/saas/api/auth.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2020, Frappe and contributors
+# For license information, please see license.txt
+
+import frappe
+from press.saas.api import whitelist_saas_api
+
+
+@whitelist_saas_api
+def generate_access_token():
+ from press.saas.doctype.site_access_token.site_access_token import SiteAccessToken
+
+ return SiteAccessToken.generate(frappe.local.site_name)
+
+
+@frappe.whitelist(allow_guest=True)
+def is_access_token_valid(token: str):
+ split = token.split(":")
+ if len(split) != 2:
+ return False
+ actual_token = frappe.db.get_value("Site Access Token", split[0], "token")
+ return actual_token == split[1]
diff --git a/press/saas/api/billing.py b/press/saas/api/billing.py
new file mode 100644
index 00000000000..79a639f77b7
--- /dev/null
+++ b/press/saas/api/billing.py
@@ -0,0 +1,162 @@
+# Copyright (c) 2020, Frappe and contributors
+# For license information, please see license.txt
+
+import os
+
+import frappe
+
+from press.api import account as account_api
+from press.api import billing as billing_api
+from press.saas.api import whitelist_saas_api
+
+
+@whitelist_saas_api
+def country_list():
+ return account_api.country_list()
+
+
+# Billing Information Related APIs
+@whitelist_saas_api
+def get_information(timezone=None):
+ return account_api.get_billing_information(timezone)
+
+
+@whitelist_saas_api
+def update_information(billing_details: dict):
+ team = frappe.local.get_team()
+ team.update_billing_details(frappe._dict(billing_details))
+
+
+@whitelist_saas_api
+def validate_gst(address: dict):
+ return billing_api.validate_gst(address)
+
+
+@whitelist_saas_api
+def change_payment_mode(mode: str):
+ return billing_api.change_payment_mode(mode)
+
+
+# Stripe Payment Gateway Related APIs
+@whitelist_saas_api
+def get_publishable_key_and_setup_intent():
+ return billing_api.get_publishable_key_and_setup_intent()
+
+
+@whitelist_saas_api
+def setup_intent_success(setup_intent, address=None):
+ return billing_api.setup_intent_success(setup_intent, address)
+
+
+@whitelist_saas_api
+def create_payment_intent_for_micro_debit(payment_method_name):
+ return billing_api.create_payment_intent_for_micro_debit(payment_method_name)
+
+
+@whitelist_saas_api
+def create_payment_intent_for_buying_credits(amount):
+ return billing_api.create_payment_intent_for_buying_credits(amount)
+
+
+# Razorpay Payment Gateway Related APIs
+@whitelist_saas_api
+def create_razorpay_order(amount, type, doc_name=None):
+ return billing_api.create_razorpay_order(amount, type, doc_name=doc_name)
+
+
+@whitelist_saas_api
+def handle_razorpay_payment_failed():
+ return billing_api.handle_razorpay_payment_failed()
+
+
+# Invoice Related APIs
+@whitelist_saas_api
+def get_invoices():
+ return frappe.get_list(
+ "Invoice",
+ fields=[
+ "name",
+ "type",
+ "invoice_pdf",
+ "payment_mode",
+ "stripe_invoice_id",
+ "stripe_invoice_url",
+ "due_date",
+ "period_start",
+ "period_end",
+ "status",
+ "total",
+ "amount_paid",
+ "amount_due",
+ "stripe_payment_failed",
+ ],
+ filters={"team": frappe.local.team_name},
+ order_by="due_date desc, creation desc",
+ )
+
+
+@whitelist_saas_api
+def upcoming_invoice():
+ return billing_api.upcoming_invoice()
+
+
+@whitelist_saas_api
+def get_unpaid_invoices():
+ invoices = billing_api.unpaid_invoices()
+ unpaid_invoices = [invoice for invoice in invoices if invoice.status == "Unpaid"]
+ if len(unpaid_invoices) == 1:
+ return get_invoice(unpaid_invoices[0].name)
+ return unpaid_invoices
+
+
+@whitelist_saas_api
+def total_unpaid_amount():
+ return billing_api.total_unpaid_amount()
+
+
+@whitelist_saas_api
+def get_invoice(name: str):
+ invoice = frappe.get_doc("Invoice", name)
+ invoice.has_permission("read")
+ data = invoice.as_dict()
+ invoice.get_doc(data)
+ return data
+
+
+@whitelist_saas_api
+def download_invoice(name: str):
+ invoice_pdf = frappe.get_value("Invoice", name, "invoice_pdf")
+ if not invoice_pdf:
+ frappe.throw("Invoice PDF not found")
+ file_name = os.path.basename(invoice_pdf)
+ file = frappe.get_doc("File", {"file_name": file_name})
+ frappe.local.response.filename = file.file_name
+ frappe.local.response.filecontent = file.get_content()
+ frappe.local.response.type = "download"
+
+
+@whitelist_saas_api
+def get_stripe_payment_url_for_invoice(name: str) -> str | None:
+ try:
+ invoice = frappe.get_doc("Invoice", name)
+ if invoice.stripe_invoice_url:
+ return invoice.stripe_invoice_url
+ return invoice.get_stripe_payment_url()
+ except frappe.DoesNotExistError:
+ frappe.throw("Invoice not found")
+
+
+# Payment Method Related APIs
+@whitelist_saas_api
+def get_payment_methods():
+ return billing_api.get_payment_methods()
+
+
+@whitelist_saas_api
+def set_as_default(name):
+ return billing_api.set_as_default(name)
+
+
+@whitelist_saas_api
+def remove_payment_method(name):
+ return billing_api.remove_payment_method(name)
diff --git a/press/saas/api/readme.md b/press/saas/api/readme.md
new file mode 100644
index 00000000000..6a7163693e2
--- /dev/null
+++ b/press/saas/api/readme.md
@@ -0,0 +1,73 @@
+## Press SaaS API
+
+These APIs were introduced with the release of SaaS v2. They provide an interface for a deployed site to communicate back to Frappe Cloud.
+
+
+### Authentication using secret token
+
+The authentication token is provided in the site configuration as **fc_communication_secret**.
+
+For any request from the client end, we need to set the credentials in the headers:
+
+```
+X-Site: example.erpnext.com
+X-Site-Token: 319f41d07d430ed77df3d41a82787f4edff1440f12e43784a7ce8b4e
+```
+
+> All the API functions are wrapped in `frappe.whitelist(allow_guest=True)`.
+> However, due to the custom authentication wrapper, guests can't access the endpoints.
+
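+For example, a request authenticated with the secret token (reusing the placeholder site and token values from above, against the `site.info` endpoint):
+
+```bash
+curl --location --request POST 'http://fc.local:8000/api/method/press.saas.api.site.info' \
+--header 'x-site: example.erpnext.com' \
+--header 'x-site-token: 319f41d07d430ed77df3d41a82787f4edff1440f12e43784a7ce8b4e'
+```
+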
+### Authentication using access token
+
+**Why?**
+
+Sometimes we may need to pass a token to the frontend for specific tasks (example - in-desk checkout). In those cases, instead of exposing our secret authentication token, we can generate a temporary access token from Frappe Cloud and use that for the session.
+
+> Note: Generated access tokens are **valid for 15 minutes**.
+
+#### Generate Access Token
+
+**Request**
+
+```bash
+curl --location --request POST 'http://fc.local:8000/api/method/press.saas.api.auth.generate_access_token' \
+--header 'x-site: oka-hdz-qpj.tanmoy.fc.frappe.dev' \
+--header 'x-site-token: 004f85a3ae93927d2f0fcc668d11cb71'
+```
+
+**Response**
+
+```json
+{
+ "message": "fbk23eva6k:3e2882eff23d4145ddfefaebf5ac6135"
+}
+```
+
+After generating the access token, set this header on any SaaS API request to Frappe Cloud.
+```
+X-Site-Access-Token: fbk23eva6k:3e2882eff23d4145ddfefaebf5ac6135
+```
+
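+For example, fetching billing information with the access token (reusing the token from the response above):
+
+```bash
+curl --location --request POST 'http://fc.local:8000/api/method/press.saas.api.billing.get_information' \
+--header 'x-site-access-token: fbk23eva6k:3e2882eff23d4145ddfefaebf5ac6135'
+```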
+
+### Usage Guide
+
+- The `press.saas.api` `__init__` file provides a decorator, `@whitelist_saas_api`, which converts a function into an API endpoint.
+- `@whitelist_saas_api` also sets the requesting site user as the current session user, so the session is properly authenticated.
+- `@whitelist_saas_api` also adds a couple of variables and functions to `frappe.local`.
+ | Type | Name | Description |
+ | ---- | ---- | ----------- |
+ | Variable | frappe.local.site_name | Site name |
+ | Variable | frappe.local.team_name | Current team name |
+ | Function | frappe.local.get_site() -> Site | Fetch current site doctype record |
+ | Function | frappe.local.get_team() -> Team | Fetch current team doctype record |
+  | Variable (Additional) | frappe.session.user | Logged-in user name; available because the decorator sets the requesting user as the session user |
+- Sample Code
+ ```python
+ @whitelist_saas_api
+ def hello():
+ print(frappe.local.site_name)
+ print(frappe.local.get_site())
+ print(frappe.local.team_name)
+ print(frappe.local.get_team())
+ return f"👋 Hi! {frappe.local.site_name} is authenticated"
+ ```
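+- From the client end, the endpoint above could be called like this (a sketch; it assumes `hello` lives in a module under `press.saas.api` such as `press.saas.api.site`, and the site and token values are placeholders):
+  ```python
+  import requests
+
+  response = requests.post(
+      # hypothetical module path for the sample `hello` endpoint above
+      "http://fc.local:8000/api/method/press.saas.api.site.hello",
+      headers={
+          "x-site": "example.erpnext.com",
+          "x-site-token": "319f41d07d430ed77df3d41a82787f4edff1440f12e43784a7ce8b4e",
+      },
+  )
+  # whitelisted methods return their result under the "message" key
+  print(response.json()["message"])
+  ```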
diff --git a/press/saas/api/site.py b/press/saas/api/site.py
new file mode 100644
index 00000000000..cc92d4b2c6b
--- /dev/null
+++ b/press/saas/api/site.py
@@ -0,0 +1,105 @@
+# Copyright (c) 2020, Frappe and contributors
+# For license information, please see license.txt
+
+import frappe
+
+from press.api import site as site_api
+from press.saas.api import whitelist_saas_api
+
+
+@whitelist_saas_api
+def info():
+ is_fc_user = False
+ site = frappe.get_value(
+ "Site",
+ frappe.local.site_name,
+ ["trial_end_date", "plan", "standby_for_product", "team"],
+ as_dict=True,
+ )
+ site_user = frappe.request.headers.get("x-site-user")
+
+ team_members = frappe.get_doc("Team", site.team).get_user_list()
+ if site_user and site_user in team_members:
+ is_fc_user = True
+ plan = (
+ site.plan
+ if site.plan
+ else frappe.db.get_value("Product Trial", site.standby_for_product, "trial_plan")
+ )
+ return {
+ "is_fc_user": is_fc_user,
+ "name": frappe.local.site_name,
+ "trial_end_date": site.trial_end_date,
+ "plan": frappe.db.get_value("Site Plan", plan, ["is_trial_plan"], as_dict=True) if plan else None,
+ }
+
+
+@whitelist_saas_api
+def change_plan(plan: str):
+ site = frappe.local.get_site()
+ site.set_plan(plan)
+
+
+@whitelist_saas_api
+def get_plans():
+ site = frappe.get_value("Site", frappe.local.site_name, ["server", "group", "plan"], as_dict=True)
+ is_site_on_private_bench = frappe.db.get_value("Release Group", site.group, "public") is False
+ is_site_on_shared_server = frappe.db.get_value("Server", site.server, "public")
+ plans = site_api.get_site_plans()
+ filtered_plans = []
+
+ for plan in plans:
+ if plan.name != site.plan:
+ if plan.restricted_plan or plan.is_frappe_plan or plan.is_trial_plan:
+ continue
+ if is_site_on_private_bench and not plan.private_benches:
+ continue
+ if plan.dedicated_server_plan and is_site_on_shared_server:
+ continue
+ if not plan.dedicated_server_plan and not is_site_on_shared_server:
+ continue
+ filtered_plans.append(plan)
+
+	# `site_api.get_site_plans()` doesn't include the trial plan, since no roles
+	# are specified for it; the trial plan is only ever set from the backend and
+	# end users can't subscribe to it directly. If the site is on a trial plan,
+	# add it to the start of the list.
+
+ current_plan = frappe.get_doc("Site Plan", site.plan)
+ if current_plan.is_trial_plan:
+ filtered_plans.insert(
+ 0,
+ {
+ "name": current_plan.name,
+ "plan_title": current_plan.plan_title,
+ "price_usd": current_plan.price_usd,
+ "price_inr": current_plan.price_inr,
+ "cpu_time_per_day": current_plan.cpu_time_per_day,
+ "max_storage_usage": current_plan.max_storage_usage,
+ "max_database_usage": current_plan.max_database_usage,
+ "database_access": current_plan.database_access,
+ "support_included": current_plan.support_included,
+ "offsite_backups": current_plan.offsite_backups,
+ "private_benches": current_plan.private_benches,
+ "monitor_access": current_plan.monitor_access,
+ "dedicated_server_plan": current_plan.dedicated_server_plan,
+ "is_trial_plan": current_plan.is_trial_plan,
+ "allow_downgrading_from_other_plan": False,
+ "clusters": [],
+ "allowed_apps": [],
+ "bench_versions": [],
+ "restricted_plan": False,
+ },
+ )
+
+ return filtered_plans
+
+
+@whitelist_saas_api
+def get_first_support_plan():
+ plans = get_plans()
+	for plan in plans:
+		# `get_plans` may prepend the trial plan as a plain dict, so use .get()
+		if plan.get("support_included") and not plan.get("is_trial_plan"):
+			return plan
+ return None
diff --git a/press/saas/api/team.py b/press/saas/api/team.py
new file mode 100644
index 00000000000..5bf2d386e5b
--- /dev/null
+++ b/press/saas/api/team.py
@@ -0,0 +1,14 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2020, Frappe and contributors
+# For license information, please see license.txt
+
+import frappe
+from press.saas.api import whitelist_saas_api
+
+
+@whitelist_saas_api
+def info():
+ team = frappe.local.get_team()
+ data = team.as_dict()
+ team.get_doc(data)
+ return data
\ No newline at end of file
diff --git a/press/saas/doctype/hybrid_pool_item/__init__.py b/press/saas/doctype/hybrid_pool_item/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/saas/doctype/hybrid_pool_item/hybrid_pool_item.json b/press/saas/doctype/hybrid_pool_item/hybrid_pool_item.json
new file mode 100644
index 00000000000..6ea6cd60da4
--- /dev/null
+++ b/press/saas/doctype/hybrid_pool_item/hybrid_pool_item.json
@@ -0,0 +1,67 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2025-05-22 11:45:09.986862",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "field",
+ "value",
+ "app",
+ "custom_pool_size",
+ "preferred_cluster"
+ ],
+ "fields": [
+ {
+ "fieldname": "app",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "App",
+ "options": "App",
+ "reqd": 1
+ },
+ {
+ "fieldname": "custom_pool_size",
+ "fieldtype": "Int",
+ "in_list_view": 1,
+ "label": "Custom Pool Size",
+ "non_negative": 1
+ },
+ {
+ "fieldname": "preferred_cluster",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Preferred Cluster",
+ "options": "Cluster"
+ },
+ {
+ "fieldname": "field",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Field",
+ "reqd": 1
+ },
+ {
+ "fieldname": "value",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Value",
+ "reqd": 1
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2025-05-29 12:12:58.602008",
+ "modified_by": "Administrator",
+ "module": "SaaS",
+ "name": "Hybrid Pool Item",
+ "owner": "Administrator",
+ "permissions": [],
+ "row_format": "Dynamic",
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/saas/doctype/hybrid_pool_item/hybrid_pool_item.py b/press/saas/doctype/hybrid_pool_item/hybrid_pool_item.py
new file mode 100644
index 00000000000..95a521d10ea
--- /dev/null
+++ b/press/saas/doctype/hybrid_pool_item/hybrid_pool_item.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+from __future__ import annotations
+
+# import frappe
+from frappe.model.document import Document
+
+
+class HybridPoolItem(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ app: DF.Link
+ custom_pool_size: DF.Int
+ field: DF.Data
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ preferred_cluster: DF.Link | None
+ value: DF.Data
+ # end: auto-generated types
+
+ pass
diff --git a/press/saas/doctype/product_trial/__init__.py b/press/saas/doctype/product_trial/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/saas/doctype/product_trial/patches/rename_saas_product_doctypes_to_product_trial.py b/press/saas/doctype/product_trial/patches/rename_saas_product_doctypes_to_product_trial.py
new file mode 100644
index 00000000000..e7b53173e7c
--- /dev/null
+++ b/press/saas/doctype/product_trial/patches/rename_saas_product_doctypes_to_product_trial.py
@@ -0,0 +1,31 @@
+# Copyright (c) 2024, Frappe Technologies Pvt. Ltd. and Contributors
+
+from __future__ import unicode_literals
+
+import frappe
+from frappe.model.utils.rename_field import rename_field
+
+
+def execute():
+ rename_doctypes()
+ rename_fields()
+
+
+def rename_doctypes():
+ renames = {
+ "SaaS Product": "Product Trial",
+ "SaaS Product App": "Product Trial App",
+ "SaaS Product Signup Field": "Product Trial Signup Field",
+ "SaaS Product Site Request": "Product Trial Request",
+ }
+ for from_doctype, to_doctype in renames.items():
+ if frappe.db.table_exists(from_doctype) and not frappe.db.table_exists(to_doctype):
+ frappe.rename_doc("DocType", from_doctype, to_doctype, force=True)
+
+
+def rename_fields():
+ frappe.reload_doctype("Account Request")
+ rename_field("Account Request", "saas_product", "product_trial")
+
+ frappe.reload_doctype("Product Trial Request")
+ rename_field("Product Trial Request", "saas_product", "product_trial")
diff --git a/press/saas/doctype/product_trial/product_trial.js b/press/saas/doctype/product_trial/product_trial.js
new file mode 100644
index 00000000000..1e96fbd86b3
--- /dev/null
+++ b/press/saas/doctype/product_trial/product_trial.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2023, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Product Trial", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/saas/doctype/product_trial/product_trial.json b/press/saas/doctype/product_trial/product_trial.json
new file mode 100644
index 00000000000..21ebd2b2277
--- /dev/null
+++ b/press/saas/doctype/product_trial/product_trial.json
@@ -0,0 +1,270 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "autoname": "prompt",
+ "creation": "2023-08-01 13:11:14.281968",
+ "default_view": "List",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "title",
+ "column_break_bqrh",
+ "logo",
+ "published",
+ "section_break_rvnt",
+ "domain",
+ "release_group",
+ "column_break_cpkv",
+ "trial_days",
+ "trial_plan",
+ "section_break_ipmu",
+ "apps",
+ "pooling_tab",
+ "site_pool_section",
+ "enable_pooling",
+ "standby_pool_size",
+ "standby_queue_size",
+ "section_break_klpr",
+ "enable_hybrid_pooling",
+ "hybrid_pool_rules",
+ "signup_details_tab",
+ "email_section",
+ "email_account",
+ "column_break_gokr",
+ "email_full_logo",
+ "section_break_ncgc",
+ "email_subject",
+ "email_header_content",
+ "suspension_email_section",
+ "suspension_email_subject",
+ "suspension_email_content",
+ "setup_wizard_tab",
+ "redirect_to_after_login"
+ ],
+ "fields": [
+ {
+ "fieldname": "title",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Title",
+ "reqd": 1
+ },
+ {
+ "fieldname": "column_break_bqrh",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "published",
+ "fieldtype": "Check",
+ "label": "Published"
+ },
+ {
+ "fieldname": "section_break_ipmu",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "apps",
+ "fieldtype": "Table",
+ "label": "Apps",
+ "options": "Product Trial App",
+ "reqd": 1
+ },
+ {
+ "fieldname": "logo",
+ "fieldtype": "Attach Image",
+ "label": "Logo"
+ },
+ {
+ "fieldname": "section_break_rvnt",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "domain",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Domain",
+ "options": "Root Domain",
+ "reqd": 1
+ },
+ {
+ "fieldname": "column_break_cpkv",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "release_group",
+ "fieldtype": "Link",
+ "label": "Release Group",
+ "options": "Release Group",
+ "reqd": 1
+ },
+ {
+ "fieldname": "site_pool_section",
+ "fieldtype": "Section Break"
+ },
+ {
+ "default": "0",
+ "fieldname": "enable_pooling",
+ "fieldtype": "Check",
+ "label": "Enable Pooling"
+ },
+ {
+ "default": "10",
+ "fieldname": "standby_pool_size",
+ "fieldtype": "Int",
+ "label": "Standby Pool Size"
+ },
+ {
+ "default": "2",
+ "fieldname": "standby_queue_size",
+ "fieldtype": "Int",
+ "label": "Standby Queue Size"
+ },
+ {
+ "fieldname": "pooling_tab",
+ "fieldtype": "Tab Break",
+ "label": "Pooling"
+ },
+ {
+ "fieldname": "signup_details_tab",
+ "fieldtype": "Tab Break",
+ "label": "Signup Details"
+ },
+ {
+ "default": "14",
+ "fieldname": "trial_days",
+ "fieldtype": "Int",
+ "label": "Trial Duration (Days)"
+ },
+ {
+ "fieldname": "trial_plan",
+ "fieldtype": "Link",
+ "label": "Trial Plan",
+ "options": "Site Plan",
+ "reqd": 1
+ },
+ {
+ "fieldname": "setup_wizard_tab",
+ "fieldtype": "Tab Break",
+ "label": "After Signup"
+ },
+ {
+ "fieldname": "email_section",
+ "fieldtype": "Section Break",
+ "label": "E-mail"
+ },
+ {
+ "fieldname": "email_account",
+ "fieldtype": "Link",
+ "label": "Account",
+ "options": "Email Account"
+ },
+ {
+ "fieldname": "column_break_gokr",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "email_subject",
+ "fieldtype": "Data",
+ "label": "Subject",
+ "reqd": 1
+ },
+ {
+ "fieldname": "email_full_logo",
+ "fieldtype": "Attach Image",
+ "label": "Full Logo"
+ },
+ {
+ "fieldname": "section_break_ncgc",
+ "fieldtype": "Section Break",
+ "label": "Signup E-mail"
+ },
+ {
+   "default": "You're almost done!\nJust one quick step left to get you started with Frappe Cloud!",
+ "fieldname": "email_header_content",
+ "fieldtype": "Code",
+ "label": "Header Content",
+ "options": "html",
+ "reqd": 1
+ },
+ {
+ "default": "/app",
+ "fieldname": "redirect_to_after_login",
+ "fieldtype": "Data",
+ "label": "Redirect To After Login",
+ "reqd": 1
+ },
+ {
+ "fieldname": "suspension_email_section",
+ "fieldtype": "Section Break",
+ "label": "Suspension E-mail"
+ },
+ {
+ "default": "Your {product_title} trial just expired",
+ "fieldname": "suspension_email_subject",
+ "fieldtype": "Data",
+ "label": "Subject"
+ },
+ {
+   "default": "{% import \"templates/emails/macros.html\" as utils %}\n\n{% block content %}\n\n\t\n\t\t\n\t\t The trial period of your {{ product.title }} site {{ site.host_name }} has ended on {{ frappe.utils.getdate(site.trial_end_date).strftime('%d %B, %Y') }}.\n\t\t Your site will now be suspended. Choose a plan and add your billing information for your site to resume service.\n\t\t\n\t\t{{ utils.button('Open Dashboard', frappe.get_url('/dashboard/sites/' + site.name)) }}\n\t\t{{ utils.separator() }}\n\t\t{{ utils.signature() }}\n\t\n\n{% endblock %}\n",
+ "fieldname": "suspension_email_content",
+ "fieldtype": "HTML Editor",
+ "label": "Content"
+ },
+ {
+ "fieldname": "section_break_klpr",
+ "fieldtype": "Section Break",
+ "label": "Hybrid Pooling"
+ },
+ {
+ "fieldname": "hybrid_pool_rules",
+ "fieldtype": "Table",
+ "label": "Hybrid Pool Rules",
+ "options": "Hybrid Pool Item"
+ },
+ {
+ "default": "0",
+ "fieldname": "enable_hybrid_pooling",
+ "fieldtype": "Check",
+ "label": "Enable Hybrid Pooling"
+ }
+ ],
+ "grid_page_length": 50,
+ "image_field": "logo",
+ "index_web_pages_for_search": 1,
+ "links": [
+ {
+ "link_doctype": "Site",
+ "link_fieldname": "standby_for_product"
+ },
+ {
+ "link_doctype": "Product Trial Request",
+ "link_fieldname": "product_trial"
+ }
+ ],
+ "modified": "2026-01-12 18:55:46.861737",
+ "modified_by": "Administrator",
+ "module": "SaaS",
+ "name": "Product Trial",
+ "naming_rule": "Set by user",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": []
+}
diff --git a/press/saas/doctype/product_trial/product_trial.py b/press/saas/doctype/product_trial/product_trial.py
new file mode 100644
index 00000000000..5b177ec232e
--- /dev/null
+++ b/press/saas/doctype/product_trial/product_trial.py
@@ -0,0 +1,611 @@
+# Copyright (c) 2023, Frappe and contributors
+# For license information, please see license.txt
+
+from __future__ import annotations
+
+import json
+from typing import Any
+
+import frappe
+import frappe.utils
+from frappe.model.document import Document
+from frappe.utils.data import get_url
+
+from press.utils import log_error
+from press.utils.jobs import has_job_timeout_exceeded
+from press.utils.unique_name_generator import generate as generate_random_name
+
+
+class ProductTrial(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ from press.press.doctype.site.site import Site
+ from press.saas.doctype.hybrid_pool_item.hybrid_pool_item import HybridPoolItem
+ from press.saas.doctype.product_trial_app.product_trial_app import ProductTrialApp
+
+ apps: DF.Table[ProductTrialApp]
+ domain: DF.Link
+ email_account: DF.Link | None
+ email_full_logo: DF.AttachImage | None
+ email_header_content: DF.Code
+ email_subject: DF.Data
+ enable_hybrid_pooling: DF.Check
+ enable_pooling: DF.Check
+ hybrid_pool_rules: DF.Table[HybridPoolItem]
+ logo: DF.AttachImage | None
+ published: DF.Check
+ redirect_to_after_login: DF.Data
+ release_group: DF.Link
+ standby_pool_size: DF.Int
+ standby_queue_size: DF.Int
+ suspension_email_content: DF.HTMLEditor | None
+ suspension_email_subject: DF.Data | None
+ title: DF.Data
+ trial_days: DF.Int
+ trial_plan: DF.Link
+ # end: auto-generated types
+
+ dashboard_fields = (
+ "title",
+ "logo",
+ "domain",
+ "trial_days",
+ "trial_plan",
+ "redirect_to_after_login",
+ )
+
+ def get_doc(self, doc):
+ if not self.published:
+ frappe.throw("Not permitted")
+
+ doc.proxy_servers = self.get_proxy_servers_for_available_clusters()
+ doc.prefilled_subdomain = self.get_unique_site_name()
+ return doc
+
+ def validate(self):
+ plan = frappe.get_doc("Site Plan", self.trial_plan)
+ if plan.document_type != "Site":
+ frappe.throw("Selected plan is not for site")
+ if not plan.is_trial_plan:
+ frappe.throw("Selected plan is not a trial plan")
+
+ if not self.redirect_to_after_login.startswith("/"):
+ frappe.throw("Redirection route after login should start with /")
+
+ self.validate_hybrid_rules()
+
+ def validate_hybrid_rules(self):
+ for rule in self.hybrid_pool_rules:
+ if not frappe.db.exists("Release Group App", {"parent": self.release_group, "app": rule.app}):
+ frappe.throw(
+ f"App {rule.app} is not present in release group {self.release_group}. "
+ "Please add the app to the release group."
+ )
+
+ def setup_trial_site(
+ self,
+ subdomain: str,
+ domain: str,
+ team: str,
+ cluster: str | None = None,
+ account_request: str | None = None,
+ ):
+ from press.press.doctype.site.site import Site, get_plan_config
+
+ if Site.exists(subdomain, domain):
+ frappe.throw("Site with this subdomain already exists")
+
+ site_domain = f"{subdomain}.{domain}"
+
+ standby_site = self.get_standby_site(cluster, account_request)
+
+ trial_end_date = frappe.utils.add_days(None, self.trial_days or 14)
+ agent_job_name: str | None = None
+ plan = self.trial_plan
+
+ if standby_site:
+ site: Site = frappe.get_doc("Site", standby_site)
+ site.is_standby = False
+ site.team = team
+ site.trial_end_date = trial_end_date
+ site.account_request = account_request
+ apps_site_config = get_app_subscriptions_site_config([d.app for d in self.apps], standby_site)
+ site._update_configuration(apps_site_config, save=False)
+ site._update_configuration(get_plan_config(plan), save=False)
+ site.signup_time = frappe.utils.now()
+ site.generate_saas_communication_secret(create_agent_job=True, save=False)
+ site.save() # Save is needed for create_subscription to work TODO: remove this
+ site.reload()
+ self.set_site_domain(site, site_domain)
+ else:
+ # Create a site in the cluster, if standby site is not available
+ apps = self.get_site_apps(account_request)
+ is_frappe_app_present = any(d["app"] == "frappe" for d in apps)
+ if not is_frappe_app_present:
+ apps.insert(0, {"app": "frappe"})
+
+ site = frappe.get_doc(
+ doctype="Site",
+ subdomain=subdomain,
+ domain=domain,
+ group=self.release_group,
+ cluster=cluster,
+ account_request=account_request,
+ is_standby=False,
+ standby_for_product=self.name,
+ subscription_plan=plan,
+ team=team,
+ apps=apps,
+ trial_end_date=trial_end_date,
+ signup_time=frappe.utils.now(),
+ )
+ apps_site_config = get_app_subscriptions_site_config([d.app for d in self.apps], site.name)
+ site._update_configuration(apps_site_config, save=False)
+ site._update_configuration(get_plan_config(plan), save=False)
+ site.generate_saas_communication_secret(create_agent_job=False, save=False)
+ site.insert()
+ agent_job_name = site.flags.get("new_site_agent_job_name", None)
+
+ return site, agent_job_name, bool(standby_site)
+
+ def get_site_apps(self, account_request: str | None = None):
+ """Get the list of site apps to include in the site creation
+ Also includes hybrid apps if account request has relevant fields
+ """
+ apps = [{"app": d.app} for d in self.apps]
+
+ if account_request and self.enable_hybrid_pooling:
+ fields = [rule.field for rule in self.hybrid_pool_rules]
+ acc_req = (
+ frappe.db.get_value(
+ "Account Request",
+ account_request,
+ fields,
+ as_dict=True,
+ )
+ if account_request
+ else None
+ )
+
+ for rule in self.hybrid_pool_rules:
+ value = acc_req.get(rule.field) if acc_req else None
+ if not value:
+ break
+
+ if rule.value == value:
+ apps += [{"app": rule.app}]
+ break
+
+ return apps
+
+ def get_proxy_servers_for_available_clusters(self):
+ clusters = self.get_available_clusters()
+ proxy_servers = frappe.db.get_all(
+ "Proxy Server",
+ {
+ "cluster": ("in", clusters),
+ },
+ ["name", "is_primary", "cluster"],
+ )
+ proxy_servers_by_cluster = {}
+ for proxy_server in proxy_servers:
+ cluster = proxy_server.cluster
+ proxy_servers_by_cluster.setdefault(cluster, []).append(proxy_server)
+
+ proxy_servers_for_available_clusters = {}
+ for cluster, proxy_servers in proxy_servers_by_cluster.items():
+ primary_proxy = [d for d in proxy_servers if d.is_primary]
+ if primary_proxy:
+ proxy_server_name = primary_proxy[0].name
+ proxy_servers_for_available_clusters[proxy_server_name] = cluster
+
+ return proxy_servers_for_available_clusters
+
+ def set_site_domain(self, site: Site, site_domain: str):
+ if not site_domain:
+ return
+
+ if site.name == site_domain or site.host_name == site_domain:
+ return
+
+ site.add_domain_for_product_site(site_domain)
+ site.add_domain_to_config(site_domain)
+
+ def get_available_clusters(self):
+ release_group = frappe.get_doc("Release Group", self.release_group)
+ clusters = frappe.db.get_all(
+ "Server",
+ {"name": ("in", [d.server for d in release_group.servers])},
+ order_by="name asc",
+			pluck="cluster",
+ )
+ clusters = list(set(clusters))
+ return frappe.db.get_all(
+ "Cluster", {"name": ("in", clusters), "public": 1}, order_by="name asc", pluck="name"
+ )
+
+ @staticmethod
+ def get_preferred_site(filters) -> str | None:
+ sites = frappe.db.get_all(
+ "Site",
+ filters=filters,
+ pluck="name",
+ order_by="status,standby_for,creation asc",
+ limit=10,
+ )
+ if not sites:
+ return None
+ Site = frappe.qb.DocType("Site")
+ Incident = frappe.qb.DocType("Incident")
+ sites_without_incident = (
+ frappe.qb.from_(Site)
+ .select(Site.name)
+ .left_join(Incident)
+ .on(
+ (Site.server == Incident.server)
+ & (Incident.status.isin(["Confirmed", "Validating", "Acknowledged"]))
+ )
+ .where(Site.name.isin(sites))
+ .where(Incident.name.isnull())
+ .run(as_dict=True)
+ )
+ sites_without_incident = [site["name"] for site in sites_without_incident]
+ return sites_without_incident[0] if sites_without_incident else sites[0]
+
+ def get_standby_site(self, cluster: str | None = None, account_request: str | None = None) -> str | None:
+ filters = {
+ "is_standby": True,
+ "standby_for_product": self.name,
+ "status": "Active",
+ }
+ if cluster:
+ filters["cluster"] = cluster
+
+ fields = [rule.field for rule in self.hybrid_pool_rules]
+ acc_req = (
+ frappe.db.get_value(
+ "Account Request",
+ account_request,
+ fields,
+ as_dict=True,
+ )
+ if account_request
+ else None
+ )
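+		# If any hybrid pool rule matches the account request, prefer a standby
+		# site that was provisioned with that rule's app pre-installed.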
+ for rule in self.hybrid_pool_rules:
+ value = acc_req.get(rule.field) if acc_req else None
+ if not value:
+ break
+
+ if rule.value == value:
+ filters["hybrid_for"] = rule.app
+ break
+
+ return ProductTrial.get_preferred_site(filters)
+
+ def create_standby_sites_in_each_cluster(self):
+ if not self.enable_pooling:
+ return
+
+ clusters = self.get_available_clusters()
+ for cluster in clusters:
+ try:
+ self.create_standby_sites(cluster)
+ frappe.db.commit()
+ except Exception as e:
+ log_error(
+ "Unable to Create Standby Sites",
+ data=e,
+ reference_doctype="Product Trial",
+ reference_name=self.name,
+ )
+ frappe.db.rollback()
+
+ def create_standby_sites(self, cluster):
+ if not self.enable_pooling:
+ return
+
+ self._create_standby_sites(cluster)
+
+ if self.enable_hybrid_pooling:
+ for rule in self.hybrid_pool_rules:
+ self._create_standby_sites(cluster, rule)
+
+ def _create_standby_sites(self, cluster: str, rule: HybridPoolItem | None = None):
+ if rule and rule.preferred_cluster and rule.preferred_cluster != cluster:
+ return
+
+ standby_pool_size = rule.custom_pool_size if rule else self.standby_pool_size
+ sites_to_create = standby_pool_size - self.get_standby_sites_count(
+ cluster, rule.app if rule else None
+ )
+ if sites_to_create <= 0:
+ return
+ if sites_to_create > self.standby_queue_size:
+ sites_to_create = self.standby_queue_size
+
+ for _i in range(sites_to_create):
+ self.create_standby_site(cluster, rule)
+ frappe.db.commit()
+
+ def create_standby_site(self, cluster: str, rule: HybridPoolItem | None = None):
+ from frappe.core.utils import find
+
+ administrator = frappe.db.get_value("Team", {"user": "Administrator"}, "name")
+ apps = [{"app": d.app} for d in self.apps]
+
+ if rule:
+ apps += [{"app": rule.app}]
+
+ server = self.get_server_from_cluster(cluster)
+ cluster_domains = frappe.db.get_all(
+ "Root Domain", {"name": ("like", f"%.{self.domain}")}, ["name", "default_cluster as cluster"]
+ )
+ cluster_domain = find(
+ cluster_domains,
+ lambda d: d.cluster == cluster if cluster else False,
+ )
+ domain = cluster_domain.name if cluster_domain else self.domain
+ site = frappe.get_doc(
+ doctype="Site",
+ subdomain=self.get_unique_site_name(),
+ domain=domain,
+ group=self.release_group,
+ cluster=cluster,
+ server=server,
+ is_standby=True,
+ standby_for_product=self.name,
+ hybrid_for=rule.app if rule else None,
+ team=administrator,
+ apps=apps,
+ )
+ site.insert(ignore_permissions=True)
+
+ def get_standby_sites_count(self, cluster: str, hybrid_for: str | None = None):
+ one_hour_ago = frappe.utils.add_to_date(None, hours=-1)
+ Site = frappe.qb.DocType("Site")
+ query = (
+ frappe.qb.from_(Site)
+ .select(Site.name)
+ .distinct()
+ .where(
+ (Site.cluster == cluster) & (Site.is_standby == 1) & (Site.standby_for_product == self.name)
+ )
+ )
+
+ if hybrid_for is None:
+ query = query.where(Site.hybrid_for.isnull())
+ else:
+ query = query.where(Site.hybrid_for == hybrid_for)
+
+ query = query.where(
+ (Site.status == "Active")
+ | ((Site.creation > one_hour_ago) & (Site.status.notin(["Archived", "Suspended"])))
+ )
+ standby_sites = query.run(pluck=True)
+ return len(standby_sites)
+
+ def get_unique_site_name(self):
+ subdomain = f"{self.name}-{generate_random_name(segment_length=3, num_segments=2)}"
+ filters = {
+ "subdomain": subdomain,
+ "domain": self.domain,
+ "status": ("!=", "Archived"),
+ }
+ while frappe.db.exists("Site", filters):
+ subdomain = f"{self.name}-{generate_random_name(segment_length=3, num_segments=2)}"
+ return subdomain
+
+ def get_server_from_cluster(self, cluster):
+ """Return the server with the least number of standby sites in the cluster"""
+ ReleaseGroupServer = frappe.qb.DocType("Release Group Server")
+ Server = frappe.qb.DocType("Server")
+ Bench = frappe.qb.DocType("Bench")
+ servers = (
+ frappe.qb.from_(ReleaseGroupServer)
+ .select(ReleaseGroupServer.server)
+ .distinct()
+ .where(ReleaseGroupServer.parent == self.release_group)
+ .join(Server)
+ .on(Server.name == ReleaseGroupServer.server)
+ .where(Server.cluster == cluster)
+ .join(Bench)
+ .on(Bench.server == ReleaseGroupServer.server)
+ .run(pluck="server")
+ )
+ server_sites = {}
+ for server in servers:
+ server_sites[server] = frappe.db.count(
+ "Site",
+ {
+ "server": server,
+ "status": ("!=", "Archived"),
+ "is_standby": 1,
+ },
+ )
+
+ # get the server with the least number of sites
+ return min(server_sites, key=server_sites.get)
+
+
+def create_free_app_subscription(app: str, site: str | None = None):
+ from press.utils import get_current_team
+
+ free_plan = frappe.get_all(
+ "Marketplace App Plan",
+ {"enabled": 1, "price_usd": ("<=", 0), "app": app},
+ pluck="name",
+ )
+ if not free_plan:
+ return None
+ return frappe.get_doc(
+ {
+ "doctype": "Subscription",
+ "document_type": "Marketplace App",
+ "document_name": app,
+ "plan_type": "Marketplace App Plan",
+ "plan": free_plan[0],
+ "site": site,
+ "enabled": 1,
+ "team": get_current_team(),
+ }
+ ).insert(ignore_permissions=True)
+
+
+def get_app_subscriptions_site_config(apps: list[str], site: str | None = None) -> dict:
+ subscriptions = []
+ site_config: dict[str, Any] = {}
+
+ for app in apps:
+ if not (s := create_free_app_subscription(app, site)):
+ continue
+ subscriptions.append(s)
+ config = frappe.db.get_value("Marketplace App", app, "site_config")
+ config = json.loads(config) if config else {}
+ site_config.update(config)
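+	# Expose each app subscription's secret key to the site as an `sk_<app>` config key.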
+ for s in subscriptions:
+ site_config.update({"sk_" + s.document_name: s.secret_key})
+
+ return site_config
+
+
+def replenish_standby_sites():
+ """Create standby sites for all products with pooling enabled. This is called by the scheduler."""
+ products = frappe.get_all("Product Trial", {"enable_pooling": 1}, pluck="name")
+ for product in products:
+ if has_job_timeout_exceeded():
+ return
+ product: ProductTrial = frappe.get_doc("Product Trial", product)
+ try:
+ product.create_standby_sites_in_each_cluster()
+ frappe.db.commit()
+ except Exception as e:
+ log_error(
+ "Replenish Standby Sites Error",
+ data=e,
+ reference_doctype="Product Trial",
+ reference_name=product.name,
+ )
+ frappe.db.rollback()
+
+
+def send_verification_mail_for_login(email: str, product: str, code: str):
+ """Send verification mail for login."""
+ if frappe.conf.developer_mode:
+ print(f"\nVerification Code for {product}:")
+ print(f"Email : {email}")
+ print(f"Code : {code}")
+ print()
+ return
+ product_trial: ProductTrial = frappe.get_doc("Product Trial", product)
+ sender = ""
+ subject = f"{code} - Verification Code for {product_trial.title} Login"
+ args = {
+		"header_content": f"You have requested a verification code to login to your {product_trial.title} site. The code is valid for 5 minutes.",
+ "otp": code,
+ }
+ inline_images = []
+ if product_trial.email_full_logo:
+ args.update({"image_path": get_url(product_trial.email_full_logo, True)})
+ try:
+ logo_name = product_trial.email_full_logo[1:]
+ args.update({"logo_name": logo_name})
+ with open(frappe.utils.get_site_path("public", logo_name), "rb") as logo_file:
+ inline_images.append(
+ {
+ "filename": logo_name,
+ "filecontent": logo_file.read(),
+ }
+ )
+ except Exception as ex:
+ log_error(
+ "Error reading logo for inline images in email",
+ data=ex,
+ )
+ if product_trial.email_account:
+ sender = frappe.get_value("Email Account", product_trial.email_account, "email_id")
+
+ frappe.sendmail(
+ sender=sender,
+ recipients=email,
+ subject=subject,
+ template="product_trial_verify_account",
+ args=args,
+ now=True,
+ inline_images=inline_images,
+ )
+
+
+def sync_product_site_users():
+ """Fetch and sync users from product sites, so that they can be used for login to the site from FC."""
+
+ product_groups = frappe.db.get_all(
+ "Product Trial", {"published": 1}, ["release_group"], pluck="release_group"
+ )
+ product_benches = frappe.get_all(
+ "Bench", {"group": ("in", product_groups), "status": "Active"}, pluck="name"
+ )
+ for bench_name in product_benches:
+ frappe.enqueue_doc(
+ "Bench",
+ bench_name,
+ "sync_product_site_users",
+ queue="sync",
+ job_id=f"sync_product_site_users||{bench_name}",
+ deduplicate=True,
+ enqueue_after_commit=True,
+ )
+
+
+def send_suspend_mail(site_name: str, product_name: str) -> None:
+ """Send suspension mail to the site owner."""
+
+ site = frappe.db.get_value(
+ "Site", site_name, ["team", "trial_end_date", "name", "host_name"], as_dict=True
+ )
+ product = frappe.db.get_value(
+ "Product Trial",
+ product_name,
+		["title", "suspension_email_subject", "suspension_email_content", "email_full_logo", "logo", "email_account"],
+ as_dict=True,
+ )
+
+ if not site or not product:
+ return
+
+ sender = ""
+	# suspension_email_subject is optional, so guard before formatting
+	subject = (
+		product.suspension_email_subject.format(product_title=product.title)
+		if product.suspension_email_subject
+		else f"Your {product.title} site has expired"
+	)
+ recipient = frappe.get_value("Team", site.team, "user")
+ args = {}
+
+ # TODO: enable it when we use the full logo
+ # if product.email_full_logo:
+ # args.update({"image_path": get_url(product.email_full_logo, True)})
+ if product.logo:
+ args.update({"logo": get_url(product.logo, True), "title": product.title})
+ if product.email_account:
+ sender = frappe.get_value("Email Account", product.email_account, "email_id")
+
+ context = {
+ "site": site,
+ "product": product,
+ }
+ message = frappe.render_template(product.suspension_email_content, context)
+ args.update({"message": message})
+
+ frappe.sendmail(
+ sender=sender,
+ recipients=recipient,
+ subject=subject,
+ template="product_trial_email",
+ args=args,
+ )
diff --git a/press/saas/doctype/product_trial/test_product_trial.py b/press/saas/doctype/product_trial/test_product_trial.py
new file mode 100644
index 00000000000..8411995006f
--- /dev/null
+++ b/press/saas/doctype/product_trial/test_product_trial.py
@@ -0,0 +1,52 @@
+# Copyright (c) 2023, Frappe and Contributors
+# See license.txt
+
+import typing
+
+import frappe
+from frappe.tests.utils import FrappeTestCase
+
+if typing.TYPE_CHECKING:
+ from press.press.doctype.app.app import App
+
+from press.press.doctype.app.test_app import create_test_app
+from press.press.doctype.release_group.test_release_group import (
+ create_test_release_group,
+)
+from press.press.doctype.root_domain.test_root_domain import create_test_root_domain
+from press.press.doctype.site_plan.test_site_plan import create_test_plan
+
+
+def create_test_product_trial(
+ app: "App",
+):
+ frappe_app = create_test_app()
+ trial_plan = create_test_plan("Site", is_trial_plan=True)
+ domain = create_test_root_domain("local.fc.frappe.dev")
+ release_group = create_test_release_group([frappe_app, app])
+ product_trial = frappe.get_doc(
+ {
+ "doctype": "Product Trial",
+ "name": app.name,
+ "title": app.title,
+ "apps": [
+ {
+ "app": "frappe",
+ },
+ {
+ "app": app.name,
+ },
+ ],
+ "trial_plan": trial_plan.name,
+ "domain": domain.name,
+ "release_group": release_group.name,
+ "email_subject": "Test Subject",
+ "email_header_content": "Test Header",
+ }
+ ).insert(ignore_if_duplicate=True)
+ product_trial.reload()
+ return product_trial
+
+
+class TestProductTrial(FrappeTestCase):
+ pass
diff --git a/press/saas/doctype/product_trial_app/__init__.py b/press/saas/doctype/product_trial_app/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/saas/doctype/product_trial_app/product_trial_app.json b/press/saas/doctype/product_trial_app/product_trial_app.json
new file mode 100644
index 00000000000..ea86c603a55
--- /dev/null
+++ b/press/saas/doctype/product_trial_app/product_trial_app.json
@@ -0,0 +1,33 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2024-06-13 16:41:42.707140",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "app"
+ ],
+ "fields": [
+ {
+ "fieldname": "app",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "App",
+ "options": "App",
+ "reqd": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2024-06-13 16:42:00.344213",
+ "modified_by": "Administrator",
+ "module": "SaaS",
+ "name": "Product Trial App",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/saas/doctype/product_trial_app/product_trial_app.py b/press/saas/doctype/product_trial_app/product_trial_app.py
new file mode 100644
index 00000000000..17565666617
--- /dev/null
+++ b/press/saas/doctype/product_trial_app/product_trial_app.py
@@ -0,0 +1,23 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class ProductTrialApp(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ app: DF.Link
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ # end: auto-generated types
+
+ pass
diff --git a/press/saas/doctype/product_trial_request/__init__.py b/press/saas/doctype/product_trial_request/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/saas/doctype/product_trial_request/patches/set_subscription_created_flag.py b/press/saas/doctype/product_trial_request/patches/set_subscription_created_flag.py
new file mode 100644
index 00000000000..5b307623a05
--- /dev/null
+++ b/press/saas/doctype/product_trial_request/patches/set_subscription_created_flag.py
@@ -0,0 +1,8 @@
+# Copyright (c) 2025, Frappe and contributors
+# For license information, please see license.txt
+
+import frappe
+
+
+def execute():
+ frappe.db.set_value("Product Trial Request", {"is_subscription_created": 0}, "is_subscription_created", 1)
diff --git a/press/saas/doctype/product_trial_request/product_trial_request.js b/press/saas/doctype/product_trial_request/product_trial_request.js
new file mode 100644
index 00000000000..0bda19b358f
--- /dev/null
+++ b/press/saas/doctype/product_trial_request/product_trial_request.js
@@ -0,0 +1,29 @@
+// Copyright (c) 2023, Frappe and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('Product Trial Request', {
+ refresh(frm) {
+ frm.add_custom_button(__('Preview Setup Wizard Payload'), function () {
+ frappe.call({
+ method: 'get_setup_wizard_payload',
+ doc: frm.doc,
+ args: {},
+ freeze: true,
+ freeze_message: __('Generating Setup Wizard Payload'),
+ callback: function (r) {
+ if (r.exc) {
+ frappe.msgprint(r.exc);
+ } else {
+ frappe.msgprint({
+ title: __('Setup Wizard Payload'),
+ message: JSON.stringify(r.message, null, 4),
+ });
+ }
+ },
+ error: function (r) {
+ frappe.msgprint(r.message);
+ },
+ });
+ });
+ },
+});
diff --git a/press/saas/doctype/product_trial_request/product_trial_request.json b/press/saas/doctype/product_trial_request/product_trial_request.json
new file mode 100644
index 00000000000..c7c159a4464
--- /dev/null
+++ b/press/saas/doctype/product_trial_request/product_trial_request.json
@@ -0,0 +1,200 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2023-08-01 21:53:18.416281",
+ "default_view": "List",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "product_trial",
+ "team",
+ "site",
+ "domain",
+ "column_break_cubd",
+ "status",
+ "account_request",
+ "agent_job",
+ "cluster",
+ "section_break_zlzx",
+ "site_creation_started_on",
+ "is_standby_site",
+ "is_subscription_created",
+ "column_break_bvut",
+ "site_creation_completed_on",
+ "is_site_accessible",
+ "errors_tab",
+ "error"
+ ],
+ "fields": [
+ {
+ "fieldname": "team",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Team",
+ "options": "Team",
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "account_request",
+ "fieldtype": "Link",
+ "label": "Account Request",
+ "options": "Account Request",
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "column_break_cubd",
+ "fieldtype": "Column Break"
+ },
+ {
+ "default": "Pending",
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "in_standard_filter": 1,
+ "label": "Status",
+ "options": "Pending\nWait for Site\nPrefilling Setup Wizard\nAdding Domain\nSite Created\nError\nExpired"
+ },
+ {
+ "fieldname": "site",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Site",
+ "options": "Site",
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "agent_job",
+ "fieldtype": "Link",
+ "label": "Agent Job",
+ "options": "Agent Job",
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "product_trial",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Product Trial",
+ "options": "Product Trial",
+ "read_only": 1,
+ "search_index": 1
+ },
+ {
+ "fieldname": "section_break_zlzx",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "site_creation_started_on",
+ "fieldtype": "Datetime",
+ "label": "Site Creation Started On",
+ "read_only": 1
+ },
+ {
+ "fieldname": "column_break_bvut",
+ "fieldtype": "Column Break"
+ },
+ {
+ "fieldname": "site_creation_completed_on",
+ "fieldtype": "Datetime",
+ "label": "Site Creation Completed On",
+ "read_only": 1
+ },
+ {
+ "fieldname": "domain",
+ "fieldtype": "Data",
+ "label": "Domain"
+ },
+ {
+ "fieldname": "cluster",
+ "fieldtype": "Link",
+ "label": "Cluster",
+ "options": "Cluster"
+ },
+ {
+ "default": "0",
+ "fieldname": "is_standby_site",
+ "fieldtype": "Check",
+ "label": "Is Standby Site"
+ },
+ {
+ "default": "Not Checked",
+ "fieldname": "is_site_accessible",
+ "fieldtype": "Select",
+ "label": "Is Site Accessible",
+ "options": "Not Checked\nYes\nNo",
+ "read_only": 1
+ },
+ {
+ "fieldname": "errors_tab",
+ "fieldtype": "Tab Break",
+ "label": "Errors"
+ },
+ {
+ "fieldname": "error",
+ "fieldtype": "Code",
+ "label": "Error",
+ "read_only": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "is_subscription_created",
+ "fieldtype": "Check",
+ "label": "Is Subscription Created"
+ }
+ ],
+ "grid_page_length": 50,
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2025-09-22 14:10:22.140755",
+ "modified_by": "Administrator",
+ "module": "SaaS",
+ "name": "Product Trial Request",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Admin",
+ "share": 1,
+ "write": 1
+ },
+ {
+ "create": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "Press Member",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "row_format": "Dynamic",
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": [],
+ "title_field": "product_trial",
+ "track_changes": 1
+}
diff --git a/press/saas/doctype/product_trial_request/product_trial_request.py b/press/saas/doctype/product_trial_request/product_trial_request.py
new file mode 100644
index 00000000000..8e95b1a1f6d
--- /dev/null
+++ b/press/saas/doctype/product_trial_request/product_trial_request.py
@@ -0,0 +1,487 @@
+# Copyright (c) 2023, Frappe and contributors
+# For license information, please see license.txt
+
+from __future__ import annotations
+
+import urllib
+import urllib.parse
+from contextlib import suppress
+from typing import TYPE_CHECKING
+
+import frappe
+from frappe.model.document import Document
+from frappe.utils.caching import redis_cache
+from frappe.utils.data import add_to_date, now_datetime
+from frappe.utils.telemetry import init_telemetry
+
+from press.api.client import dashboard_whitelist
+from press.press.doctype.root_domain.root_domain import get_domains
+from press.press.doctype.telegram_message.telegram_message import TelegramMessage
+from press.utils import log_error, validate_subdomain
+
+if TYPE_CHECKING:
+ from press.press.doctype.site.site import Site
+ from press.saas.doctype.product_trial.product_trial import ProductTrial
+
+
+class ProductTrialRequest(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ account_request: DF.Link | None
+ agent_job: DF.Link | None
+ cluster: DF.Link | None
+ domain: DF.Data | None
+ error: DF.Code | None
+ is_site_accessible: DF.Literal["Not Checked", "Yes", "No"]
+ is_standby_site: DF.Check
+ is_subscription_created: DF.Check
+ product_trial: DF.Link | None
+ site: DF.Link | None
+ site_creation_completed_on: DF.Datetime | None
+ site_creation_started_on: DF.Datetime | None
+ status: DF.Literal[
+ "Pending",
+ "Wait for Site",
+ "Prefilling Setup Wizard",
+ "Adding Domain",
+ "Site Created",
+ "Error",
+ "Expired",
+ ]
+ team: DF.Link | None
+ # end: auto-generated types
+
+ dashboard_fields = ("site", "status", "product_trial", "domain")
+
+ agent_job_step_to_frontend_step = { # noqa: RUF012
+ "New Site": {
+ "New Site": "Building Site",
+ "Install Apps": "Installing Apps",
+ "Update Site Configuration": "Updating Configuration",
+ "Enable Scheduler": "Finalizing Site",
+ "Bench Setup Nginx": "Finalizing Site",
+ "Reload Nginx": "Just a moment",
+ },
+ "Rename Site": {
+ "Enable Maintenance Mode": "Starting",
+ "Wait for Enqueued Jobs": "Starting",
+ "Update Site Configuration": "Preparing Site",
+ "Rename Site": "Preparing Site",
+ "Bench Setup NGINX": "Preparing Site",
+ "Reload NGINX": "Finalizing Site",
+ "Disable Maintenance Mode": "Finalizing Site",
+ "Enable Scheduler": "Just a moment",
+ },
+ }
+
+ def get_email(self):
+ return frappe.db.get_value("Team", self.team, "user")
+
+ @redis_cache(ttl=2 * 60)
+ def is_first_trial_request(self) -> bool:
+ return (
+ frappe.db.count(
+ "Product Trial Request",
+ filters={
+ "account_request": self.account_request,
+ "name": ("!=", self.name),
+ "status": ("not in", ["Expired", "Error", "Pending"]),
+ },
+ )
+ < 1
+ )
+
+ def capture_posthog_event(self, event_name):
+ if not self.is_first_trial_request():
+ # Only capture events for the first trial request
+ return
+
+ init_telemetry()
+ ph = getattr(frappe.local, "posthog", None)
+ with suppress(Exception):
+ ph and ph.capture(
+ distinct_id=self.account_request,
+ event=f"fc_product_trial_{event_name}",
+ properties={
+ "product_trial": True,
+ "product_trial_request_id": self.name,
+ "product_trial_id": self.product_trial,
+ "email": self.get_email(),
+ },
+ )
+
+ def set_posthog_alias(self, new_alias: str):
+ if not self.is_first_trial_request():
+ # Only set alias for the first trial request
+ return
+
+ init_telemetry()
+ ph = getattr(frappe.local, "posthog", None)
+ with suppress(Exception):
+ ph and ph.alias(previous_id=self.account_request, distinct_id=new_alias)
+
+ def check_site_accessible(self):
+ """
+ Checks if the site is accessible (HTTP 200, no redirects).
+ Sets self.is_site_accessible to Yes, No, or Not Checked.
+ """
+ import requests
+
+ url = f"https://{self.domain or self.site}"
+ try:
+ response = requests.get(url, allow_redirects=False, timeout=5)
+ if response.status_code == 200:
+ self.db_set("is_site_accessible", "Yes")
+ else:
+ self.db_set({"is_site_accessible": "No"})
+ except Exception as e:
+ self.db_set({"is_site_accessible": "No", "error": str(e)})
+
+ def after_insert(self):
+ self.capture_posthog_event("product_trial_request_created")
+
+ def on_update(self):
+ if self.has_value_changed("site") and self.site:
+ self.set_posthog_alias(self.site)
+
+ if self.has_value_changed("status"):
+ match self.status:
+ case "Error":
+ self.capture_posthog_event("product_trial_request_failed")
+ case "Wait for Site":
+ self.capture_posthog_event("product_trial_request_initiated_site_creation")
+ case "Prefilling Setup Wizard":
+ self.capture_posthog_event("product_trial_request_started_setup_wizard_prefilled")
+ case "Site Created":
+ self.capture_posthog_event("product_trial_request_site_created")
+
+ # create a webhook record on the site so that
+ # the site's user records can be synced with press
+ site: Site = frappe.get_doc("Site", self.site)
+ site.create_sync_user_webhook()
+
+ @frappe.whitelist()
+ def get_setup_wizard_payload(self):
+ import json
+
+ try:
+ team_details = frappe.db.get_value(
+ "Team", self.team, ["name", "user", "country", "currency"], as_dict=True
+ )
+ team_user = frappe.db.get_value(
+ "User", team_details.user, ["first_name", "last_name", "full_name", "email"], as_dict=True
+ )
+
+ if self.account_request:
+ account_request_geo_data = frappe.db.get_value(
+ "Account Request", self.account_request, "geo_location"
+ )
+ else:
+ account_request_geo_data = frappe.db.get_value(
+ "Account Request", {"email": team_user.email}, "geo_location"
+ )
+
+ timezone = frappe.parse_json(account_request_geo_data or {}).get("timezone", "Asia/Kolkata")
+
+ return json.dumps(
+ {
+ "email": team_user.email,
+ "first_name": team_user.first_name,
+ "last_name": team_user.last_name,
+ "full_name": team_user.full_name,
+ }
+ ), json.dumps(
+ {
+ "country": team_details.country,
+ "time_zone": timezone,
+ "language": "en",
+ "currency": team_details.currency,
+ # setup wizard will override currency anyway
+ # but adding this since ERPNext will throw an error otherwise
+ }
+ )
+ except Exception as e:
+ log_error(
+ title="Product Trial Request Setup Wizard Payload Generation Error",
+ data=e,
+ reference_doctype=self.doctype,
+ reference_name=self.name,
+ )
+ frappe.throw(f"Failed to generate payload for Setup Wizard: {e}")
+
+ def validate_subdomain_and_domain(self, subdomain: str, domain: str):
+ validate_subdomain(subdomain)
+ if domain not in get_domains():
+ frappe.throw("Invalid domain")
+
+ @dashboard_whitelist()
+ def create_site(self, subdomain: str, domain: str):
+ """
+ Trigger the site creation process for the product trial request.
+ Args:
+ subdomain (str): The subdomain for the new site.
+ domain (str): The domain for the new site.
+ """
+ if self.status != "Pending":
+ return
+
+ self.validate_subdomain_and_domain(subdomain, domain)
+
+ try:
+ product: ProductTrial = frappe.get_doc("Product Trial", self.product_trial)
+ self.status = "Wait for Site"
+ self.site_creation_started_on = now_datetime()
+ self.domain = f"{subdomain}.{domain}"
+ cluster = frappe.db.get_value("Root Domain", domain, "default_cluster")
+ self.cluster = cluster
+ site, agent_job_name, is_standby_site = product.setup_trial_site(
+ subdomain=subdomain,
+ domain=domain,
+ team=self.team,
+ cluster=cluster,
+ account_request=self.account_request,
+ )
+ self.is_standby_site = is_standby_site
+ self.agent_job = agent_job_name
+ self.site = site.name
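+ # Standby sites come pre-built from the pool; the subscription is only
+ # marked as created here for freshly provisioned (non-standby) sites.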
+ if not is_standby_site:
+ self.is_subscription_created = 1
+ self.save()
+
+ if is_standby_site:
+ self.prefill_setup_wizard_data()
+
+ user_mail = frappe.db.get_value("Team", self.team, "user")
+ frappe.get_doc(
+ {
+ "doctype": "Site User",
+ "site": site.name,
+ "user": user_mail,
+ "enabled": 1,
+ }
+ ).insert(ignore_permissions=True)
+ except frappe.exceptions.ValidationError:
+ raise
+ except Exception as e:
+ log_error(
+ title="Product Trial Request Site Creation Error",
+ data=e,
+ reference_doctype=self.doctype,
+ reference_name=self.name,
+ )
+ self.status = "Error"
+ self.error = str(e)
+ self.save()
+
+ @dashboard_whitelist()
+ def get_progress(self, current_progress=None): # noqa: C901
+ current_progress = current_progress or 10
+ if self.agent_job:
+ filters = {"name": self.agent_job, "site": self.site}
+ else:
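+ # No agent job recorded on the request; fall back to the latest
+ # site creation/rename job for this site.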
+ filters = {"site": self.site, "job_type": ["in", ["New Site", "Rename Site"]]}
+ job_name, status, job_type = frappe.db.get_value(
+ "Agent Job",
+ filters,
+ ["name", "status", "job_type"],
+ )
+ if status == "Success":
+ if self.status == "Site Created":
+ return {"progress": 100, "current_step": self.status}
+ if self.status == "Adding Domain":
+ return {"progress": 90, "current_step": self.status}
+ return {"progress": 80, "current_step": self.status}
+
+ if status == "Running":
+ steps = frappe.db.get_all(
+ "Agent Job Step",
+ filters={"agent_job": job_name},
+ fields=["step_name", "status"],
+ order_by="creation asc",
+ )
+ done = [s for s in steps if s.status in ("Success", "Skipped", "Failure")]
+ steps_count = len(steps)
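+ # Inflate the denominator by one so the reported progress stays
+ # below 100% until the request status itself advances.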
+ steps_count += 1
+ progress = (len(done) / steps_count) * 100
+ progress = max(progress, current_progress)
+ current_running_step = ""
+ for step in steps:
+ if step.status == "Running":
+ current_running_step = self.agent_job_step_to_frontend_step.get(job_type, {}).get(
+ step.step_name, step.step_name
+ )
+ break
+ return {"progress": progress + 0.1, "current_step": current_running_step}
+
+ if self.status == "Error":
+ return {"progress": current_progress, "error": True}
+
+ # If agent job is undelivered, pending
+ return {"progress": current_progress + 0.1}
+
+ def prefill_setup_wizard_data(self):
+ if self.status == "Prefilling Setup Wizard":
+ return
+
+ site: Site = frappe.get_doc("Site", self.site)
+ try:
+ user_payload, system_settings_payload = self.get_setup_wizard_payload()
+ site.prefill_setup_wizard(system_settings_payload, user_payload)
+ if self.site != self.domain:
+ self.status = "Prefilling Setup Wizard"
+ self.save()
+ except Exception as e:
+ log_error(
+ title="Product Trial Request Prefill Setup Wizard Error",
+ data=e,
+ reference_doctype=self.doctype,
+ reference_name=self.name,
+ )
+
+ @dashboard_whitelist()
+ def get_login_sid(self):
+ site: Site = frappe.get_doc("Site", self.site)
+ redirect_to_after_login = frappe.db.get_value(
+ "Product Trial",
+ self.product_trial,
+ "redirect_to_after_login",
+ )
+ if site.additional_system_user_created and site.setup_wizard_complete:
+ # go to setup wizard as admin only
+ # they'll log in as user after setup wizard
+ email = frappe.db.get_value("Team", self.team, "user")
+ sid = site.get_login_sid(user=email)
+ return f"https://{self.domain or self.site}{redirect_to_after_login}?sid={sid}"
+
+ sid = site.get_login_sid()
+ self.check_site_accessible()
+ return f"https://{self.domain or self.site}/app?sid={sid}"
+
+
+def get_app_trial_page_url():
+ referer = frappe.request.headers.get("referer", "")
+ if not referer:
+ return None
+ try:
+ # parse the referer url
+ site = urllib.parse.urlparse(referer).hostname
+ # check if any product trial request exists for the site
+ product_trial_name = frappe.db.get_value("Product Trial Request", {"site": site}, "product_trial")
+ if product_trial_name:
+ # Check site status
+ # site_status = frappe.db.get_value("Site", site, "status")
+ # if site_status in ("Active", "Inactive", "Suspended"):
+ return f"/dashboard/signup?product={product_trial_name}"
+ except Exception:
+ frappe.log_error(title="App Trial Page URL Error")
+ return None
+
+
+def expire_long_pending_trial_requests():
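+ # Bulk-expire requests that have been stuck in Pending for over six hours.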
+ frappe.db.set_value(
+ "Product Trial Request",
+ {"status": "Pending", "creation": ("<", add_to_date(now_datetime(), hours=-6))},
+ "status",
+ "Expired",
+ update_modified=False,
+ )
+
+
+def gather_stats(time_ago):
+ stats = {
+ "total_trials": 0,
+ "failed_trials": 0,
+ "succeeded_trials": 0,
+ "expired_trials": 0,
+ "pending_trials": 0,
+ "app_wise_failures": {},
+ "total_creation_time": 0,
+ "valid_trials_with_timing": 0,
+ }
+ try:
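+ # Owners matching fc-signup-test_% are assumed to be automated
+ # signup tests and are excluded from the stats.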
+ trial_requests = frappe.db.get_all(
+ "Product Trial Request",
+ {"creation": (">", time_ago), "owner": ("not like", "fc-signup-test_%")},
+ ["name", "status", "product_trial", "site_creation_started_on", "site_creation_completed_on"],
+ )
+ stats["total_trials"] = len(trial_requests)
+ for req in trial_requests:
+ if req.status == "Error":
+ stats["failed_trials"] = stats["failed_trials"] + 1
+ stats["app_wise_failures"][req.product_trial] = (
+ stats["app_wise_failures"].get(req.product_trial, 0) + 1
+ )
+ elif req.status == "Site Created":
+ stats["succeeded_trials"] = stats["succeeded_trials"] + 1
+ elif req.status == "Expired":
+ stats["expired_trials"] = stats["expired_trials"] + 1
+ elif req.status == "Pending":
+ stats["pending_trials"] = stats["pending_trials"] + 1
+
+ # avg time taken for the day
+ if req.site_creation_started_on and req.site_creation_completed_on:
+ start_to_end_time = (
+ req.site_creation_completed_on - req.site_creation_started_on
+ ).total_seconds()
+ stats["total_creation_time"] += start_to_end_time
+ stats["valid_trials_with_timing"] += 1
+ return stats
+ except Exception as e:
+ log_error(
+ title="Error gathering stats in Product Trial Request",
+ data=e,
+ )
+ return None
+
+
+def push_stats_message(stats, message):
+ if stats:
+ message += f"**Total Trials**: {stats['total_trials']}\n\n"
+ message = (
+ message
+ + f"[Succeeded trial requests](https://frappecloud.com/app/product-trial-request?status=Site+Created): {stats['succeeded_trials']}\n"
+ )
+ message = (
+ message
+ + f"[Failed trial requests](https://frappecloud.com/app/product-trial-request?status=Error): {stats['failed_trials']}\n"
+ )
+
+ # add app failure counts to message
+ if stats["app_wise_failures"]:
+ message += "**Application Failure Breakdown:**\n"
+ for app, count in stats["app_wise_failures"].items():
+ message = message + f"{app} failed {count!s} time(s)\n"
+
+ if stats["valid_trials_with_timing"] > 0:
+ avg_time = stats["total_creation_time"] / stats["valid_trials_with_timing"]
+ message += f"**Average Site Creation Time**: {avg_time:.2f}s\n"
+ else:
+ message += "**Average Site Creation Time**: No data available\n"
+ TelegramMessage.enqueue(message=message, topic="Signups")
+
+
+def gather_weekly_stats():
+ one_week_ago = frappe.utils.add_to_date(None, days=-7)
+ message = "*Weekly Signup stats*\n\n"
+ stats = gather_stats(one_week_ago)
+ push_stats_message(stats, message)
+
+
+def gather_daily_stats():
+ one_day_ago = frappe.utils.add_to_date(None, days=-1)
+ message = "*Daily Signup stats*\n\n"
+ stats = gather_stats(one_day_ago)
+ push_stats_message(stats, message)
+
+
+def gather_hourly_stats():
+ one_hour_ago = frappe.utils.add_to_date(None, hours=-1)
+ message = "*Hourly Signup stats*\n\n"
+ stats = gather_stats(one_hour_ago)
+ push_stats_message(stats, message)
diff --git a/press/saas/doctype/product_trial_request/test_product_trial_request.py b/press/saas/doctype/product_trial_request/test_product_trial_request.py
new file mode 100644
index 00000000000..cc2ab45af87
--- /dev/null
+++ b/press/saas/doctype/product_trial_request/test_product_trial_request.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2023, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class TestProductTrialRequest(FrappeTestCase):
+ pass
diff --git a/press/saas/doctype/product_trial_signup_field/__init__.py b/press/saas/doctype/product_trial_signup_field/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/saas/doctype/product_trial_signup_field/product_trial_signup_field.json b/press/saas/doctype/product_trial_signup_field/product_trial_signup_field.json
new file mode 100644
index 00000000000..900ebc4e7e3
--- /dev/null
+++ b/press/saas/doctype/product_trial_signup_field/product_trial_signup_field.json
@@ -0,0 +1,73 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2024-06-13 16:44:03.404154",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "label",
+ "fieldtype",
+ "fieldname",
+ "required",
+ "options",
+ "min_password_score"
+ ],
+ "fields": [
+ {
+ "fieldname": "label",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Label",
+ "reqd": 1
+ },
+ {
+ "fieldname": "fieldtype",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Fieldtype",
+ "options": "Data\nSelect\nCheck\nDate\nPassword",
+ "reqd": 1
+ },
+ {
+ "fieldname": "fieldname",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Fieldname",
+ "reqd": 1
+ },
+ {
+ "depends_on": "eval: doc.fieldtype == \"Select\"",
+ "fieldname": "options",
+ "fieldtype": "Small Text",
+ "label": "Options"
+ },
+ {
+ "default": "1",
+ "fieldname": "required",
+ "fieldtype": "Check",
+ "in_list_view": 1,
+ "label": "Required"
+ },
+ {
+ "default": "2",
+ "depends_on": "eval: doc.fieldtype == \"Password\"",
+ "fieldname": "min_password_score",
+ "fieldtype": "Select",
+ "label": "Min Password Score",
+ "options": "2\n3\n4"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2024-08-19 12:55:36.433514",
+ "modified_by": "Administrator",
+ "module": "SaaS",
+ "name": "Product Trial Signup Field",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/saas/doctype/product_trial_signup_field/product_trial_signup_field.py b/press/saas/doctype/product_trial_signup_field/product_trial_signup_field.py
new file mode 100644
index 00000000000..9acf623df40
--- /dev/null
+++ b/press/saas/doctype/product_trial_signup_field/product_trial_signup_field.py
@@ -0,0 +1,28 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+
+class ProductTrialSignupField(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ fieldname: DF.Data
+ fieldtype: DF.Literal["Data", "Select", "Check", "Date", "Password"]
+ label: DF.Data
+ min_password_score: DF.Literal["2", "3", "4"]
+ options: DF.SmallText | None
+ parent: DF.Data
+ parentfield: DF.Data
+ parenttype: DF.Data
+ required: DF.Check
+ # end: auto-generated types
+
+ pass
diff --git a/press/saas/doctype/saas_app/saas_app.py b/press/saas/doctype/saas_app/saas_app.py
index 5d28ae99c73..d0b03650bab 100644
--- a/press/saas/doctype/saas_app/saas_app.py
+++ b/press/saas/doctype/saas_app/saas_app.py
@@ -3,6 +3,7 @@
import frappe
from frappe.model.document import Document
+
from press.saas.doctype.saas_app_plan.saas_app_plan import get_app_plan_features
from press.utils import get_current_team
@@ -63,7 +64,7 @@ def get_plans_for_app(app, site):
def get_plan_prices(plan_name):
plan_prices = frappe.db.get_value(
- "Plan", plan_name, ["plan_title", "price_usd", "price_inr"], as_dict=True
+ "Site Plan", plan_name, ["plan_title", "price_usd", "price_inr"], as_dict=True
)
return plan_prices
diff --git a/press/saas/doctype/saas_app/test_saas_app.py b/press/saas/doctype/saas_app/test_saas_app.py
index aedfc2fb658..78e5bb7f242 100644
--- a/press/saas/doctype/saas_app/test_saas_app.py
+++ b/press/saas/doctype/saas_app/test_saas_app.py
@@ -2,14 +2,11 @@
# See license.txt
import frappe
-
from frappe.tests.utils import FrappeTestCase
def create_test_saas_app(app: str):
- return frappe.get_doc({"doctype": "Saas App", "app": app}).insert(
- ignore_if_duplicate=True
- )
+ return frappe.get_doc({"doctype": "Saas App", "app": app}).insert(ignore_if_duplicate=True)
class TestSaasApp(FrappeTestCase):
diff --git a/press/saas/doctype/saas_app_plan/saas_app_plan.json b/press/saas/doctype/saas_app_plan/saas_app_plan.json
index 30ce5ed44ff..45ce67e28d3 100644
--- a/press/saas/doctype/saas_app_plan/saas_app_plan.json
+++ b/press/saas/doctype/saas_app_plan/saas_app_plan.json
@@ -58,7 +58,7 @@
"fieldtype": "Link",
"in_list_view": 1,
"label": "Plan",
- "options": "Plan",
+ "options": "Site Plan",
"reqd": 1
},
{
@@ -83,7 +83,7 @@
"fieldname": "site_plan",
"fieldtype": "Link",
"label": "Site Plan",
- "options": "Plan",
+ "options": "Site Plan",
"reqd": 1
},
{
@@ -145,7 +145,7 @@
],
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2022-07-19 09:26:01.277657",
+ "modified": "2024-02-05 23:05:22.125111",
"modified_by": "Administrator",
"module": "SaaS",
"name": "Saas App Plan",
diff --git a/press/saas/doctype/saas_app_plan/saas_app_plan.py b/press/saas/doctype/saas_app_plan/saas_app_plan.py
index 55be526ba86..7037161a00c 100644
--- a/press/saas/doctype/saas_app_plan/saas_app_plan.py
+++ b/press/saas/doctype/saas_app_plan/saas_app_plan.py
@@ -1,9 +1,11 @@
# Copyright (c) 2022, Frappe and contributors
# For license information, please see license.txt
-import frappe
from typing import List
+
+import frappe
from frappe.model.document import Document
+
from press.press.doctype.invoice.invoice import calculate_gst
from press.utils import get_current_team
@@ -14,7 +16,7 @@ def validate(self):
self.validate_payout_percentage()
def validate_plan(self):
- dt = frappe.db.get_value("Plan", self.plan, "document_type")
+ dt = frappe.db.get_value("Site Plan", self.plan, "document_type")
if dt != "Saas App":
frappe.throw("The plan must be a Saas App plan.")
@@ -25,7 +27,7 @@ def get_total_amount(self, payment_option):
:option "Monthly" or "Annual"
"""
team = get_current_team(True)
- amount = frappe.db.get_value("Plan", self.plan, f"price_{team.currency.lower()}")
+ amount = frappe.db.get_value("Site Plan", self.plan, f"price_{team.currency.lower()}")
amount = amount * 12 if payment_option == "Annual" else amount
if team.country == "India" and self.gst_inclusive:
@@ -42,8 +44,8 @@ def validate_payout_percentage(self):
if self.is_free:
return
- site_plan = frappe.db.get_value("Plan", self.site_plan, "price_usd")
- saas_plan = frappe.db.get_value("Plan", self.plan, "price_usd")
+ site_plan = frappe.db.get_value("Site Plan", self.site_plan, "price_usd")
+ saas_plan = frappe.db.get_value("Site Plan", self.plan, "price_usd")
self.payout_percentage = 100 - float("{:.2f}".format((site_plan / saas_plan) * 100))
diff --git a/press/saas/doctype/saas_app_subscription/saas_app_subscription.py b/press/saas/doctype/saas_app_subscription/saas_app_subscription.py
index 653e24e3723..35455040255 100644
--- a/press/saas/doctype/saas_app_subscription/saas_app_subscription.py
+++ b/press/saas/doctype/saas_app_subscription/saas_app_subscription.py
@@ -2,11 +2,13 @@
# For license information, please see license.txt
import json
+from datetime import datetime
+
import frappe
from frappe.model.document import Document
from frappe.utils import add_to_date
+
from press.utils import log_error
-from datetime import datetime
class SaasAppSubscription(Document):
@@ -22,9 +24,9 @@ def set_secret_key(self):
def create_site_config_key(self):
if not frappe.db.exists("Site Config Key", {"key": f"sk_{self.app}"}):
- frappe.get_doc(
- doctype="Site Config Key", internal=True, key=f"sk_{self.app}"
- ).insert(ignore_permissions=True)
+ frappe.get_doc(doctype="Site Config Key", internal=True, key=f"sk_{self.app}").insert(
+ ignore_permissions=True
+ )
def before_insert(self):
self.validate_duplicate_subscription()
@@ -79,9 +81,7 @@ def change_plan(self, new_plan, ignore_card_setup=False):
# TODO: Remove this from here
if self.app == "erpnext_smb":
site = frappe.get_doc("Site", self.site)
- config = json.loads(
- frappe.db.get_value("Saas App Plan", self.saas_app_plan, "config")
- )
+ config = json.loads(frappe.db.get_value("Saas App Plan", self.saas_app_plan, "config"))
site.update_site_config({"plan": config["plan"]})
def update_end_date(self, payment_option):
@@ -89,9 +89,7 @@ def update_end_date(self, payment_option):
self.end_date = add_to_date(self.end_date or datetime.today().date(), days=days)
def validate_duplicate_subscription(self):
- already_exists = frappe.db.exists(
- "Saas App Subscription", {"app": self.app, "site": self.site}
- )
+ already_exists = frappe.db.exists("Saas App Subscription", {"app": self.app, "site": self.site})
if already_exists:
frappe.throw(
@@ -123,14 +121,12 @@ def disable(self):
def calculate_payout(self, amount, saas_app_plan=None):
# Amount of money that is supposed to be paidout to the developers
saas_app_plan = saas_app_plan or self.saas_app_plan
- payout_percentage = frappe.db.get_value(
- "Saas App Plan", saas_app_plan, "payout_percentage"
- )
+ payout_percentage = frappe.db.get_value("Saas App Plan", saas_app_plan, "payout_percentage")
return (amount / 100) * float(payout_percentage)
def create_usage_record(self):
if self.is_usage_record_created():
- return
+ return None
team_name = frappe.db.get_value("Site", self.site, "team")
team = frappe.get_cached_doc("Team", team_name)
@@ -138,10 +134,13 @@ def create_usage_record(self):
if team.parent_team:
team = frappe.get_cached_doc("Team", team.parent_team)
+ if team.billing_team:
+ team = frappe.get_cached_doc("Team", team.billing_team)
+
if not team.get_upcoming_invoice():
team.create_upcoming_invoice()
- plan = frappe.get_cached_doc("Plan", self.plan)
+ plan = frappe.get_cached_doc("Site Plan", self.plan)
amount = plan.get_price_for_interval(self.interval, team.currency)
payout = self.calculate_payout(amount)
@@ -197,9 +196,7 @@ def suspend_prepaid_subscriptions():
subscription.suspend()
frappe.db.commit()
except Exception:
- frappe.db.rollback(
- "Saas Subscription: Cannot suspend prepaid subscription", subscription.name
- )
+ frappe.db.rollback("Saas Subscription: Cannot suspend prepaid subscription", subscription.name)
def create_usage_records():
@@ -298,7 +295,7 @@ def create_saas_invoice(
"description": "Saas Prepaid Purchase",
"document_type": "Saas App",
"document_name": document_name,
- "plan": frappe.db.get_value("Plan", plan, "plan_title"),
+ "plan": frappe.db.get_value("Site Plan", plan, "plan_title"),
"quantity": 1,
"rate": amount,
},
diff --git a/press/saas/doctype/saas_settings/saas_settings.js b/press/saas/doctype/saas_settings/saas_settings.js
index 4bc72d9e7dc..af20eb0a177 100644
--- a/press/saas/doctype/saas_settings/saas_settings.js
+++ b/press/saas/doctype/saas_settings/saas_settings.js
@@ -5,7 +5,7 @@ frappe.ui.form.on('Saas Settings', {
refresh: function (frm) {
frm.set_query('plan', () => {
return {
- filters: { app: frm.doc.name, is_free: 1 },
+ filters: { app: frm.doc.name, price_usd: ['=', 0] },
};
});
},
diff --git a/press/saas/doctype/saas_settings/saas_settings.json b/press/saas/doctype/saas_settings/saas_settings.json
index 8296f8bd059..7840896e084 100644
--- a/press/saas/doctype/saas_settings/saas_settings.json
+++ b/press/saas/doctype/saas_settings/saas_settings.json
@@ -1,6 +1,4 @@
{
- "_comments": "[]",
- "_liked_by": "[]",
"actions": [],
"allow_rename": 1,
"autoname": "field:app",
@@ -14,6 +12,7 @@
"domain",
"cluster",
"group",
+ "email_account",
"column_break_9",
"apps",
"multi_subscription",
@@ -95,7 +94,7 @@
"fieldname": "site_plan",
"fieldtype": "Link",
"label": "Site Plan",
- "options": "Plan"
+ "options": "Site Plan"
},
{
"fieldname": "billing_type",
@@ -152,11 +151,18 @@
"fieldname": "multi_subscription",
"fieldtype": "Check",
"label": "Multi Subscription"
+ },
+ {
+ "description": "Email account for sending signup/verification mails",
+ "fieldname": "email_account",
+ "fieldtype": "Link",
+ "label": "Email Account",
+ "options": "Email Account"
}
],
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2023-05-31 12:25:49.138663",
+ "modified": "2024-09-13 09:55:11.473243",
"modified_by": "Administrator",
"module": "SaaS",
"name": "Saas Settings",
diff --git a/press/saas/doctype/saas_settings/saas_settings.py b/press/saas/doctype/saas_settings/saas_settings.py
index a3fa0b23b51..f143919d68d 100644
--- a/press/saas/doctype/saas_settings/saas_settings.py
+++ b/press/saas/doctype/saas_settings/saas_settings.py
@@ -6,4 +6,32 @@
class SaasSettings(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+ from press.press.doctype.erpnext_app.erpnext_app import ERPNextApp
+
+ app: DF.Link | None
+ apps: DF.Table[ERPNextApp]
+ billing_type: DF.Literal["prepaid", "postpaid"]
+ cluster: DF.Link | None
+ default_team: DF.Link | None
+ domain: DF.Link | None
+ email_account: DF.Link | None
+ enable_hybrid_pools: DF.Check
+ enable_pooling: DF.Check
+ group: DF.Link | None
+ multi_subscription: DF.Check
+ multiplier_pricing: DF.Check
+ plan: DF.Link | None
+ site_plan: DF.Link | None
+ standby_pool_size: DF.Int
+ standby_queue_size: DF.Int
+ whitelisted_apps: DF.Table[ERPNextApp]
+ # end: auto-generated types
+
pass
diff --git a/press/saas/doctype/saas_settings/test_saas_settings.py b/press/saas/doctype/saas_settings/test_saas_settings.py
index ccfbbf363d2..64f23e5a53c 100644
--- a/press/saas/doctype/saas_settings/test_saas_settings.py
+++ b/press/saas/doctype/saas_settings/test_saas_settings.py
@@ -1,35 +1,43 @@
# Copyright (c) 2022, Frappe and Contributors
# See license.txt
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
import frappe
from frappe.tests.utils import FrappeTestCase
-from press.marketplace.doctype.marketplace_app_plan.test_marketplace_app_plan import (
- create_test_marketplace_app_plan,
-)
+
from press.press.doctype.app.test_app import create_test_app
-from press.press.doctype.plan.test_plan import create_test_plan
-from press.press.doctype.release_group.release_group import ReleaseGroup
from press.press.doctype.release_group.test_release_group import (
create_test_release_group,
)
+from press.press.doctype.site_plan.test_site_plan import create_test_plan
+
+if TYPE_CHECKING:
+ from press.press.doctype.app.app import App
+ from press.press.doctype.release_group.release_group import ReleaseGroup
-def create_test_saas_settings(group: ReleaseGroup = None):
+def create_test_saas_settings(group: ReleaseGroup | None = None, apps: list[App] | None = None):
"""Create a test saas_settings"""
+ if not apps:
+ apps = [create_test_app()]
+ app = apps[-1]
if not group:
- group = (create_test_release_group([create_test_app()]),)
+ group = create_test_release_group(apps)
+ plan = create_test_plan("Site")
return frappe.get_doc(
{
"doctype": "Saas Settings",
- "app": "frappe",
- "apps": [{"app": "frappe"}],
+ "app": app.name,
+ "apps": [{"app": app.name}],
"domain": "fc.dev",
"cluster": "Default",
"group": group.name,
- "plan": create_test_marketplace_app_plan().name,
- "site_plan": create_test_plan("Site"),
+ "plan": plan.name,
+ "site_plan": plan.name,
}
- ).insert(ignore_permissions=True)
+ ).insert(ignore_permissions=True, ignore_links=True)
class TestSaasSettings(FrappeTestCase):
diff --git a/press/saas/doctype/saas_setup_account_generator/saas_setup_account_generator.json b/press/saas/doctype/saas_setup_account_generator/saas_setup_account_generator.json
index 3b2ad325dff..9a2611832fd 100644
--- a/press/saas/doctype/saas_setup_account_generator/saas_setup_account_generator.json
+++ b/press/saas/doctype/saas_setup_account_generator/saas_setup_account_generator.json
@@ -1,5 +1,6 @@
{
"actions": [],
+ "allow_guest_to_view": 1,
"allow_rename": 1,
"autoname": "field:app",
"creation": "2022-06-09 10:00:09.583228",
@@ -78,7 +79,7 @@
"has_web_view": 1,
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2022-08-24 16:29:55.293281",
+ "modified": "2024-03-28 21:38:55.941020",
"modified_by": "Administrator",
"module": "SaaS",
"name": "Saas Setup Account Generator",
diff --git a/press/saas/doctype/saas_setup_account_generator/saas_setup_account_generator.py b/press/saas/doctype/saas_setup_account_generator/saas_setup_account_generator.py
index f1244800862..370def35d03 100644
--- a/press/saas/doctype/saas_setup_account_generator/saas_setup_account_generator.py
+++ b/press/saas/doctype/saas_setup_account_generator/saas_setup_account_generator.py
@@ -6,6 +6,24 @@
class SaasSetupAccountGenerator(WebsiteGenerator):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ app: DF.Link | None
+ app_title: DF.Data | None
+ custom_route: DF.Check
+ domain: DF.Data | None
+ headless: DF.Check
+ image_path: DF.Data | None
+ publish: DF.Check
+ route: DF.Data | None
+ # end: auto-generated types
+
website = frappe._dict(
template="templates/saas/setup-account.html",
condition_field="publish",
diff --git a/press/saas/doctype/saas_signup_generator/saas_signup_generator.json b/press/saas/doctype/saas_signup_generator/saas_signup_generator.json
index fd4a09203ea..7ef7199f886 100644
--- a/press/saas/doctype/saas_signup_generator/saas_signup_generator.json
+++ b/press/saas/doctype/saas_signup_generator/saas_signup_generator.json
@@ -1,6 +1,7 @@
{
"actions": [],
"allow_rename": 1,
+ "allow_guest_to_view": 1,
"autoname": "field:app",
"creation": "2022-06-08 18:24:06.174595",
"doctype": "DocType",
@@ -61,7 +62,7 @@
"has_web_view": 1,
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2022-08-24 15:44:59.671817",
+ "modified": "2024-03-28 15:44:59.671817",
"modified_by": "Administrator",
"module": "SaaS",
"name": "Saas Signup Generator",
diff --git a/press/saas/doctype/saas_signup_generator/saas_signup_generator.py b/press/saas/doctype/saas_signup_generator/saas_signup_generator.py
index 243154b01e1..eae0a62741d 100644
--- a/press/saas/doctype/saas_signup_generator/saas_signup_generator.py
+++ b/press/saas/doctype/saas_signup_generator/saas_signup_generator.py
@@ -3,6 +3,7 @@
import frappe
from frappe.website.website_generator import WebsiteGenerator
+
from press.utils import get_country_info
diff --git a/press/saas/doctype/site_access_token/__init__.py b/press/saas/doctype/site_access_token/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/press/saas/doctype/site_access_token/site_access_token.js b/press/saas/doctype/site_access_token/site_access_token.js
new file mode 100644
index 00000000000..431c5ebe63f
--- /dev/null
+++ b/press/saas/doctype/site_access_token/site_access_token.js
@@ -0,0 +1,8 @@
+// Copyright (c) 2024, Frappe and contributors
+// For license information, please see license.txt
+
+// frappe.ui.form.on("Site Access Token", {
+// refresh(frm) {
+
+// },
+// });
diff --git a/press/saas/doctype/site_access_token/site_access_token.json b/press/saas/doctype/site_access_token/site_access_token.json
new file mode 100644
index 00000000000..4843250bc72
--- /dev/null
+++ b/press/saas/doctype/site_access_token/site_access_token.json
@@ -0,0 +1,53 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2024-08-22 15:31:12.166979",
+ "doctype": "DocType",
+ "engine": "InnoDB",
+ "field_order": [
+ "site",
+ "token"
+ ],
+ "fields": [
+ {
+ "fieldname": "site",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Site",
+ "options": "Site",
+ "reqd": 1
+ },
+ {
+ "description": "Token will be expired in 15 minutes from creation time.",
+ "fieldname": "token",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Token",
+ "reqd": 1
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2024-08-22 17:01:29.201628",
+ "modified_by": "Administrator",
+ "module": "SaaS",
+ "name": "Site Access Token",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "creation",
+ "sort_order": "DESC",
+ "states": []
+}
\ No newline at end of file
diff --git a/press/saas/doctype/site_access_token/site_access_token.py b/press/saas/doctype/site_access_token/site_access_token.py
new file mode 100644
index 00000000000..db681d2768c
--- /dev/null
+++ b/press/saas/doctype/site_access_token/site_access_token.py
@@ -0,0 +1,41 @@
+# Copyright (c) 2024, Frappe and contributors
+# For license information, please see license.txt
+
+import frappe
+from frappe.model.document import Document
+
+
+class SiteAccessToken(Document):
+ # begin: auto-generated types
+ # This code is auto-generated. Do not modify anything in this block.
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from frappe.types import DF
+
+ site: DF.Link
+ token: DF.Data
+ # end: auto-generated types
+
+ @staticmethod
+ def generate(site: str) -> str:
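+ # Returned as "<name>:<token>" so the record can be fetched by name
+ # and the secret compared on verification.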
+ record = frappe.get_doc(
+ {
+ "doctype": "Site Access Token",
+ "site": site,
+ "token": frappe.generate_hash(length=32),
+ }
+ ).insert(ignore_permissions=True)
+ return f"{record.name}:{record.token}"
+
+
+def cleanup_expired_access_tokens():
+ # cleanup expired tokens
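+ # Tokens are described as valid for 15 minutes; purging only after
+ # 30 minutes leaves a grace window for in-flight requests (assumed).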
+ frappe.db.sql(
+ """
+ DELETE FROM `tabSite Access Token`
+ WHERE TIMESTAMPDIFF(MINUTE, creation, NOW()) > 30
+ """
+ )
+ frappe.db.commit()
diff --git a/press/saas/doctype/site_access_token/test_site_access_token.py b/press/saas/doctype/site_access_token/test_site_access_token.py
new file mode 100644
index 00000000000..ff2be6658d0
--- /dev/null
+++ b/press/saas/doctype/site_access_token/test_site_access_token.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2024, Frappe and Contributors
+# See license.txt
+
+# import frappe
+from frappe.tests.utils import FrappeTestCase
+
+
+class TestSiteAccessToken(FrappeTestCase):
+ pass
diff --git a/press/sanity.py b/press/sanity.py
index d27d85165a3..2236929808c 100644
--- a/press/sanity.py
+++ b/press/sanity.py
@@ -1,15 +1,19 @@
+import contextlib
import os
+import platform
import re
-import urllib.request
-from selenium import webdriver
-import requests
import subprocess
+import urllib.request
+from urllib.parse import urlsplit, urlunsplit
import click
import frappe
+import requests
from bs4 import BeautifulSoup, SoupStrainer
-from selenium.webdriver.chrome.options import Options
-from urllib.parse import urlsplit, urlunsplit
+from frappe.core.utils import find
+from selenium import webdriver
+from selenium.common import WebDriverException
+from selenium.webdriver.chrome.service import Service as ChromeService
CHROMEDRIVER_PATH = os.path.expanduser("~/chromedriver")
@@ -33,6 +37,9 @@ def checks():
except Exception as e:
click.secho(f"An error occurred: {e}", fg="yellow")
return
+ finally:
+ with contextlib.suppress(Exception):
+ chrome.quit()
def initialize_webdriver():
@@ -45,37 +52,54 @@ def initialize_webdriver():
global chrome
- options = Options()
+ options = webdriver.ChromeOptions()
options.add_argument("--headless")
options.add_argument("--no-sandbox")
options.add_argument("--disable-dev-shm-usage")
options.add_argument("--disable-setuid-sandbox")
+ service = ChromeService(executable_path=CHROMEDRIVER_PATH)
try:
- chrome = webdriver.Chrome(CHROMEDRIVER_PATH, options=options)
- except Exception as e:
+ chrome = webdriver.Chrome(service=service, options=options)
+ except WebDriverException as e:
version = re.search(r"is (\d+.\d+.\d+.\d+) with", e.msg).group(1)
- download_chromedriver(version=version.rsplit(".", 1)[0])
- chrome = webdriver.Chrome(CHROMEDRIVER_PATH, options=options)
+ download_chromedriver(version=version)
+ chrome = webdriver.Chrome(service=service, options=options)
return True
def download_chromedriver(version=None):
if version:
- latest_release_url = (
- f"https://chromedriver.storage.googleapis.com/LATEST_RELEASE_{version}"
- )
+ build_version = version.rsplit(".", 1)[0]
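+ # Chromedriver downloads moved to the Chrome for Testing JSON
+ # endpoints; resolve the matching build, then pick the platform URL.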
+
+ release_url = "https://googlechromelabs.github.io/chrome-for-testing/latest-patch-versions-per-build-with-downloads.json"
+ releases = requests.get(release_url).json()
+
+ builds = releases["builds"][build_version]["downloads"]["chromedriver"]
else:
- latest_release_url = "https://chromedriver.storage.googleapis.com/LATEST_RELEASE"
+ release_url = "https://googlechromelabs.github.io/chrome-for-testing/last-known-good-versions-with-downloads.json"
+ releases = requests.get(release_url).json()
- latest_release = requests.get(latest_release_url).text
- subprocess.check_output(
- f"curl -o chromedriver.zip https://chromedriver.storage.googleapis.com/{latest_release}/chromedriver_linux64.zip".split()
- )
+ builds = releases["channels"]["Stable"]["downloads"]["chromedriver"]
+
+ platform = get_platform()
+ download_url = find(builds, lambda x: x["platform"] == platform)["url"]
+
+ subprocess.check_output(f"curl -o chromedriver.zip {download_url}".split())
subprocess.check_output(
- f"unzip -o chromedriver.zip -d {os.path.expanduser('~')}".split()
+ f"unzip -o -j chromedriver.zip chromedriver-{platform}/chromedriver -d {os.path.expanduser('~')}".split()
)
+def get_platform():
+ if platform.system().lower() == "linux":
+ return "linux64"
+ if platform.system().lower() == "darwin":
+ if platform.machine().lower() == "arm64":
+ return "mac-arm64"
+ return "mac-x64"
+ return None
+
+
def test_browser_assets():
print(f"\nChecking health of assets and links for {WEBSITE}")
hyperlinks = extract_hyperlinks(WEBSITE)
@@ -130,8 +154,7 @@ def pattern_adjust(a, address):
if re.match("^//", d):
m = re.search(r"(?<=//)\S+", d)
d = m.group(0)
- m = "https://" + d
- return m
+ return "https://" + d
elif r.scheme == "" and r.netloc == "":
return address + a
else:
@@ -152,9 +175,8 @@ def extract_hyperlinks(address):
for link in BeautifulSoup(response, "html.parser", parse_only=SoupStrainer(key)):
if link.has_attr(value):
p = pattern_adjust(link[value], address)
- if p:
- if p not in hyperlinks:
- hyperlinks.add(p)
+ if p and (p not in hyperlinks):
+ hyperlinks.add(p)
except Exception as err:
click.secho(f"{address} ⚠️ ({err})", fg="yellow")
diff --git a/press/scripts/infrastructure/server_conversion.py b/press/scripts/infrastructure/server_conversion.py
new file mode 100644
index 00000000000..54e19529e21
--- /dev/null
+++ b/press/scripts/infrastructure/server_conversion.py
@@ -0,0 +1,283 @@
+from __future__ import annotations
+
+import os
+import typing
+
+import click
+import frappe
+
+if typing.TYPE_CHECKING:
+ from press.infrastructure.doctype.arm_build_record.arm_build_record import ARMBuildRecord
+ from press.infrastructure.doctype.arm_docker_image.arm_docker_image import ARMDockerImage
+ from press.infrastructure.doctype.virtual_machine_migration.virtual_machine_migration import (
+ VirtualMachineMigration,
+ )
+ from press.press.doctype.server.server import Server
+
+
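+# Intel instance families mapped to their Graviton (ARM) equivalents,
+# keyed by the machine-type prefix (e.g. "m6i" in "m6i.large").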
+arm_machine_mappings = {
+ "t2": "t4g",
+ "c6i": "c8g",
+ "m6i": "m8g",
+ "m7i": "m8g",
+ "r6i": "r8g",
+ # Following are for Zurich due to lack of newer processors in that region
+ "r5": "r7g",
+ "m5": "m7g",
+ "c5": "c7g",
+}
+
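+# For AMD conversions, every Intel family funnels into the
+# general-purpose m6a family.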
+amd_machine_mappings = {"r6i": "m6a", "m6i": "m6a", "c6i": "m6a", "m5": "m6a", "r7i": "m6a", "m7i": "m6a"}
+
+
+def has_arm_build_record(server: str) -> bool:
+ return bool(frappe.get_value("ARM Build Record", {"server": server}))
+
+
+def check_image_build_failure(arm_build_record: ARMBuildRecord) -> bool:
+ return any(arm_image.status != "Success" for arm_image in arm_build_record.arm_images)
+
+
+def create_vmm(server: str, virtual_machine_image: str, target_machine_type: str) -> VirtualMachineMigration:
+ virtual_machine_migration: VirtualMachineMigration = frappe.get_doc(
+ {
+ "doctype": "Virtual Machine Migration",
+ "virtual_machine_image": virtual_machine_image,
+ "machine_type": target_machine_type,
+ "virtual_machine": server,
+ }
+ )
+ return virtual_machine_migration.insert()
+
+
+def vmm(server, vmi, amd_conversion: bool = False) -> VirtualMachineMigration:
+ machine_type = frappe.db.get_value("Virtual Machine", {"name": server}, "machine_type")
+ machine_series, machine_size = machine_type.split(".")
+
+ machine_mappings = arm_machine_mappings if not amd_conversion else amd_machine_mappings
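+ # r6i/r7i are memory-optimized while m6a is general purpose, so the
+ # instance size is bumped up, presumably to keep comparable RAM.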
+ if amd_conversion and ("r6i" in machine_series or "r7i" in machine_series):
+ if machine_size == "xlarge":
+ machine_size = "2xlarge"
+ else:
+ machine_size = machine_size.replace("2", "4")
+
+ virtual_machine_migration: VirtualMachineMigration = create_vmm(
+ server=server,
+ virtual_machine_image=vmi,
+ target_machine_type=f"{machine_mappings[machine_series]}.{machine_size}",
+ )
+ return virtual_machine_migration
+
+
+def connect(bench_dir, site_dir):
+ sites_dir = os.path.join(bench_dir, "sites")
+ frappe.init(site=site_dir, sites_path=sites_dir)
+ frappe.connect()
+
+
+def load_servers_from_file(file_path: str) -> list[str]:
+ with open(file_path) as server_file:
+ return server_file.read().strip().split("\n")
+
+
+@click.group()
+@click.option("--site", "site_name", required=True, help="Frappe site name")
+def cli(site_name):
+ """CLI entry point."""
+ bench_dir = os.path.dirname(__file__).split("apps")[0]
+ site_dir = os.path.join(bench_dir, "sites", site_name)
+ connect(bench_dir, site_dir)
+
+
+@cli.command()
+@click.option(
+ "--server-file", type=click.Path(exists=True), help="Path to a file containing a list of servers."
+)
+@click.argument("servers", nargs=-1, type=str)
+def trigger_arm_build(servers: list[str], server_file: str):
+ """Trigger ARM build for one or more servers."""
+ if server_file:
+ servers = load_servers_from_file(server_file)
+
+ for server in servers:
+ if has_arm_build_record(server):
+ continue
+
+ server: Server = frappe.get_doc("Server", server)
+ server.collect_arm_images()
+ frappe.db.commit()
+
+
+@cli.command()
+@click.option(
+ "--server-file", type=click.Path(exists=True), help="Path to a file containing a list of servers."
+)
+@click.argument("servers", nargs=-1, type=str)
+def pull_images_on_servers(servers: list[str], server_file: str):
+ """Trigger image pulls on Intel server to be converted"""
+ if server_file:
+ servers = load_servers_from_file(server_file)
+
+ for server in servers:
+ arm_build_record: ARMBuildRecord = frappe.get_doc("ARM Build Record", {"server": server})
+
+ try:
+ arm_build_record.pull_images()
+ print(f"Pulled image on {server}")
+ except frappe.ValidationError:
+ print(f"Skipping server {server} due to failed builds")
+
+ frappe.db.commit()
+
+
+@cli.command()
+@click.option("--vmi", default="f377-mumbai.frappe.cloud")
+@click.option("--vmi-cluster", required=True)
+@click.option(
+ "--server-file", type=click.Path(exists=True), help="Path to a file containing a list of servers."
+)
+@click.argument("servers", nargs=-1, type=str)
+def update_image_and_create_migration(
+ vmi: str,
+ vmi_cluster: str,
+ servers: list[str],
+ server_file: str,
+):
+ """Update docker image on bench config and create virtual machine migration"""
+ vmi = frappe.get_value("Virtual Machine Image", {"virtual_machine": vmi, "cluster": vmi_cluster}, "name")
+ if not vmi:
+ print(f"Aborting VMI not found {vmi}!")
+ return
+
+ if server_file:
+ servers = load_servers_from_file(server_file)
+
+ for server in servers:
+ arm_build_record: ARMBuildRecord = frappe.get_doc("ARM Build Record", {"server": server})
+ try:
+ arm_build_record.update_image_tags_on_benches()
+ virtual_machine_migration = vmm(server, vmi)
+ frappe.db.commit()
+ print(f"Created {virtual_machine_migration.name}")
+ except frappe.ValidationError as e:
+ print(f"Aborting: {e}!")
+ break
+
+
+@cli.command()
+@click.option("--vmi", default="m263-mumbai.frappe.cloud")
+@click.option("--vmi-cluster", required=True)
+@click.option(
+ "--server-file",
+ type=click.Path(exists=True),
+ help="Path to a file containing a list of servers.",
+)
+@click.option("--start", type=bool, default=False)
+@click.argument("servers", nargs=-1, type=str)
+def convert_database_servers(
+ vmi: str, vmi_cluster: str, servers: list[str], server_file: str, start: bool = False
+):
+ vmi = frappe.get_value("Virtual Machine Image", {"virtual_machine": vmi, "cluster": vmi_cluster}, "name")
+ if not vmi:
+ print(f"Aborting VMI not found {vmi}!")
+ return
+
+ if server_file:
+ servers = load_servers_from_file(server_file)
+
+ for server in servers:
+ virtual_machine_migration = vmm(server, vmi, amd_conversion=True)
+ frappe.db.commit()
+ print(f"Created {virtual_machine_migration.name}")
+
+ if start:
+ for server in servers:
+ virtual_machine_migration: VirtualMachineMigration = frappe.get_doc(
+ "Virtual Machine Migration", {"virtual_machine": server}
+ )
+ virtual_machine_migration.execute()
+ frappe.db.commit()
+
+
+@cli.command()
+@click.option(
+ "--server-file", type=click.Path(exists=True), help="Path to a file containing a list of servers."
+)
+@click.argument("servers", nargs=-1, type=str)
+def arm_build_info(servers: list[str], server_file: str):
+ total, successful, failed, running = 0, 0, 0, 0
+ if server_file:
+ servers = load_servers_from_file(server_file)
+
+ def _status_info(images: list[ARMDockerImage], status: str):
+ return len([image for image in images if image.status == status])
+
+ for server in servers:
+ arm_build_record: ARMBuildRecord = frappe.get_doc("ARM Build Record", {"server": server})
+ arm_build_record.sync_status()
+ total += len(arm_build_record.arm_images)
+ running += _status_info(arm_build_record.arm_images, "Running")
+ successful += _status_info(arm_build_record.arm_images, "Success")
+ failed += _status_info(arm_build_record.arm_images, "Failure")
+
+ print(f"Total: {total}\nSuccessful: {successful}\nRunning: {running}\nFailed: {failed}")
+
+
+@cli.command()
+@click.option(
+ "--server-file", type=click.Path(exists=True), help="Path to a file containing a list of servers."
+)
+@click.option("--vmi", default="f436-mumbai.frappe.cloud")
+@click.option("--vmi-cluster", required=True)
+@click.argument("servers", nargs=-1, type=str)
+def convert_to_amd(servers: list[str], vmi: str, server_file: str, vmi_cluster: str):
+ """Update docker image on bench config and create virtual machine migration"""
+ vmi = frappe.get_value("Virtual Machine Image", {"virtual_machine": vmi, "cluster": vmi_cluster}, "name")
+ if not vmi:
+ print(f"Aborting VMI not found {vmi}!")
+ return
+
+ if server_file:
+ servers = load_servers_from_file(server_file)
+
+ for server in servers:
+ try:
+ virtual_machine_migration = vmm(server, vmi, amd_conversion=True)
+ frappe.db.commit()
+ print(f"Created {virtual_machine_migration.name}")
+ except frappe.ValidationError as e:
+ print(f"Aborting: {e}!")
+ break
+
+
+@cli.command()
+@click.option(
+ "--server-file", type=click.Path(exists=True), help="Path to a file containing a list of servers."
+)
+@click.argument("servers", nargs=-1, type=str)
+def database_post_migration_steps(servers: list[str], server_file: str):
+ """Not a part of the migration script since"""
+ if server_file:
+ servers = load_servers_from_file(server_file)
+
+ for server in servers:
+ server = frappe.get_doc("Database Server", server)
+ server.set_swappiness()
+ server.add_glass_file()
+ server.install_filebeat()
+ server.adjust_memory_config()
+ server.setup_logrotate()
+ server.save()
+
+
+@cli.result_callback()
+def cleanup(*args, **kwargs):
+ frappe.destroy()
+
+
+def main():
+ cli(obj={})
+
+
+if __name__ == "__main__":
+ main()
diff --git a/press/scripts/migrate.py b/press/scripts/migrate.py
index 25a32af2010..f9e17566a6b 100644
--- a/press/scripts/migrate.py
+++ b/press/scripts/migrate.py
@@ -3,6 +3,7 @@
import atexit
import getpass
import json
+import mimetypes
import os
import re
import shlex
@@ -13,13 +14,11 @@
# imports - module imports
import frappe
import frappe.utils.backups
-from frappe.core.utils import find
-from frappe.utils import get_installed_apps_info, update_progress_bar
+from frappe.utils import update_progress_bar
from frappe.utils.change_log import get_versions
from frappe.utils.commands import add_line_after, add_line_before, render_table
# third party imports
-
try:
print("Setting Up requirements...")
# imports - third party imports
@@ -48,7 +47,7 @@
install_command = shlex.split(
"{} -m pip install {}".format(sys.executable, " ".join(dependencies))
)
- subprocess.call(install_command, stdout=open(os.devnull, "w"))
+ subprocess.check_call(install_command, stdout=open(os.devnull, "w"))
import click
import html2text
import requests
@@ -68,28 +67,6 @@
sys.setdefaultencoding("utf-8")
-@retry(stop=stop_after_attempt(5))
-def get_new_site_options():
- site_options_sc = session.post(options_url)
-
- if site_options_sc.ok:
- site_options = site_options_sc.json()["message"]
- return site_options
- else:
- print("Couldn't retrive New site information: {}".format(site_options_sc.status_code))
-
-
-@retry(stop=stop_after_attempt(5))
-def is_subdomain_available(subdomain):
- res = session.post(site_exists_url, {"subdomain": subdomain})
- if res.ok:
- available = not res.json()["message"]
- if not available:
- print("Subdomain already exists! Try another one")
-
- return available
-
-
@retry(
stop=stop_after_attempt(2) | retry_if_exception_type(SystemExit), wait=wait_fixed(5)
)
@@ -121,7 +98,6 @@ def _update_progress_bar(monitor):
# retreive upload link
upload_ticket = session.get(remote_link_url, data={"file": file_name, "parts": parts})
-
if not upload_ticket.ok:
handle_request_failure(upload_ticket)
@@ -161,7 +137,6 @@ def get_file_data(path, part):
"parts": json.dumps(file_parts),
},
)
- print()
if not upload_remote.ok:
# not needed. try the failed parts again!!!
handle_request_failure(upload_remote)
@@ -195,7 +170,7 @@ def get_file_data(path, part):
{
"file": file_name,
"path": key,
- "type": "application/x-gzip" if file_type == "database" else "application/x-tar",
+ "type": ("application/x-gzip" if file_type == "database" else "application/x-tar"),
"size": os.path.getsize(file_path),
},
)
@@ -206,25 +181,13 @@ def get_file_data(path, part):
handle_request_failure(register_press)
-def render_actions_table():
- actions_table = [["#", "Action"]]
- actions = []
-
- for n, action in enumerate(migrator_actions):
- actions_table.append([n + 1, action["title"]])
- actions.append(action["fn"])
-
- render_table(actions_table)
- return actions
-
-
def render_site_table(sites_info, version_info):
- sites_table = [["#", "Site Name", "Frappe"]]
+ sites_table = [["#", "Site Name", "Frappe Version"]]
available_sites = {}
for n, site_data in enumerate(sites_info):
name = site_data["name"]
- frappe = version_info[name]
+ frappe = version_info[n]
sites_table.append([n + 1, name, frappe])
available_sites[name] = {
"frappe": frappe,
@@ -245,38 +208,6 @@ def render_teams_table(teams):
render_table(teams_table)
-def render_plan_table(plans_list):
- plans_table = [["Plan", "CPU Time"]]
- visible_headers = ["name", "cpu_time_per_day"]
-
- for plan in plans_list:
- plan, cpu_time = [plan[header] for header in visible_headers]
- plans_table.append(
- [plan, "{} hour{}/day".format(cpu_time, "" if cpu_time < 2 else "s")]
- )
-
- render_table(plans_table)
-
-
-def render_group_table(versions):
- # title row
- versions_table = [["#", "Version", "Bench", "Apps"]]
-
- # all rows
- idx = 0
- for version in versions:
- for group in version["groups"]:
- apps_list = ", ".join(
- ["{}:{}".format(app["app"], app["branch"]) for app in group["apps"]]
- )
- row = [idx + 1, version["name"], group["name"], apps_list]
- versions_table.append(row)
- idx += 1
-
- render_table(versions_table)
- return versions_table
-
-
def handle_request_failure(request=None, message=None, traceback=True, exit_code=1):
message = message or "Request failed with error code {}".format(request.status_code)
response = html2text.html2text(request.text) if traceback else ""
@@ -285,14 +216,6 @@ def handle_request_failure(request=None, message=None, traceback=True, exit_code
sys.exit(exit_code)
-@add_line_after
-def select_primary_action():
- actions = render_actions_table()
- idx = click.prompt("What do you want to do?", type=click.IntRange(1, len(actions))) - 1
-
- return actions[idx]
-
-
def get_site_info(site):
site_info_response = session.post(site_info_url, {"name": site})
if site_info_response.ok:
@@ -300,12 +223,6 @@ def get_site_info(site):
return {}
-def get_version(info, app="frappe"):
- for app in info.get("installed_apps", {}):
- if app.get("frappe", 0) == 1:
- return app.get("name").lower().strip("frappe").strip().title()
-
-
def get_branch(info, app="frappe"):
for app in info.get("installed_apps", {}):
if app.get("frappe", 0) == 1:
@@ -342,9 +259,9 @@ def select_site():
if get_all_sites_request.ok:
# the following lines have data with a lot of redundancy, but there's no real reason to bother cleaning them up
- all_sites = get_all_sites_request.json()["message"]["site_list"]
+ all_sites = get_all_sites_request.json()["message"]
sites_info = {site["name"]: get_site_info(site["name"]) for site in all_sites}
- sites_version = {x: get_version(y) for x, y in sites_info.items()}
+ sites_version = [details["latest_frappe_version"] for details in sites_info.values()]
available_sites = render_site_table(all_sites, sites_version)
while True:
@@ -353,16 +270,18 @@ def select_site():
).strip()
if selected_site in available_sites:
site_data = available_sites[selected_site]
- downgrade = is_downgrade(sites_info[selected_site])
-
- if (not downgrade) or (
- downgrade
- and click.confirm(
- "Downgrading may lead to a broken site. Are you sure you want to do this?"
- )
- ):
+ global has_external_files
+ if not has_external_files:
+ downgrade = is_downgrade(sites_info[selected_site])
+ if (not downgrade) or (
+ downgrade
+ and click.confirm(
+ "Downgrading may lead to a broken site. Are you sure you want to do this?"
+ )
+ ):
+ return site_data
+ else:
return site_data
-
else:
print("Site {} does not exist. Try again ❌".format(selected_site))
else:
@@ -380,11 +299,11 @@ def select_team(session):
# ask if they want to select, go ahead with if only one exists
if len(available_teams) == 1:
- team = available_teams[0]
+ team = available_teams[0]["name"]
else:
render_teams_table(available_teams)
idx = click.prompt("Select Team", type=click.IntRange(1, len(available_teams))) - 1
- team = available_teams[idx]
+ team = available_teams[idx]["name"]
print("Team '{}' set for current session".format(team))
@@ -404,184 +323,83 @@ def is_valid_subdomain(subdomain):
@add_line_after
-def choose_plan(plans_list):
- print("{} plans available".format(len(plans_list)))
- available_plans = [plan["name"] for plan in plans_list]
- render_plan_table(plans_list)
-
- while True:
- input_plan = click.prompt("Select Plan").strip()
- if input_plan in available_plans:
- print("{} Plan selected ✅".format(input_plan))
- return input_plan
- else:
- print("Invalid Selection ❌")
-
-
-@add_line_after
-def check_app_compat(available_group):
- is_compat = True
- incompatible_apps, filtered_apps, branch_msgs = [], [], []
- existing_group = [
- (app["app_name"], app["branch"]) for app in get_installed_apps_info()
- ]
- print("Checking availability of existing app group")
-
- for (app, branch) in existing_group:
- info = [(a["app"], a["branch"]) for a in available_group["apps"] if a["app"] == app]
- if info:
- app_title, available_branch = info[0]
-
- if branch != available_branch:
- print("⚠️ App {}:{} => {}".format(app, branch, available_branch))
- branch_msgs.append([app, branch, available_branch])
- filtered_apps.append(app_title)
- is_compat = False
-
- else:
- print("✅ App {}:{}".format(app, branch))
- filtered_apps.append(app_title)
-
- else:
- incompatible_apps.append(app)
- print("❌ App {}:{}".format(app, branch))
- is_compat = False
-
- start_msg = "\nSelecting this group will "
- incompatible_apps = (
- ("\n\nDrop the following apps:\n" + "\n".join(incompatible_apps))
- if incompatible_apps
- else ""
- )
- branch_change = (
+def take_backup(local_site):
+ print(f"Taking backup for site {local_site}")
+ odb = frappe.utils.backups.new_backup(ignore_files=False, force=True)
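+ # Older frappe versions expose the config backup path under a
+ # different attribute, hence the getattr fallback below.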
+ return [
(
- "\n\nUpgrade the following apps:\n"
- + "\n".join(["{}: {} => {}".format(*x) for x in branch_msgs])
- )
- if branch_msgs
- else ""
- )
- changes = (incompatible_apps + branch_change) or "be perfect for you :)"
- warning_message = start_msg + changes
- print(warning_message)
-
- return is_compat, filtered_apps
-
-
-@add_line_after
-def filter_apps(versions):
- rendered_group_table = render_group_table(versions)
- while True:
- version_index = click.prompt("Select Version Number", type=int)
- try:
- if version_index < 1: # 0th row is title
- raise IndexError
- version, group = (
- rendered_group_table[version_index][1],
- rendered_group_table[version_index][2],
- )
- selected_version = find(versions, lambda v: v["name"] == version)
- selected_group = find(selected_version["groups"], lambda g: g["name"] == group)
- except IndexError:
- print("Invalid Selection ❌")
- continue
-
- is_compat, filtered_apps = check_app_compat(selected_group)
-
- if is_compat or click.confirm("Continue anyway?"):
- print("App Group {} selected! ✅".format(selected_group["name"]))
- break
-
- return selected_group["name"], filtered_apps
-
-
-@add_line_after
-def get_subdomain(domain):
- while True:
- subdomain = click.prompt("Enter subdomain").strip()
- if is_valid_subdomain(subdomain) and is_subdomain_available(subdomain):
- print("Site Domain: {}.{}".format(subdomain, domain))
- return subdomain
+ "config",
+ getattr(odb, "site_config_backup_path", None)
+ or getattr(odb, "backup_path_conf", None),
+ ),
+ ("database", odb.backup_path_db),
+ ("public", odb.backup_path_files),
+ ("private", odb.backup_path_private_files),
+ ]
@add_line_after
-def upload_backup(local_site):
- # take backup
+def upload_files(files):
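+ # Upload each backup artifact in turn; abort if any upload fails, since
+ # Frappe Cloud cannot restore a site from a partial set of backup files.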
files_uploaded = {}
- print("Taking backup for site {}".format(local_site))
- odb = frappe.utils.backups.new_backup(ignore_files=False, force=True)
-
- # upload files
- for x, (file_type, file_path) in enumerate(
- [
- (
- "config",
- getattr(odb, "site_config_backup_path", None)
- or getattr(odb, "backup_path_conf", None),
- ),
- ("database", odb.backup_path_db),
- ("public", odb.backup_path_files),
- ("private", odb.backup_path_private_files),
- ]
- ):
+ for file_type, file_path in files:
file_name = file_path.split(os.sep)[-1]
-
uploaded_file = upload_backup_file(file_type, file_name, file_path)
-
if uploaded_file:
files_uploaded[file_type] = uploaded_file
else:
- print("Upload failed for: {}".format(file_path))
+ print(f"Upload failed for: {file_path}")
print("Cannot create site on Frappe Cloud without all site backup files uploaded.")
- print("Exitting...")
+ print("Exiting...")
sys.exit(1)
-
print("Uploaded backup files! ✅")
-
return files_uploaded
-def new_site(local_site):
- # get new site options
- site_options = get_new_site_options()
-
- # set preferences from site options
- subdomain = get_subdomain(site_options["domain"])
- plan = choose_plan(site_options["plans"])
+def external_file_checker(file_path, file_type):
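+ # Sanity-check externally supplied backup files by file name and
+ # guessed MIME type before attempting an upload.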
+ file_name = os.path.basename(file_path)
+ mime_type, _ = mimetypes.guess_type(file_path)
+ if file_type == "database":
+ if not file_name.endswith((".sql.gz", ".sql")) and not file_name.endswith(
+ tuple(f".sql ({i}).gz" for i in range(1, 10))
+ ):
+ raise ValueError(
+ 'Database backup file should end with ".sql.gz" or ".sql"'
+ )
+ if mime_type not in [
+ "application/x-gzip",
+ "application/x-sql",
+ "application/gzip",
+ "application/sql",
+ ]:
+ raise ValueError("Invalid database backup file")
- versions = site_options["versions"]
- selected_group, filtered_apps = filter_apps(versions)
- files_uploaded = upload_backup(local_site)
+ elif file_type in ["public", "private"]:
+ if mime_type != "application/x-tar":
+ raise ValueError(f"Invalid {file_type} files backup file")
- # push to frappe_cloud
- payload = json.dumps(
- {
- "site": {
- "apps": filtered_apps,
- "files": files_uploaded,
- "group": selected_group,
- "name": subdomain,
- "plan": plan,
- }
- }
- )
+ elif file_type == "config":
+ if mime_type != "application/json":
+ raise ValueError("Invalid config files backup file")
- session.headers.update({"Content-Type": "application/json; charset=utf-8"})
- site_creation_request = session.post(upload_url, payload)
- if site_creation_request.ok:
- site_url = site_creation_request.json()["message"]
- print("Your site {} is being migrated ✨".format(local_site))
- print(
- "View your site dashboard at https://{}/dashboard/sites/{}".format(
- remote_site, site_url
- )
- )
- print("Your site URL: https://{}".format(site_url))
+@add_line_after
+def upload_backup(local_site):
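+ # Upload user-supplied external backup files if provided; otherwise
+ # take a fresh backup of the local site and upload that.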
+ files_uploaded = {}
+ if has_external_files:
+ print("Trying to upload externally added files to S3")
+ files_to_upload = [
+ ("config", external_config_file_path),
+ ("database", external_db_path),
+ ("public", external_public_files_path),
+ ("private", external_private_files_path),
+ ]
else:
- handle_request_failure(site_creation_request)
+ files_to_upload = take_backup(local_site)
+ files_uploaded = upload_files(files_to_upload)
+ return files_uploaded
+@add_line_after
def restore_site(local_site):
# get list of existing sites they can restore
selected_site = select_site()["name"]
@@ -593,7 +411,11 @@ def restore_site(local_site):
)
# backup site
- files_uploaded = upload_backup(local_site)
+ try:
+ files_uploaded = upload_backup(local_site)
+ except Exception as e:
+ print(f"{e}")
+ sys.exit()
# push to frappe_cloud
payload = json.dumps({"name": selected_site, "files": files_uploaded})
@@ -641,11 +463,12 @@ def create_session():
)
-def frappecloud_migrator(local_site):
+def frappecloud_migrator(local_site, frappe_provider):
global login_url, upload_url, remote_link_url, register_remote_url, options_url, site_exists_url, site_info_url, restore_site_url, account_details_url, all_site_url, finish_multipart_url
- global session, migrator_actions, remote_site
+ global session, remote_site, site_plans_url
+ global has_external_files, external_db_path, external_public_files_path, external_private_files_path, external_config_file_path
- remote_site = frappe.conf.frappecloud_url or "frappecloud.com"
+ remote_site = frappe_provider or frappe.conf.frappecloud_url
scheme = "https"
login_url = "{}://{}/api/method/login".format(scheme, remote_site)
@@ -673,11 +496,9 @@ def frappecloud_migrator(local_site):
finish_multipart_url = "{}://{}/api/method/press.api.site.multipart_exit".format(
scheme, remote_site
)
-
- migrator_actions = [
- {"title": "Create a new site", "fn": new_site},
- {"title": "Restore to an existing site", "fn": restore_site},
- ]
+ site_plans_url = "{}://{}/api/method/press.api.site.get_site_plans".format(
+ scheme, remote_site
+ )
# get credentials + auth user + start session
try:
@@ -685,10 +506,7 @@ def frappecloud_migrator(local_site):
except RetryError:
raise KeyboardInterrupt
- # available actions defined in migrator_actions
- primary_action = select_primary_action()
-
- primary_action(local_site)
+ restore_site(local_site)
def cleanup(current_file):
@@ -703,25 +521,59 @@ def executed_from_temp_dir():
return cur_file.startswith(temp_dir)
-if __name__ in ("__main__", "frappe.integrations.frappe_providers.frappecloud"):
+@click.command()
+def main():
+ global has_external_files, external_db_path, external_public_files_path, external_private_files_path, external_config_file_path
+ local_site = ""
if executed_from_temp_dir():
current_file = os.path.abspath(__file__)
atexit.register(cleanup, current_file)
- try:
- local_site = sys.argv[1]
- except Exception:
- local_site = input("Name of the site you want to migrate: ").strip()
+ frappe_provider = click.prompt(
+ "Frappe provider (default: frappecloud.com)", default="frappecloud.com"
+ )
+
+ restore_choice = click.prompt(
+ "Do you want to restore from external files? (yes/no)", default="no"
+ )
+ if restore_choice.lower() in ["yes", "y"]:
+ has_external_files = True
+ try:
+ external_db_path = click.prompt("Enter full path to the external database file")
+ external_file_checker(external_db_path, "database")
+ external_public_files_path = click.prompt("Enter full path to the public files")
+ external_file_checker(external_public_files_path, "public")
+ external_private_files_path = click.prompt("Enter full path to the private files")
+ external_file_checker(external_private_files_path, "private")
+ external_config_file_path = click.prompt("Enter full path to the config file")
+ external_file_checker(external_config_file_path, "config")
+ except ValueError as e:
+ print(f"Error while file validation ': {str(e)}")
+ sys.exit()
+ else:
+ local_site = click.prompt("Name of the site you want to migrate")
+ has_external_files = False
+ external_db_path = None
+ external_public_files_path = None
+ external_private_files_path = None
+ external_config_file_path = None
try:
- frappe.init(site=local_site)
- frappe.connect()
- frappecloud_migrator(local_site)
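+ # External-files mode has no local bench site, so frappe.init/connect
+ # are skipped in that case.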
+ if not has_external_files:
+ frappe.init(site=local_site)
+ frappe.connect()
+ frappecloud_migrator(local_site, frappe_provider)
+ else:
+ frappecloud_migrator(local_site=None, frappe_provider=frappe_provider)
except (KeyboardInterrupt, click.exceptions.Abort):
- print("\nExitting...")
+ print("\nExiting...")
except Exception:
from frappe.utils import get_traceback
print(get_traceback())
+ finally:
+ frappe.destroy()
+
- frappe.destroy()
+if __name__ == "__main__":
+ main()
diff --git a/press/scripts/migrate_2.py b/press/scripts/migrate_2.py
deleted file mode 100644
index 4f7d9f34e57..00000000000
--- a/press/scripts/migrate_2.py
+++ /dev/null
@@ -1,531 +0,0 @@
-# -*- coding: utf-8 -*-
-# imports - standard imports
-import atexit
-import json
-import os
-import re
-import shlex
-import subprocess
-import sys
-import tempfile
-
-# imports - module imports
-import frappe
-import frappe.utils.backups
-from frappe.utils import get_installed_apps_info, update_progress_bar
-from frappe.utils.backups import BackupGenerator
-from frappe.utils.commands import add_line_after, render_table
-
-# third party imports
-
-try:
- print("Setting Up requirements...")
- # imports - third party imports
- import click
- import html2text
- import requests
- from requests_toolbelt.multipart import encoder
- from tenacity import (
- RetryError,
- retry,
- retry_if_exception_type,
- stop_after_attempt,
- wait_fixed,
- )
-except ImportError:
- dependencies = [
- "tenacity",
- "html2text",
- "requests",
- "click",
- "semantic-version",
- "requests-toolbelt",
- ]
- install_command = shlex.split(
- "{} -m pip install {}".format(sys.executable, " ".join(dependencies))
- )
- subprocess.call(install_command, stdout=open(os.devnull, "w"))
- import click
- import html2text
- import requests
- from requests_toolbelt.multipart import encoder
- from tenacity import (
- RetryError,
- retry,
- retry_if_exception_type,
- stop_after_attempt,
- wait_fixed,
- )
-
-if sys.version[0] == "2":
- reload(sys) # noqa
- sys.setdefaultencoding("utf-8")
-
-
-@retry(stop=stop_after_attempt(5))
-def get_new_site_options():
- site_options_sc = session.post(options_url)
-
- if site_options_sc.ok:
- site_options = site_options_sc.json()["message"]
- return site_options
- else:
- print("Couldn't retrive New site information: {}".format(site_options_sc.status_code))
-
-
-@retry(stop=stop_after_attempt(5))
-def is_subdomain_available(subdomain):
- res = session.post(site_exists_url, {"subdomain": subdomain})
- if res.ok:
- available = not res.json()["message"]
- if not available:
- print("Subdomain already exists! Try another one")
-
- return available
-
-
-@retry(
- stop=stop_after_attempt(2) | retry_if_exception_type(SystemExit), wait=wait_fixed(5)
-)
-def upload_backup_file(file_type, file_name, file_path):
- def _update_progress_bar(monitor):
- update_progress_bar(
- "Uploading {} file".format(file_type), monitor.bytes_read, monitor.len
- )
-
- from math import ceil
-
- K = 1024
- M = K**2
-
- max_size = (
- 100 # in M: Max Size for multipart uploads - break down big files in `n` MB parts
- )
- file_size = os.path.getsize(file_path) / M
-
- total_size = ceil(file_size / 1024) # in G
- allowed_max_size = (
- 4 # in G: aws allows max 5G but we'll cap single requests at 4 instead
- )
-
- parts = 1
-
- if total_size > allowed_max_size:
- parts = ceil(file_size / max_size)
-
- # retreive upload link
- upload_ticket = session.get(remote_link_url, data={"file": file_name, "parts": parts})
-
- if not upload_ticket.ok:
- handle_request_failure(upload_ticket)
-
- payload = upload_ticket.json()["message"]
-
- key = ""
-
- if parts > 1:
-
- def get_file_data(path, part):
- value = part * max_size * M
- with open(path, "rb") as f:
- f.seek(value)
- return f.read(max_size * M)
-
- upload_id = payload["UploadId"]
- key = payload["Key"]
- signed_urls = payload["signed_urls"]
- file_parts = []
-
- for count in range(parts):
- signed_url = signed_urls[count]
- file_data = get_file_data(file_path, count)
- update_progress_bar("Uploading {} File".format(file_type), count, parts)
-
- res = requests.put(signed_url, data=file_data)
- etag = res.headers["ETag"]
- file_parts.append(
- {"ETag": etag, "PartNumber": count + 1}
- ) # you have to append etag and partnumber of each parts
-
- upload_remote = session.post(
- finish_multipart_url,
- data={
- "file": key,
- "id": upload_id,
- "action": "complete",
- "parts": json.dumps(file_parts),
- },
- )
- print()
- if not upload_remote.ok:
- # not needed. try the failed parts again!!!
- handle_request_failure(upload_remote)
-
- else:
- url = payload["url"]
- fields = payload["fields"]
- key = fields["key"]
-
- # upload remote file
- fields["file"] = (file_name, open(file_path, "rb"))
- multipart_payload = encoder.MultipartEncoder(fields=fields)
- multipart_payload = encoder.MultipartEncoderMonitor(
- multipart_payload, _update_progress_bar
- )
-
- upload_remote = session.post(
- url,
- data=multipart_payload,
- headers={
- "Accept": "application/json",
- "Content-Type": multipart_payload.content_type,
- },
- )
- print()
- if not upload_remote.ok:
- handle_request_failure(upload_remote)
-
- # register remote file to site
- register_press = session.post(
- register_remote_url,
- {
- "file": file_name,
- "path": key,
- "type": "application/x-gzip" if file_type == "database" else "application/x-tar",
- "size": os.path.getsize(file_path),
- },
- )
-
- if register_press.ok:
- return register_press.json()["message"]
-
- handle_request_failure(register_press)
-
-
-def render_actions_table():
- actions_table = [["#", "Action"]]
- actions = []
-
- for n, action in enumerate(migrator_actions):
- actions_table.append([n + 1, action["title"]])
- actions.append(action["fn"])
-
- render_table(actions_table)
- return actions
-
-
-def render_site_table(sites_info, version_info):
- sites_table = [["#", "Site Name", "Frappe", "Status"]]
- available_sites = {}
-
- for n, site_data in enumerate(sites_info):
- name, status = site_data["name"], site_data["status"]
- frappe = version_info[name]
- if status in ("Active", "Broken"):
- sites_table.append([n + 1, name, frappe, status])
- available_sites[name] = {
- "status": status,
- "frappe": frappe,
- "name": name,
- "branch": version_info,
- }
-
- render_table(sites_table)
- return available_sites
-
-
-def render_group_table(versions):
- # title row
- versions_table = [["#", "Version", "Bench", "Apps"]]
-
- # all rows
- idx = 0
- for version in versions:
- for group in version["groups"]:
- apps_list = ", ".join(
- ["{}:{}".format(app["app"], app["branch"]) for app in group["apps"]]
- )
- row = [idx + 1, version["name"], group["name"], apps_list]
- versions_table.append(row)
- idx += 1
-
- render_table(versions_table)
- return versions_table
-
-
-def handle_request_failure(request=None, message=None, traceback=True, exit_code=1):
- message = message or "Request failed with error code {}".format(request.status_code)
- response = html2text.html2text(request.text) if traceback else ""
-
- print("{0}{1}".format(message, "\n" + response))
- sys.exit(exit_code)
-
-
-def raise_limits_warning():
- raise_warn = False
- files = BackupGenerator(
- frappe.conf.db_name, frappe.conf.db_name, frappe.conf.db_password
- ).get_recent_backup(older_than=24 * 30)
-
- for file in files:
- if file:
- file_size_in_mb = os.path.getsize(file) / (1024 * 1024)
- if "database" in file and file_size_in_mb > 500:
- raise_warn = True
- return raise_warn
-
-
-def is_valid_subdomain(subdomain):
- if len(subdomain) < 5:
- print("Subdomain too short. Use 5 or more characters")
- return False
- matched = re.match("^[a-z0-9][a-z0-9-]*[a-z0-9]$", subdomain)
- if matched:
- return True
- print(
- "Subdomain contains invalid characters. Use lowercase characters, numbers and hyphens"
- )
-
-
-@add_line_after
-def check_app_compat(available_group):
- is_compat = True
- incompatible_apps, filtered_apps, branch_msgs = [], [], []
- existing_group = [
- (app["app_name"], app["branch"]) for app in get_installed_apps_info()
- ]
- print("Checking availability of existing app group")
-
- for (app, branch) in existing_group:
- info = [(a["app"], a["branch"]) for a in available_group["apps"] if a["app"] == app]
- if info:
- app_title, available_branch = info[0]
-
- if branch != available_branch:
- print("⚠️ App {}:{} => {}".format(app, branch, available_branch))
- branch_msgs.append([app, branch, available_branch])
- filtered_apps.append(app_title)
- is_compat = False
-
- else:
- print("✅ App {}:{}".format(app, branch))
- filtered_apps.append(app_title)
-
- else:
- incompatible_apps.append(app)
- print("❌ App {}:{}".format(app, branch))
- is_compat = False
-
- start_msg = "\nSelecting this group will "
- incompatible_apps = (
- ("\n\nDrop the following apps:\n" + "\n".join(incompatible_apps))
- if incompatible_apps
- else ""
- )
- branch_change = (
- (
- "\n\nUpgrade the following apps:\n"
- + "\n".join(["{}: {} => {}".format(*x) for x in branch_msgs])
- )
- if branch_msgs
- else ""
- )
- changes = (incompatible_apps + branch_change) or "be perfect for you :)"
- warning_message = start_msg + changes
- print(warning_message)
-
- return is_compat, filtered_apps
-
-
-@add_line_after
-def get_version() -> int:
- while True:
- version = click.prompt("Select Version Number", type=int)
- if version not in [12, 13]:
- print("Invalid Selection ❌")
- else:
- return version
-
-
-@add_line_after
-def get_subdomain(domain):
- while True:
- subdomain = click.prompt("Enter subdomain").strip()
- if is_valid_subdomain(subdomain) and is_subdomain_available(subdomain):
- print("Site Domain: {}.{}".format(subdomain, domain))
- return subdomain
-
-
-@add_line_after
-def upload_backup(local_site):
- # take backup
- files_uploaded = {}
- print("Taking backup for site {}".format(local_site))
- odb = frappe.utils.backups.new_backup(ignore_files=False, force=True)
-
- # upload files
- for x, (file_type, file_path) in enumerate(
- [
- (
- "config",
- getattr(odb, "site_config_backup_path", None)
- or getattr(odb, "backup_path_conf", None),
- ),
- ("database", odb.backup_path_db),
- ("public", odb.backup_path_files),
- ("private", odb.backup_path_private_files),
- ]
- ):
- file_name = file_path.split(os.sep)[-1]
-
- uploaded_file = upload_backup_file(file_type, file_name, file_path)
-
- if uploaded_file:
- files_uploaded[file_type] = uploaded_file
- else:
- print("Upload failed for: {}".format(file_path))
- print("Cannot create site on Frappe Cloud without all site backup files uploaded.")
- print("Exitting...")
- sys.exit(1)
-
- print("Uploaded backup files! ✅")
-
- return files_uploaded
-
-
-def new_site(local_site, frappe_version):
-
- files_uploaded = upload_backup(local_site)
-
- # push to frappe_cloud
- payload = json.dumps(
- {
- "site": {
- "files": files_uploaded,
- "version": int(frappe_version),
- "name": local_site,
- }
- }
- )
-
- session.headers.update({"Content-Type": "application/json; charset=utf-8"})
- site_creation_request = session.post(upload_url, payload)
-
- if site_creation_request.ok:
- site_url = site_creation_request.json()["message"]
- print("Your site {} is being migrated ✨".format(local_site))
- print(
- "View your site dashboard at https://{}/dashboard/sites/{}".format(
- remote_site, site_url
- )
- )
- print("Your site URL: https://{}".format(site_url))
- else:
- handle_request_failure(site_creation_request)
-
-
-@add_line_after
-def create_session(username, password):
- auth_credentials = {"usr": username, "pwd": password}
-
- session = requests.Session()
- login_sc = session.post(login_url, auth_credentials)
-
- if login_sc.ok:
- session.headers.update({"X-Press-Team": username, "Connection": "keep-alive"})
- return session
- else:
- handle_request_failure(
- message="Authorization Failed with Error Code {}".format(login_sc.status_code),
- traceback=False,
- )
-
-
-def frappecloud_migrator(local_site, username, password, frappe_version):
- global login_url, upload_url, remote_link_url, register_remote_url, options_url, site_exists_url, site_info_url, restore_site_url, account_details_url, all_site_url, finish_multipart_url
- global session, migrator_actions, remote_site
-
- remote_site = frappe.conf.frappecloud_url or "frappecloud.com"
- scheme = "https"
-
- login_url = "{}://{}/api/method/login".format(scheme, remote_site)
- upload_url = "{}://{}/api/method/press.api.site.new_central_site".format(
- scheme, remote_site
- )
- remote_link_url = "{}://{}/api/method/press.api.site.get_upload_link".format(
- scheme, remote_site
- )
- register_remote_url = "{}://{}/api/method/press.api.site.uploaded_backup_info".format(
- scheme, remote_site
- )
- options_url = "{}://{}/api/method/press.api.site.options_for_new".format(
- scheme, remote_site
- )
- site_exists_url = "{}://{}/api/method/press.api.site.exists".format(
- scheme, remote_site
- )
- site_info_url = "{}://{}/api/method/press.api.site.get".format(scheme, remote_site)
- account_details_url = "{}://{}/api/method/press.api.account.get".format(
- scheme, remote_site
- )
- all_site_url = "{}://{}/api/method/press.api.site.all".format(scheme, remote_site)
- restore_site_url = "{}://{}/api/method/press.api.site.restore".format(
- scheme, remote_site
- )
- finish_multipart_url = "{}://{}/api/method/press.api.site.multipart_exit".format(
- scheme, remote_site
- )
-
- # get credentials + auth user + start session
- try:
- session = create_session(username, password)
- except RetryError:
- raise KeyboardInterrupt
-
- new_site(local_site, frappe_version)
-
-
-def cleanup(current_file):
- print("Cleaning Up...")
- os.remove(current_file)
-
-
-def executed_from_temp_dir():
- """Return True if script executed from temp directory"""
- temp_dir = tempfile.gettempdir()
- cur_file = __file__
- return cur_file.startswith(temp_dir)
-
-
-@click.command()
-@click.option("-s", "--local_site", prompt="Site Name")
-@click.option("-u", "--username", prompt="Username")
-@click.option("-p", "--password", prompt="Password", hide_input=True)
-@click.option(
- "-f",
- "--frappe_version",
- type=click.Choice(["12", "13"]),
- prompt="Version",
- default="13",
- show_default=True,
-)
-def main(local_site, username, password, frappe_version):
- try:
- frappe.init(site=local_site)
- frappe.connect()
- frappecloud_migrator(local_site, username, password, frappe_version)
- except (KeyboardInterrupt, click.exceptions.Abort):
- print("\nExitting...")
- except Exception:
- from frappe.utils import get_traceback
-
- print(get_traceback())
-
- frappe.destroy()
-
-
-if __name__ in ("__main__", "frappe.integrations.frappe_providers.frappecloud"):
- if executed_from_temp_dir():
- current_file = os.path.abspath(__file__)
- atexit.register(cleanup, current_file)
- main()
diff --git a/press/scripts/mypy_line_count_check.sh b/press/scripts/mypy_line_count_check.sh
new file mode 100755
index 00000000000..e84e5cc1d8f
--- /dev/null
+++ b/press/scripts/mypy_line_count_check.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+
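+# Ratchet check: fail only when dmypy's output line count exceeds the current
+# known baseline (MAX_ERRORS), so the error count can only go down over time;
+# on failure, re-run dmypy with the caller's arguments to show the full report.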
+line_count=$(dmypy run press | wc -l)
+
+MAX_ERRORS=813
+
+if [ "$line_count" -gt $MAX_ERRORS ]; then
+ echo "mypy shows $line_count errors, which exceeds the limit of $MAX_ERRORS."
+ dmypy run "$@"
+ exit 1
+fi
diff --git a/press/scripts/registry.py b/press/scripts/registry.py
index 73f954d6772..6eebdd3bac9 100644
--- a/press/scripts/registry.py
+++ b/press/scripts/registry.py
@@ -1,6 +1,6 @@
+import frappe
import grequests
import requests
-import frappe
from tqdm import tqdm
frappe.init(site="frappe.cloud")
diff --git a/press/scripts/weekend_support.py b/press/scripts/weekend_support.py
index 2aba165652d..d543e054437 100644
--- a/press/scripts/weekend_support.py
+++ b/press/scripts/weekend_support.py
@@ -1,14 +1,21 @@
-import frappe
import datetime
from datetime import timedelta
from itertools import cycle
+import frappe
+
agents = [
+ "jayanta@frappe.io",
+ "saurabh@erpnext.com",
+ "mangesh@frappe.io",
+ "bowrna@frappe.io",
"shadrak@erpnext.com",
+ "aradhya@frappe.io",
+ "sabu@frappe.io",
+ "tanmoy@frappe.io",
+ "aysha@frappe.io",
+ "ritwik.p@frappe.io",
"balamurali@erpnext.com",
- "aditya@erpnext.com",
- "athul@erpnext.com",
- "rutwik@frappe.io",
]
@@ -37,9 +44,6 @@ def next_weekdays(from_: datetime.date, till: datetime.date):
def main():
agent_cycle = cycle(agents)
- weekday_cycle = agents
- # weekday_cycle.remove("aditya@erpnext.com")
- weekday_cycle = cycle(weekday_cycle)
from_ = datetime.date.today()
till = datetime.date(2023, 7, 20)
@@ -66,26 +70,3 @@ def main():
"event_type": "Public",
}
).insert()
-
- for weekday in next_weekdays(from_, till):
- agent = next(weekday_cycle)
- contact = frappe.get_last_doc("Contact", {"email_id": agent})
- if frappe.db.exists(
- "Event",
- {
- "subject": ("like", "%Dedicated Support"),
- "starts_on": weekday,
- "ends_on": datetime.datetime.combine(weekday, datetime.time(23, 59)),
- },
- ):
- continue
- frappe.get_doc(
- {
- "doctype": "Event",
- "subject": f"{contact.first_name} on Dedicated Support",
- "starts_on": weekday,
- "ends_on": datetime.datetime.combine(weekday, datetime.time(23, 59)),
- "all_day": 1,
- "event_type": "Public",
- }
- ).insert()
diff --git a/press/security/fail2ban.py b/press/security/fail2ban.py
new file mode 100644
index 00000000000..d543e52406b
--- /dev/null
+++ b/press/security/fail2ban.py
@@ -0,0 +1,8 @@
+import frappe
+
+
+def ignore_ips() -> str:
+ """
+ Returns a space-separated string of IPs to be ignored by fail2ban.
+ """
+ return " ".join(frappe.get_all("Monitor Server", pluck="ip"))
diff --git a/press/signup_e2e.py b/press/signup_e2e.py
new file mode 100644
index 00000000000..569668db849
--- /dev/null
+++ b/press/signup_e2e.py
@@ -0,0 +1,153 @@
+"""Scheduled runner for the Playwright signup E2E test.
+
+Site Config Keys:
+ enable_signup_e2e: truthy (1/true/yes/on) to run during scheduled job.
+ signup_e2e_base_url: override BASE_URL (defaults to frappe.utils.get_url()).
+ signup_e2e_timeout_seconds: overall subprocess timeout (default 900).
+ signup_e2e_otp_helper_endpoint: forwarded to OTP_HELPER_ENDPOINT for the test.
+
+"""
+
+from __future__ import annotations
+
+import contextlib
+import os
+import random
+import signal
+import subprocess
+from pathlib import Path
+
+import frappe
+
+TRUTHY = {"1", "true", "yes", "on"}
+
+
+def _truthy(val: object) -> bool:
+ if val is None:
+ return False
+ if isinstance(val, bool):
+ return val
+ return str(val).lower() in TRUTHY
+
+
+def run_signup_e2e(): # noqa: C901
+ if not _truthy(frappe.conf.get("enable_signup_e2e")):
+ return
+
+ app_root = Path(__file__).resolve().parent.parent # .../apps/press
+ dashboard_dir = app_root / "dashboard"
+
+ if not dashboard_dir.exists():
+ frappe.log(f"signup_e2e: dashboard directory not found at {dashboard_dir}")
+ return
+
+ product_trials_list = frappe.db.get_all(
+ "Product Trial",
+ filters={"published": 1},
+ pluck="name",
+ order_by="name asc",
+ )
+ if not product_trials_list:
+ frappe.log("signup_e2e: no published product trials found; aborting run")
+ return
+
+ random.shuffle(product_trials_list)
+ product_trials_list = product_trials_list[:3]
+
+ base_url = frappe.conf.get("signup_e2e_base_url") or frappe.utils.get_url()
+ timeout = int(frappe.conf.get("signup_e2e_timeout_seconds") or 900)
+ otp_helper = frappe.conf.get("signup_e2e_otp_helper_endpoint")
+
+ env = os.environ.copy()
+ env["PRODUCT_TRIALS"] = ",".join(product_trials_list)
+ env["BASE_URL"] = base_url
+ if otp_helper:
+ env["OTP_HELPER_ENDPOINT"] = str(otp_helper)
+
+ cmd = ["npm", "run", "test:e2e", "--", "--project=cron"]
+ frappe.log(
+ f"signup_e2e: starting Playwright test (products={env.get('PRODUCT_TRIALS')} base_url={env.get('BASE_URL')} timeout={timeout}s)"
+ )
+
+ try:
+ proc = subprocess.Popen(
+ cmd,
+ cwd=str(dashboard_dir),
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ env=env,
+ text=True,
+ )
+ except FileNotFoundError:
+ frappe.log("signup_e2e: failed to spawn npm (is Node/npm installed in this environment?)")
+ return
+
+ output_lines = []
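+ # Drain stdout as the test runs so a chatty Playwright process cannot
+ # block on a full pipe buffer.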
+ try:
+ try:
+ while True:
+ line = proc.stdout.readline() # type: ignore[attr-defined]
+ if not line and proc.poll() is not None:
+ break
+ if line:
+ output_lines.append(line.rstrip())
+ except Exception:
+ remaining, _ = proc.communicate(timeout=max(5, timeout // 3))
+ if remaining:
+ output_lines.append(remaining)
+ proc.wait(timeout=timeout)
+ except subprocess.TimeoutExpired:
+ frappe.log(f"signup_e2e: timeout after {timeout}s; terminating process")
+ with contextlib.suppress(Exception):
+ proc.send_signal(signal.SIGINT)
+ with contextlib.suppress(Exception):
+ proc.kill()
+ except Exception as e:
+ frappe.log(f"signup_e2e: unexpected error: {e}")
+
+ exit_code = proc.returncode if proc.returncode is not None else -1
+
+ MAX_LINES = 2000
+ if len(output_lines) > MAX_LINES:
+ trimmed = len(output_lines) - MAX_LINES
+ output_lines = output_lines[-MAX_LINES:]
+ output_lines.insert(0, f"[signup_e2e] (truncated {trimmed} earlier lines)")
+
+ frappe.log(
+ f"signup_e2e: completed with exit_code={exit_code} lines={len(output_lines)}\n"
+ + "\n".join(output_lines)
+ )
+
+ if exit_code != 0:
+ frappe.log_error(
+ title="Signup E2E failed",
+ message=f"Exit code: {exit_code}\nLast 50 lines:\n" + "\n".join(output_lines[-50:]),
+ )
+
+ clean_up()
+
+
+def clean_up():
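+ # Archive trial sites created by signup-test accounts and disable those
+ # teams, so repeated scheduled runs do not accumulate fixtures.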
+ signup_teams = frappe.db.get_all(
+ "Team", {"user": ("like", "%fc-signup-test+%"), "enabled": 1}, pluck="name"
+ )
+ if not signup_teams:
+ return
+
+ trial_sites = frappe.db.get_all(
+ "Site",
+ {"team": ("in", signup_teams), "status": "Active", "standby_for_product": ("is", "set")},
+ pluck="name",
+ )
+ for site in trial_sites:
+ try:
+ frappe.get_doc("Site", site).archive()
+ frappe.db.commit()
+ except Exception:
+ frappe.db.rollback()
+
+ frappe.db.set_value("Team", {"name": ("in", signup_teams)}, "enabled", 0)
+ frappe.db.commit()
+
+
+__all__ = ["run_signup_e2e"]
diff --git a/press/telegram_utils.py b/press/telegram_utils.py
index 9bac378fb95..a4ff4163fba 100644
--- a/press/telegram_utils.py
+++ b/press/telegram_utils.py
@@ -1,29 +1,34 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
import frappe
import telegram
+
from press.utils import log_error
class Telegram:
- def __init__(self, topic: str = None):
+ def __init__(self, topic: str | None = None, group: str | None = None):
settings = frappe.db.get_value(
"Press Settings",
None,
["telegram_bot_token", "telegram_alerts_chat_group"],
as_dict=True,
)
- self.token = settings.telegram_bot_token
- self.group = settings.telegram_alerts_chat_group
- self.chat_id = frappe.db.get_value("Telegram Group", self.group, "chat_id")
+ self.group = group or settings.telegram_alerts_chat_group
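+ # Prefer the group's own bot token when one is configured; fall back
+ # to the global token from Press Settings.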
+ telegram_group = frappe.db.get_value("Telegram Group", self.group, ["token", "chat_id"])
+ token, chat_id = telegram_group if telegram_group else (None, None)
+ self.token = token or settings.telegram_bot_token
+ self.chat_id = chat_id
+ self.topic = topic
self.topic_id = frappe.db.get_value(
"Telegram Group Topic", {"parent": self.group, "topic": topic}, "topic_id"
)
- def send(self, message, html=False):
+ def send(self, message, html=False, reraise=False):
+ if not message:
+ return None
try:
text = message[: telegram.MAX_MESSAGE_LENGTH]
parse_mode = self._get_parse_mode(html)
@@ -34,6 +39,8 @@ def send(self, message, html=False):
message_thread_id=self.topic_id,
)
except Exception:
+ if reraise:
+ raise
log_error(
"Telegram Bot Error",
message=message,
@@ -79,7 +86,8 @@ def respond(self, message):
command = text.replace(mention, "")
response = self.process(command.strip())
- self.send(response)
+ if response:
+ self.send(response)
def process(self, command):
arguments = command.split(" ")
@@ -90,16 +98,19 @@ def process(self, command):
"ping": frappe.ping,
}
return commands.get(arguments[0], what)(*arguments[1:])
- elif len(arguments) == 4:
+ if len(arguments) == 4:
doctype, name, action, key = arguments
commands = {"get": get_value, "execute": execute}
return commands.get(action, what)(frappe.unscrub(doctype), name, key)
- elif len(arguments) == 5:
- doctype, name, action, key, value = arguments
+ if len(arguments) >= 5:
+ doctype, name, action, key, *values = arguments
commands = {
"set": set_value,
+ "execute": execute,
}
- return commands.get(action, what)(frappe.unscrub(doctype), name, key, value)
+ if action == "set" and len(values) == 1:
+ return commands.get(action, what)(frappe.unscrub(doctype), name, key, values[0])
+ return commands.get(action, what)(frappe.unscrub(doctype), name, key, *values)
return what()
@@ -123,7 +134,7 @@ def execute(doctype, name, method, *args):
# return "EXECUTE", doctype, name, method
try:
document = frappe.get_doc(doctype, name)
- return document.run_method(method)
+ return document.run_method(method, *args)
except Exception:
return f"```{frappe.get_traceback()}```"
@@ -141,6 +152,7 @@ def what(*args):
doctype name execute method
doctype name get field
doctype name set field value
+doctype name execute method argument1 argument2 ...
doctype = site|bench|server|proxy-server|database-server
@@ -150,4 +162,6 @@ def what(*args):
site docs.frappe.cloud get status```
```
bench docs.frappe.cloud set auto_scale_workers 0```
+```
+server f17.frappe.cloud execute increase_disk_size 25```
"""
diff --git a/press/templates/emails/2fa_recovery_codes_otp.html b/press/templates/emails/2fa_recovery_codes_otp.html
new file mode 100644
index 00000000000..7210eaec4e3
--- /dev/null
+++ b/press/templates/emails/2fa_recovery_codes_otp.html
@@ -0,0 +1,23 @@
+{% import "templates/emails/macros.html" as utils %}
+{% extends "templates/emails/saas.html" %}
+{% set title = title %}
+{% set image_path = image_path %}
+{% set read_pixel_path = read_pixel_path %}
+{% set otp = otp %}
+
+{% block content %}
+
+
+
+
Use the verification code below to view your 2FA recovery codes in Frappe Cloud!
+
Verification Code
+
{{ otp }}
+ {{ utils.separator() }}
+
Team Frappe
+
+ {% if read_pixel_path %}
+
+ {% endif %}
+
+
+{% endblock %}
\ No newline at end of file
diff --git a/press/templates/emails/2fa_recovery_codes_viewed.html b/press/templates/emails/2fa_recovery_codes_viewed.html
new file mode 100644
index 00000000000..c68c428ea68
--- /dev/null
+++ b/press/templates/emails/2fa_recovery_codes_viewed.html
@@ -0,0 +1,24 @@
+{% extends "templates/emails/base.html" %}
+
+{% block content %}
+
+
+
Security Alert
+
Hey there!
+
+ Your two-factor authentication recovery codes were viewed at {{ viewed_at }}.
+
+
+ If you did not view your recovery codes, please secure your account immediately:
+
+
+ Change your password
+ Generate new recovery codes
+ Review any recent account activity
+
+
You can access your security settings here:
+
Security Settings
+
If you recognize this activity, you can ignore this email.
+
+
+{% endblock %}
diff --git a/press/templates/emails/2fa_recovery_codes_yearly_reminder.html b/press/templates/emails/2fa_recovery_codes_yearly_reminder.html
new file mode 100644
index 00000000000..1e178114805
--- /dev/null
+++ b/press/templates/emails/2fa_recovery_codes_yearly_reminder.html
@@ -0,0 +1,14 @@
+{% import "templates/emails/macros.html" as utils %}
+{% extends "templates/emails/base.html" %}
+
+{% block content %}
+
+
+
Security Alert
+
Hey there!
+
It's been a year since you last reviewed your two-factor authentication recovery codes.
+
Please verify that you still have access to these codes. They are essential for account recovery if you lose access to your authentication device.
+ {{ utils.button('View Your Recovery Codes', link, true) }}
+
+
+{% endblock %}
diff --git a/press/templates/emails/access_request.html b/press/templates/emails/access_request.html
new file mode 100644
index 00000000000..40eaf7af96f
--- /dev/null
+++ b/press/templates/emails/access_request.html
@@ -0,0 +1,23 @@
+{% import "templates/emails/macros.html" as utils %}
+
+{% extends "templates/emails/base.html" %}
+{% block content %}
+
+
+
Frappe has requested access to your resources.
+
Reason: {{ reason }}
+
+
Resources:
+ {% for resource in resources %}
+
+ {{ resource.document_type }}:
+ {{ resource.document_name }}
+
+ {% endfor %}
+
+ {{ utils.button('View Access Requests', 'dashboard/access-requests') }}
+ {{ utils.separator() }}
+
Team Frappe
+
+
+{% endblock %}
diff --git a/press/templates/emails/access_request_update.html b/press/templates/emails/access_request_update.html
new file mode 100644
index 00000000000..495d0a92c07
--- /dev/null
+++ b/press/templates/emails/access_request_update.html
@@ -0,0 +1,22 @@
+{% import "templates/emails/macros.html" as utils %}
+
+{% extends "templates/emails/base.html" %}
+{% block content %}
+
+
+
Your request for support access has been {{ status.lower() }}.
+
+
Resources:
+ {% for resource in resources %}
+
+ {{ resource.document_type }}:
+ {{ resource.document_name }}
+
+ {% endfor %}
+
+ {{ utils.button('View Access Requests', 'dashboard/access-requests') }}
+ {{ utils.separator() }}
+
Team Frappe
+
+
+{% endblock %}
diff --git a/press/templates/emails/auto_scale_notification.html b/press/templates/emails/auto_scale_notification.html
new file mode 100644
index 00000000000..3bb3c0a0776
--- /dev/null
+++ b/press/templates/emails/auto_scale_notification.html
@@ -0,0 +1,23 @@
+{% extends "templates/emails/base.html" %}
+{% import "templates/emails/macros.html" as utils %}
+
+{% block content %}
+
+
+
+
Hi,
+
+ {{ message }}
+
+
+ If you have any concerns or questions, please do not hesitate to reach out to our support team at
+ {{ utils.link('support.frappe.io', 'https://support.frappe.io')}}
+
+ {{ utils.button('View Autoscale Job', link, true) }}
+ {{ utils.separator() }}
+ {{ utils.signature() }}
+
+
+
+
+{% endblock %}
\ No newline at end of file
diff --git a/press/templates/emails/bench_deploy_failure.html b/press/templates/emails/bench_deploy_failure.html
new file mode 100644
index 00000000000..9557d3ac0eb
--- /dev/null
+++ b/press/templates/emails/bench_deploy_failure.html
@@ -0,0 +1,23 @@
+{% extends "templates/emails/base.html" %}
+{% import "templates/emails/macros.html" as utils %}
+
+{% block content %}
+
+
+
+
Hi,
+
+ {{ message }}
+
+
+ If you have any concerns or questions, please do not hesitate to reach out to our support team at
+ {{ utils.link('support.frappe.io', 'https://support.frappe.io')}}
+
+ {{ utils.button('View Deploy Job', link, true) }}
+ {{ utils.separator() }}
+ {{ utils.signature() }}
+
+
+
+
+{% endblock %}
\ No newline at end of file
diff --git a/press/templates/emails/budget_alert.html b/press/templates/emails/budget_alert.html
new file mode 100644
index 00000000000..6f14badbc0a
--- /dev/null
+++ b/press/templates/emails/budget_alert.html
@@ -0,0 +1,61 @@
+{% extends "templates/emails/base.html" %}
+{% import "templates/emails/macros.html" as utils %}
+{% block content %}
+
+
+
+
Hello,
+
+
+ Your Frappe Cloud cost for team {{ team_user }} has exceeded
+ the monthly budget alert threshold.
+
+
+
+ Budget Alert Details:
+
+
+
+
+ Monthly Alert Threshold:
+
+
+ {{ alert_threshold }}
+
+
+
+
+ Month-To-Date Invoice Amount:
+
+
+ {{ invoice_amount }}
+
+
+
+
+ Amount Over Budget:
+
+
+ {{ excess_amount }}
+
+
+
+
+ Invoice Period:
+
+
+ {{ frappe.format_date(period_start) }} to {{ frappe.format_date(period_end) }}
+
+
+
+
+
+ To manage your budget alerts, please visit your billing settings in the Frappe Cloud dashboard.
+
+
+ {{ utils.separator() }}
+ {{ utils.signature() }}
+
+
+
+{% endblock %}
diff --git a/press/templates/emails/disabled_auto_disk_expansion.html b/press/templates/emails/disabled_auto_disk_expansion.html
new file mode 100644
index 00000000000..31ae2b9ba7d
--- /dev/null
+++ b/press/templates/emails/disabled_auto_disk_expansion.html
@@ -0,0 +1,31 @@
+{% import "templates/emails/macros.html" as utils %} {% extends
+"templates/emails/base.html" %} {% block content %}
+
+
+
+
+ Server {{ server }} has used more than {{ used_storage_percentage }} of storage
+ available.
+
+
+ Your server has currently used {{ current_disk_usage }} of the available
+ {{ available_disk_space }}
+
+ To ensure that the server is not overloaded, we recommend increasing the
+ storage by {{ increase_by }}
+
+
+ To enable auto disk expansion, please refer to the following
+ documentation
+
+ Need Help?
+ Please raise a ticket on support.frappe.io for queries.
+
+
+ {{ utils.separator() }} {{ utils.signature() }}
+
+
+
+{% endblock %}
diff --git a/press/templates/emails/disabled_site_monitoring.html b/press/templates/emails/disabled_site_monitoring.html
new file mode 100644
index 00000000000..64618bd16bf
--- /dev/null
+++ b/press/templates/emails/disabled_site_monitoring.html
@@ -0,0 +1,27 @@
+{% extends "templates/emails/base.html" %} {% block content %}
+
+
+
+ Hey there,
+
+ The monitoring & alerting mechanism has been disabled for your site
+ {{site}}.
+ Reason for disabling monitoring:
+
+
{{reason}}
+
+ Please re-enable monitoring from the dashboard to receive alerts.
+ Check the documentation
+ here for the guide. If you need any assistance, please raise a ticket on
+ frappecloud.com/support.
+
+
+
+
+{% endblock %}
diff --git a/press/templates/emails/drip_email.html b/press/templates/emails/drip_email.html
deleted file mode 100644
index 6683cbcd34d..00000000000
--- a/press/templates/emails/drip_email.html
+++ /dev/null
@@ -1,12 +0,0 @@
-{% extends "templates/emails/base.html" %}
-
- {% block content %}
-
-
-
- {% endblock %}
-
diff --git a/press/templates/emails/enabled_auto_disk_expansion.html b/press/templates/emails/enabled_auto_disk_expansion.html
new file mode 100644
index 00000000000..8ff98b125e0
--- /dev/null
+++ b/press/templates/emails/enabled_auto_disk_expansion.html
@@ -0,0 +1,31 @@
+{% import "templates/emails/macros.html" as utils %} {% extends
+"templates/emails/base.html" %} {% block content %}
+
+
+
+
+ Server {{ server }} has used more than 90% of storage
+ available.
+
+
+ Your server has currently used {{ current_disk_usage }} of the available
+ {{ available_disk_space }}
+
+ To ensure that the server is not overloaded, we are automatically increasing the
+ storage by {{ increase_by }}
+
+
+ To disable auto disk expansion, please refer to the following
+ documentation
+
+ Need Help?
+ Please raise a ticket on support.frappe.io for queries.
+
+
+ {{ utils.separator() }} {{ utils.signature() }}
+
+
+
+{% endblock %}
diff --git a/press/templates/emails/incident.html b/press/templates/emails/incident.html
new file mode 100644
index 00000000000..4d2e82b5773
--- /dev/null
+++ b/press/templates/emails/incident.html
@@ -0,0 +1,17 @@
+{% extends "templates/emails/base.html" %} {% import
+"templates/emails/macros.html" as utils %} {% block content %}
+
+
+
+
Hi,
+
{{ message }}
+
+ For any concerns or questions, please reach out to our support team at {{
+ utils.link('support.frappe.io', 'https://support.frappe.io')}}
+
+ {{ utils.button('View Server Analytics', link, true) }} {{ utils.separator()
+ }} {{ utils.signature() }}
+
+
+
+{% endblock %}
diff --git a/press/templates/emails/insufficient_balance.html b/press/templates/emails/insufficient_balance.html
new file mode 100644
index 00000000000..26a94d1a81a
--- /dev/null
+++ b/press/templates/emails/insufficient_balance.html
@@ -0,0 +1,20 @@
+{% import "templates/emails/macros.html" as utils %}
+{% extends "templates/emails/base.html" %}
+
+{% block content %}
+
+
+
+
+ Insufficient Balance for upcoming Frappe Cloud payment
+
+
+ Greetings from Frappe! We noticed that your Frappe Cloud account does not have sufficient credits to pay for the upcoming invoice.
+ Please consider adding credits to your account, or updating the payment mode to Card and adding a card with sufficient balance, from {{ utils.link('Billing Settings', '/dashboard/billing') }} in your Frappe Cloud dashboard.
+
+ {{ utils.separator() }}
+ {{ utils.signature() }}
+
+
+
+{% endblock %}
diff --git a/press/templates/emails/invite_team_member.html b/press/templates/emails/invite_team_member.html
index fb0a2cbc637..700e27484f6 100644
--- a/press/templates/emails/invite_team_member.html
+++ b/press/templates/emails/invite_team_member.html
@@ -1,12 +1,13 @@
{% extends "templates/emails/base.html" %}
+{% set invited_by = invited_by %}
{% block content %}
Hello,
- You have been invited to join Frappe Cloud.
+ You have been invited to join the {{ invited_by }} team on Frappe Cloud.
Please confirm your email address by clicking the button below:
@@ -14,7 +15,7 @@
-
+
Verify Account
diff --git a/press/templates/emails/login_otp.html b/press/templates/emails/login_otp.html
new file mode 100644
index 00000000000..aaabd1234ad
--- /dev/null
+++ b/press/templates/emails/login_otp.html
@@ -0,0 +1,23 @@
+{% import "templates/emails/macros.html" as utils %}
+{% extends "templates/emails/saas.html" %}
+{% set title = title %}
+{% set image_path = image_path %}
+{% set read_pixel_path = read_pixel_path %}
+{% set otp = otp %}
+
+{% block content %}
+
+
+
+
Use the verification code below to log in to Frappe Cloud!
+
Verification Code
+
{{ otp }}
+ {{ utils.separator() }}
+
Team Frappe
+
+ {% if read_pixel_path %}
+
+ {% endif %}
+
+
+{% endblock %}
\ No newline at end of file
diff --git a/press/templates/emails/marketplace_app_visibility.html b/press/templates/emails/marketplace_app_visibility.html
new file mode 100644
index 00000000000..c1ef25600e6
--- /dev/null
+++ b/press/templates/emails/marketplace_app_visibility.html
@@ -0,0 +1,35 @@
+{% extends "templates/emails/base.html" %}
+
+{% block content %}
+
+
+
Hi {{ developer_name }},
+
+
I hope this email finds you well.
+
+
I'm writing to inform you about an important policy update regarding Frappe Cloud Marketplace Apps. To
+ ensure continued availability on the Marketplace, all apps are now required to be fully open-source, meaning
+ the GitHub repository must be made public.
+
+
+ To comply with this new policy, please make your app's repository public by the end of the month, i.e. 31st March 2025. After this
+ period, non-compliant apps will be disabled or removed from the Frappe Cloud Marketplace.
+
+
Here are the details for your app:
+
+
+ App Name: {{ app_name }}
+ Repository URL: {{ repository_url }}
+
+
+
We understand that this change might require some adjustments, and we appreciate your prompt attention to
+ this
+ matter. If you have any questions or anticipate any challenges in making your repository public, please
+ raise a support ticket at support.frappe.io. We're here to help and ensure a smooth transition.
+
+
Thank you for your understanding and cooperation.
+
+
Regards,
+ Team Frappe Cloud
+
+
+{% endblock %}
\ No newline at end of file
diff --git a/press/templates/emails/partner_approval.html b/press/templates/emails/partner_approval.html
new file mode 100644
index 00000000000..4d7bd801745
--- /dev/null
+++ b/press/templates/emails/partner_approval.html
@@ -0,0 +1,20 @@
+{% import "templates/emails/macros.html" as utils %}
+{% extends "templates/emails/base.html" %}
+
+{% block content %}
+
+
+
+
Hello Partner Manager,
+
User {{ user }} has raised a request to link their team to Partner {{ partner }}.
+
+ Confirm your consent by clicking on the following button.
+
+ {{ utils.button('Accept', link, true) }}
+ {{ utils.separator() }}
+ {{ utils.signature() }}
+
+
+
+{% endblock %}
+
diff --git a/press/templates/emails/partner_link_certificate.html b/press/templates/emails/partner_link_certificate.html
new file mode 100644
index 00000000000..764f408c2e4
--- /dev/null
+++ b/press/templates/emails/partner_link_certificate.html
@@ -0,0 +1,19 @@
+{% import "templates/emails/macros.html" as utils %}
+{% extends "templates/emails/base.html" %}
+
+{% block content %}
+
+
+
+
Hello,
+
Partner {{ partner }} has raised a request to link your certificate with their Frappe Cloud team account.
+
+ Confirm your consent by clicking on the following button.
+
+ {{ utils.button('Approve', link, true) }}
+ {{ utils.separator() }}
+ {{ utils.signature() }}
+
+
+
+{% endblock %}
diff --git a/press/templates/emails/press_webhook_disabled.html b/press/templates/emails/press_webhook_disabled.html
new file mode 100644
index 00000000000..7f25b50cefd
--- /dev/null
+++ b/press/templates/emails/press_webhook_disabled.html
@@ -0,0 +1,18 @@
+{% import "templates/emails/macros.html" as utils %}
+{% extends "templates/emails/base.html" %}
+
+{% block content %}
+
+
+
+
+ Due to a high number of failed webhook deliveries in the past hour, we have disabled one of your configured webhooks.
+
+
Suspended webhook endpoint: {{ endpoint }}
+
Please visit the Frappe Cloud Dashboard to review the webhook attempts and identify the issue. Once resolved, you can reactivate it directly from the dashboard.
+ {{ utils.separator() }}
+ {{ utils.signature() }}
+
+
+
+{% endblock %}
diff --git a/press/templates/emails/product_trial_email.html b/press/templates/emails/product_trial_email.html
new file mode 100644
index 00000000000..3eda989b68f
--- /dev/null
+++ b/press/templates/emails/product_trial_email.html
@@ -0,0 +1,40 @@
+
+
+
+
+
+
+
+
+ {% if logo %}
+
+ {% endif %}
+
+ {% if title %}
+ {{ title }}
+ {% else %}
+ Frappe Cloud
+ {% endif %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/press/templates/emails/product_trial_verify_account.html b/press/templates/emails/product_trial_verify_account.html
new file mode 100644
index 00000000000..614fd905bde
--- /dev/null
+++ b/press/templates/emails/product_trial_verify_account.html
@@ -0,0 +1,28 @@
+{% import "templates/emails/macros.html" as utils %}
+{% extends "templates/emails/saas_trial.html" %}
+{% set title = title %}
+{% set image_path = image_path %}
+{% set read_pixel_path = read_pixel_path %}
+{% set otp = otp %}
+{% set header_content = header_content %}
+
+{% block content %}
+
+
+
+ {% if header_content %}
+ {% autoescape false %}
+ {{ header_content }}
+ {% endautoescape %}
+ {% endif %}
+
Verification Code
+
{{ otp }}
+ {{ utils.separator() }}
+
Team Frappe
+
+ {% if read_pixel_path %}
+
+ {% endif %}
+
+
+{% endblock %}
diff --git a/press/templates/emails/reset_password.html b/press/templates/emails/reset_password.html
index 98f58c4a7cb..014bb1cc601 100644
--- a/press/templates/emails/reset_password.html
+++ b/press/templates/emails/reset_password.html
@@ -1,38 +1,21 @@
-{% extends "templates/emails/base.html" %}
+{% import "templates/emails/macros.html" as utils %}
+{% extends "templates/emails/saas.html" %}
+{% set title = title %}
+{% set image_path = image_path %}
+{% set read_pixel_path = read_pixel_path %}
{% block content %}
-
-
-
- Please click on the button below to reset your password. If this was not you, please ignore this email.
-
-
-
-
- Thanks,
- The Frappe Team
-
+
+
+
Please click on the button below to reset your password. If this was not you, please ignore this email.
+ {{ utils.button('Reset Password', link, true) }}
+ {{ utils.separator() }}
+
Team Frappe
+ {% if read_pixel_path %}
+
+ {% endif %}
{% endblock %}
diff --git a/press/templates/emails/saas.html b/press/templates/emails/saas.html
index 107a1517615..513be595f70 100644
--- a/press/templates/emails/saas.html
+++ b/press/templates/emails/saas.html
@@ -4,10 +4,10 @@
-
+
{% if image_path %}
-
+
{% endif %}
@@ -16,8 +16,8 @@
{% block content %}{% endblock %}
-
-
+
+ This email was sent by Frappe Cloud
diff --git a/press/templates/emails/saas_trial.html b/press/templates/emails/saas_trial.html
new file mode 100644
index 00000000000..a89ff220a34
--- /dev/null
+++ b/press/templates/emails/saas_trial.html
@@ -0,0 +1,27 @@
+
+
+
+
+
+
+
+
+ {% if logo_name %}
+
+ {% endif %}
+
+
+
+
+ {% block content %}{% endblock %}
+
+
+
+ This email was sent by Frappe Cloud
+
+
+
+
+
+
+
diff --git a/press/templates/emails/saas_verify_account.html b/press/templates/emails/saas_verify_account.html
index 180af1f3da2..1aa07b77a06 100644
--- a/press/templates/emails/saas_verify_account.html
+++ b/press/templates/emails/saas_verify_account.html
@@ -6,14 +6,16 @@
-
- {{ message|safe }}
-
-
+
You're almost done!
+
Just one quick step left to get you started with Frappe products!
+ Please confirm your email address to complete your account setup.
{{ utils.button('Verify Account', link, true) }}
{{ utils.separator() }}
- {{ signature_text }}
+
Team Frappe
+ {% if read_pixel_path %}
+
+ {% endif %}
{% endblock %}
diff --git a/press/templates/emails/site_exceeded_disk_usage_warning.html b/press/templates/emails/site_exceeded_disk_usage_warning.html
new file mode 100644
index 00000000000..47a5014354b
--- /dev/null
+++ b/press/templates/emails/site_exceeded_disk_usage_warning.html
@@ -0,0 +1,42 @@
+{% import "templates/emails/macros.html" as utils %}
+{% extends "templates/emails/base.html" %}
+
+{% block content %}
+
+
+
+
+ Site {{ site }} exceeded plan limit
+
+
+ Your site has exceeded
+
+ {% if current_disk_usage > 100 %}
+ - disk usage limit by {{ current_disk_usage - 100 }}%
+ {% endif %}
+ {% if current_database_usage > 100 %}
+ - database usage limit by {{ current_database_usage - 100 }}%
+ {% endif %}
+
+ To continue using your Frappe Cloud site, please upgrade your site's plan.
+
+ Need Help?
+ 1. How to change the plan? Please refer to the documentation
+ 2. If you want to know which database table is consuming the most space, you can check it using the Database Analyzer
+ 3. If you need guidance on which tables can be truncated to reduce database storage, please check the documentation
+ 4. If you have any doubts or need help, please contact support at Frappe Cloud Support
+
+ If no action is taken, your site will be suspended in {{ no_of_days_left_to_suspend }} days.
+
+
+ {{ utils.separator() }}
+ {{ utils.signature() }}
+
+
+
+{% endblock %}
\ No newline at end of file
diff --git a/press/templates/emails/site_suspend_due_to_exceeding_disk_usage.html b/press/templates/emails/site_suspend_due_to_exceeding_disk_usage.html
new file mode 100644
index 00000000000..1036cac5898
--- /dev/null
+++ b/press/templates/emails/site_suspend_due_to_exceeding_disk_usage.html
@@ -0,0 +1,26 @@
+{% import "templates/emails/macros.html" as utils %}
+{% extends "templates/emails/base.html" %}
+
+{% block content %}
+
+
+
+
+ {{ subject }}
+
+
+ Unfortunately, we must bring to your attention that your site hosted on Frappe Cloud has been temporarily suspended due to exceeding its disk / database usage for 7 consecutive days.
+
+ To continue using your Frappe Cloud site, please upgrade your site's plan.
+
+ Once you upgrade to a new plan, your site will be activated immediately.
+
+ If no action is taken, your site will be archived after 3 weeks.
+
+
+ {{ utils.separator() }}
+ {{ utils.signature() }}
+
+
+
+{% endblock %}
diff --git a/press/templates/emails/snapshot_recovery_completion.html b/press/templates/emails/snapshot_recovery_completion.html
new file mode 100644
index 00000000000..ddebed3fb0e
--- /dev/null
+++ b/press/templates/emails/snapshot_recovery_completion.html
@@ -0,0 +1,20 @@
+{% import "templates/emails/macros.html" as utils %} {% extends
+"templates/emails/base.html" %} {% block content %}
+
+
+
+
+ Snapshot ({{ snapshot }}) recovery has been completed
+
+
+ You can visit Frappe Cloud dashboard and download the site backups.
+
+ Need Help ?
+ Please raise a ticket on support.frappe.io for queries.
+
+
+ {{ utils.separator() }} {{ utils.signature() }}
+
+
+
+{% endblock %}
diff --git a/press/templates/emails/suspended_sites.html b/press/templates/emails/suspended_sites.html
new file mode 100644
index 00000000000..a671c5e8a42
--- /dev/null
+++ b/press/templates/emails/suspended_sites.html
@@ -0,0 +1,39 @@
+{% import "templates/emails/macros.html" as utils %}
+{% extends "templates/emails/base.html" %}
+
+{% block content %}
+
+
+
+
+ {{ subject }}
+
+
+ Unfortunately, we must bring to your attention that your site hosted on Frappe Cloud has been temporarily suspended due to an unpaid invoice.
+
+ To continue using your Frappe Cloud site, please settle your invoices by going to your Frappe Cloud Dashboard.
+
+ Once your invoices are settled, your site will be activated immediately.
+
+ Please complete the above steps before 20th of this month , failing which your site will be dropped and if you have subscribed to $10 plan you might lose your data permanently.
+
+ We're sad to see you discontinue your usage of Frappe Cloud and value your feedback. Please fill out this form or block 15 minutes with our team to share your experience.
+ If you have any concerns or questions, contact our support team at support.frappe.io.
+
+
+ {%- if sites -%}
+ {{ utils.separator() }}
+
+ The following sites were suspended:
+
+ {%- for site in sites -%}
+ {{ site }}
+ {%- endfor -%}
+
+ {%- endif -%}
+
+ {{ utils.separator() }}
+ {{ utils.signature() }}
+
+
+
+{% endblock %}
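
The {%- if sites -%} block above renders the list of suspended sites only when the caller passes one; the trimmed tags ({%- ... -%}) strip the surrounding whitespace. A minimal runnable sketch of that pattern with plain jinja2, using an inline stand-in for the template:

    from jinja2 import Template

    # Inline stand-in for the sites block in suspended_sites.html.
    tmpl = Template(
        "{%- if sites -%}\n"
        "The following sites were suspended:\n"
        "{% for site in sites %}{{ site }}\n{% endfor %}"
        "{%- endif -%}"
    )

    # Renders the list when sites is non-empty, and nothing at all otherwise.
    print(tmpl.render(sites=["a.frappe.cloud", "b.frappe.cloud"]))
    print(tmpl.render(sites=[]))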
diff --git a/press/templates/emails/transfer_team_confirmation.html b/press/templates/emails/transfer_team_confirmation.html
new file mode 100644
index 00000000000..98a996fc93a
--- /dev/null
+++ b/press/templates/emails/transfer_team_confirmation.html
@@ -0,0 +1,34 @@
+{% extends "templates/emails/base.html" %}
+{% import "templates/emails/macros.html" as utils %}
+
+{% block content %}
+
+
+
+
+ {{ type | capitalize }} Transfer Confirmation
+
+
+ The team {{ old_team }} has requested to transfer the {{ type }} {{ name }} to your team
+ {{ new_team }}. Click the button below to give consent for this transfer. Please note that on giving
+ consent, the following changes will happen:
+
+
+ 1. The {{ type }} {{ name }} under {{ old_team }} will be transferred to your team
+
+ {% if type == 'site' %}
+ 2. You will be billed for this site under the team {{ new_team }}
+
+ {% endif %}
+
+ If you are not aware of this request, please ignore this confirmation email.
+
+
+
+ {{ utils.button('Accept Transfer', transfer_url, true) }}
+ {{ utils.separator() }}
+ {{ utils.signature() }}
+
+
+
+{% endblock %}
\ No newline at end of file
diff --git a/press/templates/emails/verification_code_for_login.html b/press/templates/emails/verification_code_for_login.html
new file mode 100644
index 00000000000..eb8f1059b15
--- /dev/null
+++ b/press/templates/emails/verification_code_for_login.html
@@ -0,0 +1,21 @@
+{% import "templates/emails/macros.html" as utils %}
+{% extends "templates/emails/saas.html" %}
+{% set title = title %}
+{% set image_path = image_path %}
+{% set full_name = full_name %}
+{% set otp = otp %}
+
+{% block content %}
+
+
+
+
+ Hi {{ full_name }},
+
+ You have requested a verification code to log in to your Frappe Cloud account. The code is valid for 10 minutes.
+
+ {{ otp }}
+
+ If you haven't requested this, please ignore this email.
+ {{ utils.separator() }}
+
+ Team Frappe
+
+
+
+{% endblock %}
diff --git a/press/templates/emails/verify_account.html b/press/templates/emails/verify_account.html
index 3262495834c..fc7187f10e3 100644
--- a/press/templates/emails/verify_account.html
+++ b/press/templates/emails/verify_account.html
@@ -3,23 +3,28 @@
{% set title = title %}
{% set image_path = image_path %}
{% set read_pixel_path = read_pixel_path %}
+{% set otp = otp %}
{% block content %}
-
- Hello,
-
- Thanks for signing up on {{ title }}!
-
-
- Please confirm your email address by clicking the button below:
-
+
+ You're almost done!
+
+ Just one quick step left to get you started with Frappe Cloud!
+ {% if otp %}
+
+ Verification Code
+
+ {{ otp }}
+
+ Or click on the button to verify your account
+ {% else %}
+
+ Click on the button to verify your account
+ {% endif %}
{{ utils.button('Verify Account', link, true) }}
{{ utils.separator() }}
- {{ utils.signature() }}
+
+ Team Frappe
-
+ {% if read_pixel_path %}
+
+ {% endif %}
{% endblock %}
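
The verify_account.html change above makes the OTP optional: when otp is present in the context, the email shows the code with the button as a fallback; otherwise only the button is rendered. A minimal sketch of the two render paths with plain jinja2; the inline template is a stand-in for the real one, which extends the base email layout:

    from jinja2 import Template

    template = Template(
        "{% if otp %}"
        "Verification Code: {{ otp }}\n"
        "Or click on the button to verify your account: {{ link }}"
        "{% else %}"
        "Click on the button to verify your account: {{ link }}"
        "{% endif %}"
    )

    # With an OTP, the code is shown alongside the verification link.
    print(template.render(otp="482913", link="https://frappecloud.com/verify/abc"))
    # Without one, only the link path renders.
    print(template.render(otp=None, link="https://frappecloud.com/verify/abc"))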
diff --git a/press/templates/emails/verify_account_for_site_login.html b/press/templates/emails/verify_account_for_site_login.html
new file mode 100644
index 00000000000..67ce2343b9a
--- /dev/null
+++ b/press/templates/emails/verify_account_for_site_login.html
@@ -0,0 +1,20 @@
+{% import "templates/emails/macros.html" as utils %}
+{% extends "templates/emails/saas.html" %}
+{% set image_path = image_path %}
+{% set otp = otp %}
+
+{% block content %}
+
+
+
+
+ Hi,
+
+ You have requested a verification code to log in to your site(s) on Frappe Cloud.
+
+ Verification Code
+
+ {{ otp }}
+
+ If you haven't requested this, please ignore this email.
+ {{ utils.separator() }}
+
+ Team Frappe
+
+
+
+{% endblock %}
\ No newline at end of file
diff --git a/press/templates/erpnext_signup_layout.html b/press/templates/erpnext_signup_layout.html
deleted file mode 100644
index c355eef2b24..00000000000
--- a/press/templates/erpnext_signup_layout.html
+++ /dev/null
@@ -1,73 +0,0 @@
-{%- extends "templates/base.html" -%}
-
-{%- block navbar -%}
-
-
-
-
-
-
-
-
-{%- endblock -%}
-
-{%- block footer -%}
-{%- endblock -%}
-
-{%- block content -%}
-{%- endblock -%}
-
-{%- block script -%}
-{{ super() }}
-{%- endblock -%}
-
-{%- block style -%}
-
-{%- endblock -%}
diff --git a/press/templates/erpnextsmb_signup_layout.html b/press/templates/erpnextsmb_signup_layout.html
deleted file mode 100644
index 9e53b9d7455..00000000000
--- a/press/templates/erpnextsmb_signup_layout.html
+++ /dev/null
@@ -1,69 +0,0 @@
-{%- extends "templates/base.html" -%}
-
-{%- block navbar -%}
-
-
-
-
-
-
-
-
-{%- endblock -%}
-
-{%- block footer -%}
-{%- endblock -%}
-
-{%- block content -%}
-{%- endblock -%}
-
-{%- block script -%}
-{{ super() }}
-{%- endblock -%}
-
-{%- block style -%}
-
-{%- endblock -%}
diff --git a/press/templates/frappe_signup_layout.html b/press/templates/frappe_signup_layout.html
deleted file mode 100644
index 68caafd8bf3..00000000000
--- a/press/templates/frappe_signup_layout.html
+++ /dev/null
@@ -1,69 +0,0 @@
-{%- extends "templates/base.html" -%}
-
-{%- block navbar -%}
-
-
-
-
-
-
-
-
-{%- endblock -%}
-
-{%- block footer -%}
-{%- endblock -%}
-
-{%- block content -%}
-{%- endblock -%}
-
-{%- block script -%}
-{{ super() }}
-{%- endblock -%}
-
-{%- block style -%}
-
-{%- endblock -%}
diff --git a/press/templates/frappedesk_layout.html b/press/templates/frappedesk_layout.html
deleted file mode 100644
index 7fb5c9314b2..00000000000
--- a/press/templates/frappedesk_layout.html
+++ /dev/null
@@ -1,64 +0,0 @@
-{%- extends "templates/base.html" -%}
-
-{%- block navbar -%}
-
-
-
-
-
-
-
-{%- endblock -%}
-
-{%- block footer -%}
-{%- endblock -%}
-
-{%- block content -%}
-{%- endblock -%}
-
-{%- block script -%}
-{{ super() }}
-{%- endblock -%}
-
-{%- block style -%}
-
-{%- endblock -%}
-
diff --git a/press/templates/healthcare_signup_layout.html b/press/templates/healthcare_signup_layout.html
deleted file mode 100644
index 1d9baccfdf4..00000000000
--- a/press/templates/healthcare_signup_layout.html
+++ /dev/null
@@ -1,69 +0,0 @@
-{%- extends "templates/base.html" -%}
-
-{%- block navbar -%}
-
-
-
-
-
-
-
-
-{%- endblock -%}
-
-{%- block footer -%}
-{%- endblock -%}
-
-{%- block content -%}
-{%- endblock -%}
-
-{%- block script -%}
-{{ super() }}
-{%- endblock -%}
-
-{%- block style -%}
-
-{%- endblock -%}
diff --git a/press/templates/marketplace/base.html b/press/templates/marketplace/base.html
index 2b07aab2ce3..50179046672 100644
--- a/press/templates/marketplace/base.html
+++ b/press/templates/marketplace/base.html
@@ -1,31 +1,44 @@
-
-
-
-
-
-
- {% include "templates/includes/meta_block.html" %}
-
- {%- block title -%} {%- endblock -%}
- {% if not self.title() %}
- Frappe Cloud Marketplace
- {% endif %}
-
-
-
- {{ head_html or "" }}
-
-
-
- {% include "templates/marketplace/navbar.html" %}
-
- {%- block body -%} {%- endblock -%}
-
- {% include "templates/marketplace/footer.html" %}
-
-
-
-
-
+{%- extends "templates/base.html" -%}
+
+{%- block head -%}
+
+
+
+{%- endblock -%}
+
+{%- block title -%} {%- endblock -%}
+{% if not self.title() %}
+ Frappe Cloud Marketplace
+{% endif %}
+
+{%- block navbar -%}
+{% include "templates/marketplace/navbar.html" %}
+{%- endblock -%}
+
+{%- block content -%}
+{%- endblock -%}
+
+{%- block footer -%}
+{% include "templates/marketplace/footer.html" %}
+{%- endblock -%}
+
+{%- block script -%}
+{{ super() }}
+
+
+
+{%- endblock -%}
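
The marketplace/base.html rewrite above drops the standalone HTML document in favour of extending templates/base.html and overriding its navbar, content, footer, and script blocks. A toy sketch of that inheritance pattern with jinja2, assuming simplified stand-ins for both templates:

    from jinja2 import DictLoader, Environment

    env = Environment(loader=DictLoader({
        # Simplified stand-in for templates/base.html.
        "base.html": (
            "{% block navbar %}default navbar{% endblock %} | "
            "{% block content %}{% endblock %} | "
            "{% block footer %}default footer{% endblock %}"
        ),
        # Simplified stand-in for templates/marketplace/base.html.
        "marketplace.html": (
            '{% extends "base.html" %}'
            "{% block navbar %}marketplace navbar{% endblock %}"
            "{% block footer %}marketplace footer{% endblock %}"
        ),
    }))

    # Overridden blocks replace the parent's; anything not overridden
    # (content, here) falls back to base.html, so shared markup lives in one place.
    print(env.get_template("marketplace.html").render())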
diff --git a/press/templates/marketplace/footer.html b/press/templates/marketplace/footer.html
index 249aadaac9b..28323a69c91 100644
--- a/press/templates/marketplace/footer.html
+++ b/press/templates/marketplace/footer.html
@@ -21,7 +21,7 @@
School
Team
- Contact
About
@@ -35,7 +35,7 @@
Built on Frappe
diff --git a/press/templates/marketplace/macros.html b/press/templates/marketplace/macros.html
index 258ab064cf0..9aff8c301d9 100644
--- a/press/templates/marketplace/macros.html
+++ b/press/templates/marketplace/macros.html
@@ -2,8 +2,10 @@
{%- set classes = resolve_class([
'inline-flex items-center justify-center px-3 py-1 text-base leading-5 rounded-md focus:outline-none',
{
-'text-white bg-gradient-blue hover:bg-gradient-none hover:bg-blue-500 focus:shadow-outline-blue': kind == 'primary',
-'bg-gray-50 hover:bg-gray-100 text-gray-900 focus:shadow-outline-gray': kind == 'default',
+'text-white bg-gray-900 hover:bg-gray-800 active:bg-gray-700 focus-visible:ring focus-visible:ring-gray-400': kind ==
+'primary',
+'text-gray-800 bg-gray-100 hover:bg-gray-200 active:bg-gray-300 focus-visible:ring focus-visible:ring-gray-400': kind ==
+'default',
}
]) -%}
{%- if as == 'a' -%}
@@ -14,18 +16,18 @@
{% endmacro %}
{% macro link(label, url, class, blank=False) %}
-
{{ label }}
{% endmacro %}
-{% macro badge_blue(title) %}
-
+{% macro badge_gray(title) %}
+
{{ title }}
{% endmacro %}
{% macro badge_green(title) %}
-
+
{{ title }}
{% endmacro %}
@@ -85,4 +87,8 @@
{% endfor %}
-{% endmacro %}
\ No newline at end of file
+{% endmacro %}
+
+{%- macro approved_badge() -%}
+
+{%- endmacro -%}
diff --git a/press/templates/marketplace/navbar.html b/press/templates/marketplace/navbar.html
index 7413be23b74..60753b45309 100644
--- a/press/templates/marketplace/navbar.html
+++ b/press/templates/marketplace/navbar.html
@@ -1,16 +1,32 @@
-
-
-
+
+
+
diff --git a/press/templates/saas/layout.html b/press/templates/saas/layout.html
index add83e222f7..3f7b6b52fee 100644
--- a/press/templates/saas/layout.html
+++ b/press/templates/saas/layout.html
@@ -2,8 +2,8 @@
{%- block navbar -%}
-
-
+
+
{%- endblock -%}
diff --git a/press/templates/saas/macros.html b/press/templates/saas/macros.html
index 5fe70607412..c933a5edf1d 100644
--- a/press/templates/saas/macros.html
+++ b/press/templates/saas/macros.html
@@ -130,7 +130,7 @@
Verification Email Sent!
Payment Request Received
-
- Thank you for your payment request. We will send you a confirmation email shortly. In the meantime if you have any queries please reach us at
+ Thank you for your payment request. We will send you a confirmation email shortly. In the meantime, if you have any queries, please reach us at
our support page.
{% endmacro %}
@@ -163,6 +163,6 @@
Something Went Wrong
{% macro load_subs() %}
- Loading Susbcriptions
+ Loading Subscriptions
{% endmacro %}
diff --git a/press/templates/saas/setup-account.html b/press/templates/saas/setup-account.html
index fa5f6757202..23a894fc535 100644
--- a/press/templates/saas/setup-account.html
+++ b/press/templates/saas/setup-account.html
@@ -12,7 +12,7 @@
{%- else -%}
-
-
- Create Account
+
+ Create
+ Account
{% if enable_google_oauth %}
@@ -111,7 +104,7 @@