From c41e8a6aecf3a131f10ea103e64f6a4bf1fe2a0d Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 27 Apr 2026 19:10:21 -0400 Subject: [PATCH 01/15] chore: install `hat-syslog` --- Pipfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Pipfile b/Pipfile index bb30b6038..34e4dd210 100644 --- a/Pipfile +++ b/Pipfile @@ -28,3 +28,4 @@ html5lib = "*" bgutil-ytdlp-pot-provider = "~=1.3.1" yt-dlp-remote-cipher = "*" babi = "*" +hat-syslog = "*" From 9e76e89e17df36852cc80c8daa3f7abf24854320 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 1 May 2026 11:08:00 -0400 Subject: [PATCH 02/15] fix: refactor service dependencies --- .../dependencies | 2 - .../dependencies.d/base | 0 .../dependencies.d/tubesync-app-init | 0 .../bgutil-ytdlp-pot-provider/dependencies | 2 - .../dependencies.d/base | 0 .../bgutil-ytdlp-pot-provider-build | 0 .../s6-overlay/s6-rc.d/gunicorn/dependencies | 2 - .../s6-rc.d/gunicorn/dependencies.d/base | 0 .../dependencies.d/tubesync-migrations-init | 0 .../huey-database/dependencies.d/gunicorn | 1 - .../dependencies.d/tubesync-downloads-init | 0 .../dependencies.d/tubesync-migrations-init | 0 .../huey-filesystem/dependencies.d/gunicorn | 1 - .../dependencies.d/tubesync-downloads-init | 0 .../dependencies.d/tubesync-migrations-init | 0 .../huey-net-limited/dependencies.d/gunicorn | 1 - .../dependencies.d/tubesync-downloads-init | 0 .../dependencies.d/tubesync-migrations-init | 0 .../huey-network/dependencies.d/gunicorn | 1 - .../dependencies.d/tubesync-downloads-init | 0 .../dependencies.d/tubesync-migrations-init | 0 .../etc/s6-overlay/s6-rc.d/nginx/dependencies | 2 - .../s6-rc.d/nginx/dependencies.d/base | 0 .../s6-rc.d/nginx/dependencies.d/gunicorn | 0 .../tubesync-app-init/dependencies.d/base | 0 .../dependencies.d/tubesync-config-init | 0 .../s6-overlay/s6-rc.d/tubesync-app-init/run | 26 ++++++++++ .../s6-overlay/s6-rc.d/tubesync-app-init/type | 1 + .../s6-overlay/s6-rc.d/tubesync-app-init/up | 3 ++ .../tubesync-config-init/dependencies.d/base | 0 
.../s6-rc.d/tubesync-config-init/run | 17 ++++++ .../s6-rc.d/tubesync-config-init/type | 1 + .../s6-rc.d/tubesync-config-init/up | 3 ++ .../dependencies.d/base | 0 .../dependencies.d/tubesync-config-init | 0 .../s6-rc.d/tubesync-downloads-init/run | 12 +++++ .../s6-rc.d/tubesync-downloads-init/type | 1 + .../s6-rc.d/tubesync-downloads-init/up | 3 ++ .../contents.d/tubesync-app-init | 0 .../contents.d/tubesync-config-init | 0 .../contents.d/tubesync-downloads-init | 0 .../contents.d/tubesync-migrations-init | 0 .../s6-rc.d/tubesync-init/dependencies | 1 - .../etc/s6-overlay/s6-rc.d/tubesync-init/run | 52 ------------------- .../etc/s6-overlay/s6-rc.d/tubesync-init/type | 2 +- .../etc/s6-overlay/s6-rc.d/tubesync-init/up | 3 -- .../dependencies.d/base | 0 .../dependencies.d/tubesync-app-init | 0 .../s6-rc.d/tubesync-migrations-init/run | 12 +++++ .../s6-rc.d/tubesync-migrations-init/type | 1 + .../s6-rc.d/tubesync-migrations-init/up | 3 ++ .../s6-overlay/s6-rc.d/yt-cipher/dependencies | 2 - .../s6-rc.d/yt-cipher/dependencies.d/base | 0 .../dependencies.d/tubesync-app-init | 0 54 files changed, 84 insertions(+), 71 deletions(-) delete mode 100644 config/root/etc/s6-overlay/s6-rc.d/bgutil-ytdlp-pot-provider-build/dependencies create mode 100644 config/root/etc/s6-overlay/s6-rc.d/bgutil-ytdlp-pot-provider-build/dependencies.d/base create mode 100644 config/root/etc/s6-overlay/s6-rc.d/bgutil-ytdlp-pot-provider-build/dependencies.d/tubesync-app-init delete mode 100644 config/root/etc/s6-overlay/s6-rc.d/bgutil-ytdlp-pot-provider/dependencies create mode 100644 config/root/etc/s6-overlay/s6-rc.d/bgutil-ytdlp-pot-provider/dependencies.d/base create mode 100644 config/root/etc/s6-overlay/s6-rc.d/bgutil-ytdlp-pot-provider/dependencies.d/bgutil-ytdlp-pot-provider-build delete mode 100644 config/root/etc/s6-overlay/s6-rc.d/gunicorn/dependencies create mode 100644 config/root/etc/s6-overlay/s6-rc.d/gunicorn/dependencies.d/base create mode 100644 
config/root/etc/s6-overlay/s6-rc.d/gunicorn/dependencies.d/tubesync-migrations-init delete mode 100644 config/root/etc/s6-overlay/s6-rc.d/huey-database/dependencies.d/gunicorn create mode 100644 config/root/etc/s6-overlay/s6-rc.d/huey-database/dependencies.d/tubesync-downloads-init create mode 100644 config/root/etc/s6-overlay/s6-rc.d/huey-database/dependencies.d/tubesync-migrations-init delete mode 100644 config/root/etc/s6-overlay/s6-rc.d/huey-filesystem/dependencies.d/gunicorn create mode 100644 config/root/etc/s6-overlay/s6-rc.d/huey-filesystem/dependencies.d/tubesync-downloads-init create mode 100644 config/root/etc/s6-overlay/s6-rc.d/huey-filesystem/dependencies.d/tubesync-migrations-init delete mode 100644 config/root/etc/s6-overlay/s6-rc.d/huey-net-limited/dependencies.d/gunicorn create mode 100644 config/root/etc/s6-overlay/s6-rc.d/huey-net-limited/dependencies.d/tubesync-downloads-init create mode 100644 config/root/etc/s6-overlay/s6-rc.d/huey-net-limited/dependencies.d/tubesync-migrations-init delete mode 100644 config/root/etc/s6-overlay/s6-rc.d/huey-network/dependencies.d/gunicorn create mode 100644 config/root/etc/s6-overlay/s6-rc.d/huey-network/dependencies.d/tubesync-downloads-init create mode 100644 config/root/etc/s6-overlay/s6-rc.d/huey-network/dependencies.d/tubesync-migrations-init delete mode 100644 config/root/etc/s6-overlay/s6-rc.d/nginx/dependencies create mode 100644 config/root/etc/s6-overlay/s6-rc.d/nginx/dependencies.d/base create mode 100644 config/root/etc/s6-overlay/s6-rc.d/nginx/dependencies.d/gunicorn create mode 100644 config/root/etc/s6-overlay/s6-rc.d/tubesync-app-init/dependencies.d/base create mode 100644 config/root/etc/s6-overlay/s6-rc.d/tubesync-app-init/dependencies.d/tubesync-config-init create mode 100755 config/root/etc/s6-overlay/s6-rc.d/tubesync-app-init/run create mode 100644 config/root/etc/s6-overlay/s6-rc.d/tubesync-app-init/type create mode 100755 config/root/etc/s6-overlay/s6-rc.d/tubesync-app-init/up create 
mode 100644 config/root/etc/s6-overlay/s6-rc.d/tubesync-config-init/dependencies.d/base create mode 100755 config/root/etc/s6-overlay/s6-rc.d/tubesync-config-init/run create mode 100644 config/root/etc/s6-overlay/s6-rc.d/tubesync-config-init/type create mode 100755 config/root/etc/s6-overlay/s6-rc.d/tubesync-config-init/up create mode 100644 config/root/etc/s6-overlay/s6-rc.d/tubesync-downloads-init/dependencies.d/base create mode 100644 config/root/etc/s6-overlay/s6-rc.d/tubesync-downloads-init/dependencies.d/tubesync-config-init create mode 100755 config/root/etc/s6-overlay/s6-rc.d/tubesync-downloads-init/run create mode 100644 config/root/etc/s6-overlay/s6-rc.d/tubesync-downloads-init/type create mode 100755 config/root/etc/s6-overlay/s6-rc.d/tubesync-downloads-init/up create mode 100644 config/root/etc/s6-overlay/s6-rc.d/tubesync-init/contents.d/tubesync-app-init create mode 100644 config/root/etc/s6-overlay/s6-rc.d/tubesync-init/contents.d/tubesync-config-init create mode 100644 config/root/etc/s6-overlay/s6-rc.d/tubesync-init/contents.d/tubesync-downloads-init create mode 100644 config/root/etc/s6-overlay/s6-rc.d/tubesync-init/contents.d/tubesync-migrations-init delete mode 100644 config/root/etc/s6-overlay/s6-rc.d/tubesync-init/dependencies delete mode 100755 config/root/etc/s6-overlay/s6-rc.d/tubesync-init/run delete mode 100755 config/root/etc/s6-overlay/s6-rc.d/tubesync-init/up create mode 100644 config/root/etc/s6-overlay/s6-rc.d/tubesync-migrations-init/dependencies.d/base create mode 100644 config/root/etc/s6-overlay/s6-rc.d/tubesync-migrations-init/dependencies.d/tubesync-app-init create mode 100755 config/root/etc/s6-overlay/s6-rc.d/tubesync-migrations-init/run create mode 100644 config/root/etc/s6-overlay/s6-rc.d/tubesync-migrations-init/type create mode 100755 config/root/etc/s6-overlay/s6-rc.d/tubesync-migrations-init/up delete mode 100644 config/root/etc/s6-overlay/s6-rc.d/yt-cipher/dependencies create mode 100644 
config/root/etc/s6-overlay/s6-rc.d/yt-cipher/dependencies.d/base create mode 100644 config/root/etc/s6-overlay/s6-rc.d/yt-cipher/dependencies.d/tubesync-app-init diff --git a/config/root/etc/s6-overlay/s6-rc.d/bgutil-ytdlp-pot-provider-build/dependencies b/config/root/etc/s6-overlay/s6-rc.d/bgutil-ytdlp-pot-provider-build/dependencies deleted file mode 100644 index b12488442..000000000 --- a/config/root/etc/s6-overlay/s6-rc.d/bgutil-ytdlp-pot-provider-build/dependencies +++ /dev/null @@ -1,2 +0,0 @@ -base -tubesync-init diff --git a/config/root/etc/s6-overlay/s6-rc.d/bgutil-ytdlp-pot-provider-build/dependencies.d/base b/config/root/etc/s6-overlay/s6-rc.d/bgutil-ytdlp-pot-provider-build/dependencies.d/base new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/bgutil-ytdlp-pot-provider-build/dependencies.d/tubesync-app-init b/config/root/etc/s6-overlay/s6-rc.d/bgutil-ytdlp-pot-provider-build/dependencies.d/tubesync-app-init new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/bgutil-ytdlp-pot-provider/dependencies b/config/root/etc/s6-overlay/s6-rc.d/bgutil-ytdlp-pot-provider/dependencies deleted file mode 100644 index c15ef0c07..000000000 --- a/config/root/etc/s6-overlay/s6-rc.d/bgutil-ytdlp-pot-provider/dependencies +++ /dev/null @@ -1,2 +0,0 @@ -base -bgutil-ytdlp-pot-provider-build diff --git a/config/root/etc/s6-overlay/s6-rc.d/bgutil-ytdlp-pot-provider/dependencies.d/base b/config/root/etc/s6-overlay/s6-rc.d/bgutil-ytdlp-pot-provider/dependencies.d/base new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/bgutil-ytdlp-pot-provider/dependencies.d/bgutil-ytdlp-pot-provider-build b/config/root/etc/s6-overlay/s6-rc.d/bgutil-ytdlp-pot-provider/dependencies.d/bgutil-ytdlp-pot-provider-build new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/gunicorn/dependencies 
b/config/root/etc/s6-overlay/s6-rc.d/gunicorn/dependencies deleted file mode 100644 index b12488442..000000000 --- a/config/root/etc/s6-overlay/s6-rc.d/gunicorn/dependencies +++ /dev/null @@ -1,2 +0,0 @@ -base -tubesync-init diff --git a/config/root/etc/s6-overlay/s6-rc.d/gunicorn/dependencies.d/base b/config/root/etc/s6-overlay/s6-rc.d/gunicorn/dependencies.d/base new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/gunicorn/dependencies.d/tubesync-migrations-init b/config/root/etc/s6-overlay/s6-rc.d/gunicorn/dependencies.d/tubesync-migrations-init new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/huey-database/dependencies.d/gunicorn b/config/root/etc/s6-overlay/s6-rc.d/huey-database/dependencies.d/gunicorn deleted file mode 100644 index 8b1378917..000000000 --- a/config/root/etc/s6-overlay/s6-rc.d/huey-database/dependencies.d/gunicorn +++ /dev/null @@ -1 +0,0 @@ - diff --git a/config/root/etc/s6-overlay/s6-rc.d/huey-database/dependencies.d/tubesync-downloads-init b/config/root/etc/s6-overlay/s6-rc.d/huey-database/dependencies.d/tubesync-downloads-init new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/huey-database/dependencies.d/tubesync-migrations-init b/config/root/etc/s6-overlay/s6-rc.d/huey-database/dependencies.d/tubesync-migrations-init new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/huey-filesystem/dependencies.d/gunicorn b/config/root/etc/s6-overlay/s6-rc.d/huey-filesystem/dependencies.d/gunicorn deleted file mode 100644 index 8b1378917..000000000 --- a/config/root/etc/s6-overlay/s6-rc.d/huey-filesystem/dependencies.d/gunicorn +++ /dev/null @@ -1 +0,0 @@ - diff --git a/config/root/etc/s6-overlay/s6-rc.d/huey-filesystem/dependencies.d/tubesync-downloads-init b/config/root/etc/s6-overlay/s6-rc.d/huey-filesystem/dependencies.d/tubesync-downloads-init new file mode 100644 index 
000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/huey-filesystem/dependencies.d/tubesync-migrations-init b/config/root/etc/s6-overlay/s6-rc.d/huey-filesystem/dependencies.d/tubesync-migrations-init new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/huey-net-limited/dependencies.d/gunicorn b/config/root/etc/s6-overlay/s6-rc.d/huey-net-limited/dependencies.d/gunicorn deleted file mode 100644 index 8b1378917..000000000 --- a/config/root/etc/s6-overlay/s6-rc.d/huey-net-limited/dependencies.d/gunicorn +++ /dev/null @@ -1 +0,0 @@ - diff --git a/config/root/etc/s6-overlay/s6-rc.d/huey-net-limited/dependencies.d/tubesync-downloads-init b/config/root/etc/s6-overlay/s6-rc.d/huey-net-limited/dependencies.d/tubesync-downloads-init new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/huey-net-limited/dependencies.d/tubesync-migrations-init b/config/root/etc/s6-overlay/s6-rc.d/huey-net-limited/dependencies.d/tubesync-migrations-init new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/huey-network/dependencies.d/gunicorn b/config/root/etc/s6-overlay/s6-rc.d/huey-network/dependencies.d/gunicorn deleted file mode 100644 index 8b1378917..000000000 --- a/config/root/etc/s6-overlay/s6-rc.d/huey-network/dependencies.d/gunicorn +++ /dev/null @@ -1 +0,0 @@ - diff --git a/config/root/etc/s6-overlay/s6-rc.d/huey-network/dependencies.d/tubesync-downloads-init b/config/root/etc/s6-overlay/s6-rc.d/huey-network/dependencies.d/tubesync-downloads-init new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/huey-network/dependencies.d/tubesync-migrations-init b/config/root/etc/s6-overlay/s6-rc.d/huey-network/dependencies.d/tubesync-migrations-init new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/nginx/dependencies 
b/config/root/etc/s6-overlay/s6-rc.d/nginx/dependencies deleted file mode 100644 index c4f03c232..000000000 --- a/config/root/etc/s6-overlay/s6-rc.d/nginx/dependencies +++ /dev/null @@ -1,2 +0,0 @@ -base -gunicorn diff --git a/config/root/etc/s6-overlay/s6-rc.d/nginx/dependencies.d/base b/config/root/etc/s6-overlay/s6-rc.d/nginx/dependencies.d/base new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/nginx/dependencies.d/gunicorn b/config/root/etc/s6-overlay/s6-rc.d/nginx/dependencies.d/gunicorn new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-app-init/dependencies.d/base b/config/root/etc/s6-overlay/s6-rc.d/tubesync-app-init/dependencies.d/base new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-app-init/dependencies.d/tubesync-config-init b/config/root/etc/s6-overlay/s6-rc.d/tubesync-app-init/dependencies.d/tubesync-config-init new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-app-init/run b/config/root/etc/s6-overlay/s6-rc.d/tubesync-app-init/run new file mode 100755 index 000000000..fa99495c1 --- /dev/null +++ b/config/root/etc/s6-overlay/s6-rc.d/tubesync-app-init/run @@ -0,0 +1,26 @@ +#!/command/with-contenv bash + +# Copy local_settings.py for the user +if [ -f /config/tubesync/local_settings.py ] +then + # from the image for an example or comparison + cp -v -p /app/tubesync/local_settings.py \ + /config/tubesync/local_settings.py.image + + # to the image to apply the user's settings + cp -v -p /config/tubesync/local_settings.py \ + /app/tubesync/local_settings.py + + chown app:app \ + /config/tubesync/local_settings.py.image + chmod 0755 \ + /config/tubesync/local_settings.py.image +fi + +# Reset permissions +chown -R root:app /app +chmod -R 0750 /app +chmod 0755 /app/*.py /app/*.sh +find /app -mindepth 2 -type f -execdir chmod 640 '{}' + +chown -R 
app:app /app/common/static +chown -R app:app /app/static diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-app-init/type b/config/root/etc/s6-overlay/s6-rc.d/tubesync-app-init/type new file mode 100644 index 000000000..3d92b15f2 --- /dev/null +++ b/config/root/etc/s6-overlay/s6-rc.d/tubesync-app-init/type @@ -0,0 +1 @@ +oneshot \ No newline at end of file diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-app-init/up b/config/root/etc/s6-overlay/s6-rc.d/tubesync-app-init/up new file mode 100755 index 000000000..965be2249 --- /dev/null +++ b/config/root/etc/s6-overlay/s6-rc.d/tubesync-app-init/up @@ -0,0 +1,3 @@ +#!/command/execlineb -P + +/etc/s6-overlay/s6-rc.d/tubesync-app-init/run diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-config-init/dependencies.d/base b/config/root/etc/s6-overlay/s6-rc.d/tubesync-config-init/dependencies.d/base new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-config-init/run b/config/root/etc/s6-overlay/s6-rc.d/tubesync-config-init/run new file mode 100755 index 000000000..14dcda75f --- /dev/null +++ b/config/root/etc/s6-overlay/s6-rc.d/tubesync-config-init/run @@ -0,0 +1,17 @@ +#!/command/with-contenv bash + +# Change runtime user UID and GID +groupmod -o -g "${PGID:=911}" app +usermod -o -u "${PUID:=911}" app + +# Ensure existence and reset permissions +mkdir -v -p /run/app +chown -R app:app /run/app +chmod -R 0700 /run/app + +# Ensure /config directories exist +mkdir -v -p /config/{cache,media,state,tasks,tubesync} + +# Reset permissions +chown -R app:app /config +chmod -R 0755 /config diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-config-init/type b/config/root/etc/s6-overlay/s6-rc.d/tubesync-config-init/type new file mode 100644 index 000000000..3d92b15f2 --- /dev/null +++ b/config/root/etc/s6-overlay/s6-rc.d/tubesync-config-init/type @@ -0,0 +1 @@ +oneshot \ No newline at end of file diff --git 
a/config/root/etc/s6-overlay/s6-rc.d/tubesync-config-init/up b/config/root/etc/s6-overlay/s6-rc.d/tubesync-config-init/up new file mode 100755 index 000000000..6c6ae61fc --- /dev/null +++ b/config/root/etc/s6-overlay/s6-rc.d/tubesync-config-init/up @@ -0,0 +1,3 @@ +#!/command/execlineb -P + +/etc/s6-overlay/s6-rc.d/tubesync-config-init/run diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-downloads-init/dependencies.d/base b/config/root/etc/s6-overlay/s6-rc.d/tubesync-downloads-init/dependencies.d/base new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-downloads-init/dependencies.d/tubesync-config-init b/config/root/etc/s6-overlay/s6-rc.d/tubesync-downloads-init/dependencies.d/tubesync-config-init new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-downloads-init/run b/config/root/etc/s6-overlay/s6-rc.d/tubesync-downloads-init/run new file mode 100755 index 000000000..784d137b5 --- /dev/null +++ b/config/root/etc/s6-overlay/s6-rc.d/tubesync-downloads-init/run @@ -0,0 +1,12 @@ +#!/command/with-contenv bash + +# Optionally reset the download dir permissions +if [[ 'True' != "${TUBESYNC_RESET_DOWNLOAD_DIR:-True}" ]] +then + exit 0 +fi + +echo "TUBESYNC_RESET_DOWNLOAD_DIR=True, Resetting /downloads directory permissions" + +chown -R app:app /downloads +chmod -R 0755 /downloads diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-downloads-init/type b/config/root/etc/s6-overlay/s6-rc.d/tubesync-downloads-init/type new file mode 100644 index 000000000..3d92b15f2 --- /dev/null +++ b/config/root/etc/s6-overlay/s6-rc.d/tubesync-downloads-init/type @@ -0,0 +1 @@ +oneshot \ No newline at end of file diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-downloads-init/up b/config/root/etc/s6-overlay/s6-rc.d/tubesync-downloads-init/up new file mode 100755 index 000000000..f64254a29 --- /dev/null +++ 
b/config/root/etc/s6-overlay/s6-rc.d/tubesync-downloads-init/up @@ -0,0 +1,3 @@ +#!/command/execlineb -P + +/etc/s6-overlay/s6-rc.d/tubesync-downloads-init/run diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-init/contents.d/tubesync-app-init b/config/root/etc/s6-overlay/s6-rc.d/tubesync-init/contents.d/tubesync-app-init new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-init/contents.d/tubesync-config-init b/config/root/etc/s6-overlay/s6-rc.d/tubesync-init/contents.d/tubesync-config-init new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-init/contents.d/tubesync-downloads-init b/config/root/etc/s6-overlay/s6-rc.d/tubesync-init/contents.d/tubesync-downloads-init new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-init/contents.d/tubesync-migrations-init b/config/root/etc/s6-overlay/s6-rc.d/tubesync-init/contents.d/tubesync-migrations-init new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-init/dependencies b/config/root/etc/s6-overlay/s6-rc.d/tubesync-init/dependencies deleted file mode 100644 index df967b96a..000000000 --- a/config/root/etc/s6-overlay/s6-rc.d/tubesync-init/dependencies +++ /dev/null @@ -1 +0,0 @@ -base diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-init/run b/config/root/etc/s6-overlay/s6-rc.d/tubesync-init/run deleted file mode 100755 index 4ab92e4f3..000000000 --- a/config/root/etc/s6-overlay/s6-rc.d/tubesync-init/run +++ /dev/null @@ -1,52 +0,0 @@ -#!/command/with-contenv bash - -# Change runtime user UID and GID -groupmod -o -g "${PGID:=911}" app -usermod -o -u "${PUID:=911}" app - -# Ensure /config directories exist -mkdir -v -p /config/{cache,media,state,tasks,tubesync} - -# Copy local_settings.py for the user -if [ -f /config/tubesync/local_settings.py ] -then - # from the image for an example or comparison - cp 
-v -p /app/tubesync/local_settings.py \ - /config/tubesync/local_settings.py.image - - # to the image to apply the user's settings - cp -v -p /config/tubesync/local_settings.py \ - /app/tubesync/local_settings.py -fi - -# Reset permissions -chown -R app:app /run/app -chmod -R 0700 /run/app -chown -R app:app /config -chmod -R 0755 /config -chown -R root:app /app -chmod -R 0750 /app -chmod 0755 /app/*.py /app/*.sh -find /app -mindepth 2 -type f -execdir chmod 640 '{}' + -chown -R app:app /app/common/static -chown -R app:app /app/static - -# Optionally reset the download dir permissions -if [ "${TUBESYNC_RESET_DOWNLOAD_DIR:=True}" == "True" ] -then - export TUBESYNC_RESET_DOWNLOAD_DIR - echo "TUBESYNC_RESET_DOWNLOAD_DIR=True, Resetting /downloads directory permissions" - chown -R app:app /downloads - chmod -R 0755 /downloads -fi - -if [ 'True' = "${TUBESYNC_DEBUG:-False}" ] -then - s6-setuidgid app \ - /usr/bin/python3 /app/manage.py \ - showmigrations -v 3 --list -fi - -# Run migrations -exec s6-setuidgid app \ - /usr/bin/python3 /app/manage.py migrate diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-init/type b/config/root/etc/s6-overlay/s6-rc.d/tubesync-init/type index 3d92b15f2..757b42211 100644 --- a/config/root/etc/s6-overlay/s6-rc.d/tubesync-init/type +++ b/config/root/etc/s6-overlay/s6-rc.d/tubesync-init/type @@ -1 +1 @@ -oneshot \ No newline at end of file +bundle diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-init/up b/config/root/etc/s6-overlay/s6-rc.d/tubesync-init/up deleted file mode 100755 index 5f51f603c..000000000 --- a/config/root/etc/s6-overlay/s6-rc.d/tubesync-init/up +++ /dev/null @@ -1,3 +0,0 @@ -#!/command/execlineb -P - -/etc/s6-overlay/s6-rc.d/tubesync-init/run diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-migrations-init/dependencies.d/base b/config/root/etc/s6-overlay/s6-rc.d/tubesync-migrations-init/dependencies.d/base new file mode 100644 index 000000000..e69de29bb diff --git 
a/config/root/etc/s6-overlay/s6-rc.d/tubesync-migrations-init/dependencies.d/tubesync-app-init b/config/root/etc/s6-overlay/s6-rc.d/tubesync-migrations-init/dependencies.d/tubesync-app-init new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-migrations-init/run b/config/root/etc/s6-overlay/s6-rc.d/tubesync-migrations-init/run new file mode 100755 index 000000000..34d4eb781 --- /dev/null +++ b/config/root/etc/s6-overlay/s6-rc.d/tubesync-migrations-init/run @@ -0,0 +1,12 @@ +#!/command/with-contenv bash + +if [[ 'True' == "${TUBESYNC_DEBUG:-False}" ]] +then + s6-setuidgid app \ + /usr/bin/python3 /app/manage.py \ + showmigrations -v 3 --list +fi + +# Run migrations +exec s6-setuidgid app \ + /usr/bin/python3 /app/manage.py migrate diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-migrations-init/type b/config/root/etc/s6-overlay/s6-rc.d/tubesync-migrations-init/type new file mode 100644 index 000000000..3d92b15f2 --- /dev/null +++ b/config/root/etc/s6-overlay/s6-rc.d/tubesync-migrations-init/type @@ -0,0 +1 @@ +oneshot \ No newline at end of file diff --git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-migrations-init/up b/config/root/etc/s6-overlay/s6-rc.d/tubesync-migrations-init/up new file mode 100755 index 000000000..4f0e5a50b --- /dev/null +++ b/config/root/etc/s6-overlay/s6-rc.d/tubesync-migrations-init/up @@ -0,0 +1,3 @@ +#!/command/execlineb -P + +/etc/s6-overlay/s6-rc.d/tubesync-migrations-init/run diff --git a/config/root/etc/s6-overlay/s6-rc.d/yt-cipher/dependencies b/config/root/etc/s6-overlay/s6-rc.d/yt-cipher/dependencies deleted file mode 100644 index b12488442..000000000 --- a/config/root/etc/s6-overlay/s6-rc.d/yt-cipher/dependencies +++ /dev/null @@ -1,2 +0,0 @@ -base -tubesync-init diff --git a/config/root/etc/s6-overlay/s6-rc.d/yt-cipher/dependencies.d/base b/config/root/etc/s6-overlay/s6-rc.d/yt-cipher/dependencies.d/base new file mode 100644 index 000000000..e69de29bb diff --git 
a/config/root/etc/s6-overlay/s6-rc.d/yt-cipher/dependencies.d/tubesync-app-init b/config/root/etc/s6-overlay/s6-rc.d/yt-cipher/dependencies.d/tubesync-app-init new file mode 100644 index 000000000..e69de29bb From 19f65c37386b1d65ab508ca0fecf68412f1d6911 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 1 May 2026 11:33:33 -0400 Subject: [PATCH 03/15] chore: install `busybox-syslogd` --- Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile b/Dockerfile index d14f867aa..170c7bead 100644 --- a/Dockerfile +++ b/Dockerfile @@ -569,6 +569,7 @@ RUN --mount=type=cache,id=apt-lib-cache-${TARGETARCH},sharing=private,target=/va python3-libsass \ python3-pip-whl \ python3-socks \ + busybox-syslogd \ curl \ indent \ less \ From 0deb95e02cce5aadb5f1eb54e8fe73ed4d0422cb Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 1 May 2026 12:29:27 -0400 Subject: [PATCH 04/15] feat: add the `hat-syslog-server` service --- .../s6-rc.d/hat-syslog-server/dependencies.d/base | 0 .../dependencies.d/tubesync-config-init | 0 .../etc/s6-overlay/s6-rc.d/hat-syslog-server/run | 15 +++++++++++++++ .../etc/s6-overlay/s6-rc.d/hat-syslog-server/type | 1 + .../s6-rc.d/user/contents.d/hat-syslog-server | 0 5 files changed, 16 insertions(+) create mode 100644 config/root/etc/s6-overlay/s6-rc.d/hat-syslog-server/dependencies.d/base create mode 100644 config/root/etc/s6-overlay/s6-rc.d/hat-syslog-server/dependencies.d/tubesync-config-init create mode 100755 config/root/etc/s6-overlay/s6-rc.d/hat-syslog-server/run create mode 100644 config/root/etc/s6-overlay/s6-rc.d/hat-syslog-server/type create mode 100644 config/root/etc/s6-overlay/s6-rc.d/user/contents.d/hat-syslog-server diff --git a/config/root/etc/s6-overlay/s6-rc.d/hat-syslog-server/dependencies.d/base b/config/root/etc/s6-overlay/s6-rc.d/hat-syslog-server/dependencies.d/base new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/hat-syslog-server/dependencies.d/tubesync-config-init 
b/config/root/etc/s6-overlay/s6-rc.d/hat-syslog-server/dependencies.d/tubesync-config-init new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/hat-syslog-server/run b/config/root/etc/s6-overlay/s6-rc.d/hat-syslog-server/run new file mode 100755 index 000000000..13b8c41bd --- /dev/null +++ b/config/root/etc/s6-overlay/s6-rc.d/hat-syslog-server/run @@ -0,0 +1,15 @@ +#!/command/with-contenv bash + +set -e +cd /app + +DIR="${XDG_STATE_HOME:-/config/state}/hat" +DATABASE="${DIR}/syslog.db" + +mkdir -p "${DIR}" +chown -R app:app "${DIR}" +chmod 0700 "${DIR}" + +exec s6-setuidgid app \ + hat-syslog-server --log-level 'DEBUG' \ + --db-enable-archive --db-path "${DATABASE}" diff --git a/config/root/etc/s6-overlay/s6-rc.d/hat-syslog-server/type b/config/root/etc/s6-overlay/s6-rc.d/hat-syslog-server/type new file mode 100644 index 000000000..5883cff0c --- /dev/null +++ b/config/root/etc/s6-overlay/s6-rc.d/hat-syslog-server/type @@ -0,0 +1 @@ +longrun diff --git a/config/root/etc/s6-overlay/s6-rc.d/user/contents.d/hat-syslog-server b/config/root/etc/s6-overlay/s6-rc.d/user/contents.d/hat-syslog-server new file mode 100644 index 000000000..e69de29bb From f53a70bacadd024bfbb86046b98babef3ae90943 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 13 May 2026 18:52:14 -0400 Subject: [PATCH 05/15] feat: add the `busybox-syslogd` service --- .../busybox-syslogd/dependencies.d/base | 0 .../dependencies.d/hat-syslog-server | 0 .../s6-overlay/s6-rc.d/busybox-syslogd/run | 25 +++++++++++++++++++ .../s6-overlay/s6-rc.d/busybox-syslogd/type | 1 + .../s6-rc.d/user/contents.d/busybox-syslogd | 0 5 files changed, 26 insertions(+) create mode 100644 config/root/etc/s6-overlay/s6-rc.d/busybox-syslogd/dependencies.d/base create mode 100644 config/root/etc/s6-overlay/s6-rc.d/busybox-syslogd/dependencies.d/hat-syslog-server create mode 100755 config/root/etc/s6-overlay/s6-rc.d/busybox-syslogd/run create mode 100644 
config/root/etc/s6-overlay/s6-rc.d/busybox-syslogd/type create mode 100644 config/root/etc/s6-overlay/s6-rc.d/user/contents.d/busybox-syslogd diff --git a/config/root/etc/s6-overlay/s6-rc.d/busybox-syslogd/dependencies.d/base b/config/root/etc/s6-overlay/s6-rc.d/busybox-syslogd/dependencies.d/base new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/busybox-syslogd/dependencies.d/hat-syslog-server b/config/root/etc/s6-overlay/s6-rc.d/busybox-syslogd/dependencies.d/hat-syslog-server new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/busybox-syslogd/run b/config/root/etc/s6-overlay/s6-rc.d/busybox-syslogd/run new file mode 100755 index 000000000..b88340878 --- /dev/null +++ b/config/root/etc/s6-overlay/s6-rc.d/busybox-syslogd/run @@ -0,0 +1,25 @@ +#!/command/with-contenv bash + +install_pkg() { + apt-get update && apt-get install -y busybox busybox-syslogd +} + +set -e +cd / + +install -d -o root -g app -m 0750 /run/app/log + +command -v busybox >/dev/null || install_pkg + +HOST=127.0.0.1 +PORT=6514 +export HOST PORT + +config='/run/app/log/.syslog.conf' +logfile='/run/app/log/messages' + +: >| "${config}" + +exec busybox syslogd -n -S -l 8 -f "${config}" \ + -b 9 -s 2048 -O "${logfile}" \ + -L -R "${HOST}:${PORT}" diff --git a/config/root/etc/s6-overlay/s6-rc.d/busybox-syslogd/type b/config/root/etc/s6-overlay/s6-rc.d/busybox-syslogd/type new file mode 100644 index 000000000..5883cff0c --- /dev/null +++ b/config/root/etc/s6-overlay/s6-rc.d/busybox-syslogd/type @@ -0,0 +1 @@ +longrun diff --git a/config/root/etc/s6-overlay/s6-rc.d/user/contents.d/busybox-syslogd b/config/root/etc/s6-overlay/s6-rc.d/user/contents.d/busybox-syslogd new file mode 100644 index 000000000..e69de29bb From 03db7bc2afb683babd72d5bdce54b4af5701ea9b Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 13 May 2026 18:52:18 -0400 Subject: [PATCH 06/15] Create hat_busybox.py Implement BusyBox RFC 3164 log message 
parser with tests. --- tubesync/common/hat_busybox.py | 161 +++++++++++++++++++++++++++ 1 file changed, 161 insertions(+) create mode 100644 tubesync/common/hat_busybox.py diff --git a/tubesync/common/hat_busybox.py b/tubesync/common/hat_busybox.py new file mode 100644 index 000000000..e21d08aa6 --- /dev/null +++ b/tubesync/common/hat_busybox.py @@ -0,0 +1,161 @@ +import os +import re +import socket +from datetime import datetime +from hat.syslog import common + +# --- Constants & Grammar --- + +KNOWN_HOSTNAME = socket.gethostname() +RE_HOSTNAME = re.escape(KNOWN_HOSTNAME) + +VALID_MONTHS = ( + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec', +) + +S = ' ' +EOL = r'\r?\n?' +PRI = '<(?P<prival>0|[1-9][0-9]?|1[0-8][0-9]|19[0-1])>' +MONTH = f'(?P<month>{"|".join(map(re.escape, VALID_MONTHS))})' +DAY = f'(?P<day>{S}[1-9]|[1-2][0-9]|3[0-1])' +HOUR = '(?P<hour>[0-1][0-9]|2[0-3])' +MINUTE = ':(?P<minute>[0-5][0-9])' +SECOND = ':(?P<second>[0-5][0-9])' +HOST_STRICT = f'(?:{S}{RE_HOSTNAME}(?={S}))?' +NOT_HOST = f'(?!{RE_HOSTNAME})' +PID = r'(?:[\[](?P<procid>[0-9]+)[\]])' +TAG_PID = f'(?P<app_name>.+?){PID}?:' +MSG_BODY = '(?P<msg>.+)' + +formats = [ + {'parts': ( + PRI, MONTH, S, DAY, S, HOUR, MINUTE, SECOND, + HOST_STRICT, S, NOT_HOST, TAG_PID, S, + MSG_BODY, EOL, + )}, + {'parts': ( + PRI, + '(?P<app_name>.+?):', S, PID, S, + MSG_BODY, EOL, + )}, +] +for _dict in formats: + _dict['string'] = ''.join(_dict['parts']) + _dict['regex'] = re.compile(_dict['string']) + + +def msg_from_busybox_str(msg_str: str) -> common.Msg: + """Strict BusyBox RFC 3164 parser. 
Raises ValueError on any deviation.""" + now = datetime.now() + + for _dict in formats: + _format_ = _dict['regex'] + match_obj = _format_.fullmatch(msg_str) + if match_obj is not None: + break + + if match_obj is None: + raise ValueError(f'BusyBox (RFC 3164) grammar mismatch: {msg_str}') + + m = match_obj.groupdict() + + if 'month' in m: + day_val = m['day'].replace(' ', '0') + time_str = f'{m["hour"]}:{m["minute"]}:{m["second"]}' + ts_str = f'{now.year} {m["month"]} {day_val} {time_str}' + + dt = datetime.strptime(ts_str, '%Y %b %d %H:%M:%S') + if now < dt: + dt = dt.replace(year=now.year - 1) + else: + dt = now + + prival = int(str(m['prival']), 10) + procid = m.get('procid', None) + tag_str = m['app_name'] + tag_ends_with_brackets = ( + ']' == tag_str[-1] and + tag_str.rsplit('[')[-1][:-1] and + tag_str[-1] != tag_str.rsplit('[')[-1][:-1] + ) + if procid is None and tag_ends_with_brackets: + procid = tag_str.rsplit('[')[-1][:-1] + if procid is not None: + try: + _pid = int(str(procid), 10) + if 0 >= _pid: + raise ValueError('too low') + elif 4_194_304 < _pid: + raise ValueError('too high') + except Exception as e: + raise ValueError(f'BusyBox (RFC 3164) invalid process ID: {e}') + + return common.Msg( + facility=common.Facility(prival // 8), + severity=common.Severity(prival % 8), + version=None, + timestamp=dt.timestamp(), + hostname=KNOWN_HOSTNAME, + app_name=m['app_name'], + procid=m.get('procid', None), + msgid=None, + data=None, + msg=m['msg'] + ) + + +# --- Test Suite --- + +def run_test_suite(): + h = KNOWN_HOSTNAME + p = f'[{os.getpid()}]' + negative_cases = [ + f'<13>May 1 23:59:59 {h} logger[a]: Invalid pid', + f'<13>May 1 23:59:59 {h} logger[0]: Invalid pid', + f'<13>May 1 23:59:59 {h} logger[4194305]: Invalid pid', + f'<13>May 11 24:60:60 {h} logger: Invalid time', + f'<192>May 1 00:00:00 {h} logger: Priority 192', + #f'<13>Feb 29 00:00:00 {h} logger: Non-leap year', # flaky during leap years + f'<13>May 1 00:00:00 {h}extra logger: Host mismatch', + 
f'<13>Jun 32 00:00:00 {h} logger: Invalid day', + f'<13>July 1 00:00:00 {h} logger: Invalid month', + ] + test_cases = [ + f'<13>May 1 23:59:59 {h} logger[4194304]: Valid pid', + '<13>Apr 30 23:17:18 logger: testing', + f'<13>May 1 01:31:45 {h} logger{p}: with args: -i -t logger --rfc3164', + f'<13>May 1 01:34:51 logger{p}: with args: -i -t logger', + '<13>May 1 01:38:28 logger: with args: -t logger', + f'<31>May 1 01:54:23 logger.as.root{p}: with args: -i -t logger.as.root -p daemon.debug', + f'<191>May 1 01:56:59 logger.as.root{p}: with args: -i -t logger.as.root -p local7.debug', + f'<0>May 1 02:01:08 :[]{p}: with args: -i -t :[] -p kern.emerg', + '<0>May 1 02:01:08 :[]: with args: -t :[] -p kern.emerg', + f'<128>May 1 02:07:20 Already.running.as.root.in.a.container:[]{p}: with args: -i -t Already.running.as.root.in.a.container:[] -p local0.emerg', + '<13>May 1 01:47:46 root: without any args', + b'<150>gunicorn.gunicorn.access: [113427] 127.0.0.1 - - [02/May/2026:09:03:03 +0000] "GET /healthcheck HTTP/1.1" 200 2 "-" "healthcheck"\n'.decode(), + ] + + print(f'--- Running Full Test Suite (Hostname: {h}) ---') + for i, test in enumerate(test_cases, 1): + try: + res = msg_from_busybox_str(test) + except Exception as e: + print(f'[FAIL] Case {i}: {e}') + else: + print(f'[PASS] Case {i}: APP={res.app_name} PID={res.procid}') + + print('\n--- Running Negative Tests ---') + for i, test in enumerate(negative_cases, 1): + try: + msg_from_busybox_str(test) + except ValueError: + print(f'[PASS] Neg Case {i} correctly rejected') + except Exception as e: + print(f'[FAIL] Neg Case {i}: {e}') + else: + print(f'[FAIL] Neg Case {i} accepted invalid string: {test}') + + +if '__main__' == __name__: + run_test_suite() From fb8b55079ded2f1dfeffda51de9e31c0e69f650b Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 13 May 2026 18:52:22 -0400 Subject: [PATCH 07/15] chore: configure syslog for web servers --- config/root/etc/nginx/nginx.conf | 6 ++++-- 
.../s6-rc.d/gunicorn/dependencies.d/busybox-syslogd | 0 tubesync/tubesync/gunicorn.py | 7 +++++-- 3 files changed, 9 insertions(+), 4 deletions(-) create mode 100644 config/root/etc/s6-overlay/s6-rc.d/gunicorn/dependencies.d/busybox-syslogd diff --git a/config/root/etc/nginx/nginx.conf b/config/root/etc/nginx/nginx.conf index eed42ad99..54b8a64d4 100644 --- a/config/root/etc/nginx/nginx.conf +++ b/config/root/etc/nginx/nginx.conf @@ -61,8 +61,10 @@ http { # Logging log_format host '$remote_addr - $remote_user [$time_local] "[$host] $request" $status $bytes_sent "$http_referer" "$http_user_agent" "$gzip_ratio"'; - access_log /dev/stdout; - error_log stderr; + ##access_log /dev/stdout; + access_log syslog:server=unix:/dev/log,facility=local1,nohostname; + error_log syslog:server=unix:/dev/log,facility=local1,nohostname info; + error_log stderr error; # GZIP gzip on; diff --git a/config/root/etc/s6-overlay/s6-rc.d/gunicorn/dependencies.d/busybox-syslogd b/config/root/etc/s6-overlay/s6-rc.d/gunicorn/dependencies.d/busybox-syslogd new file mode 100644 index 000000000..e69de29bb diff --git a/tubesync/tubesync/gunicorn.py b/tubesync/tubesync/gunicorn.py index 2c6ebce2b..09c122722 100644 --- a/tubesync/tubesync/gunicorn.py +++ b/tubesync/tubesync/gunicorn.py @@ -35,10 +35,13 @@ def get_num_workers(): ##### Logging # Access logs are printed to stdout from nginx -accesslog = None -errorlog = '-' +##accesslog = None +##errorlog = '-' loglevel = 'info' capture_output = True +syslog = True +syslog_addr = 'unix:///dev/log' +syslog_facility = 'local2' ##### Process proc_name = 'gunicorn' From fc3cfcfa3b11279de4a4427d2eaa927d1b12cb20 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 13 May 2026 18:52:25 -0400 Subject: [PATCH 08/15] chore: clean up extra white-space --- config/root/etc/nginx/nginx.conf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/config/root/etc/nginx/nginx.conf b/config/root/etc/nginx/nginx.conf index 54b8a64d4..521571fd5 100644 --- 
a/config/root/etc/nginx/nginx.conf +++ b/config/root/etc/nginx/nginx.conf @@ -101,7 +101,7 @@ http { default 80; "https" 443; } - + # Extract the remote port from the HTTP Host header. # Uses default_http_port from above, # when no port was found in the header. @@ -173,7 +173,7 @@ http { # YouTube JavaScript Cipher Challenge Solving Server include /etc/nginx/cipher_server.conf; - + # Proof-of-Origin Token Server include /etc/nginx/token_server.conf; From bcf3b91c31ba12e95ec0f2589a85f777caa599a7 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 13 May 2026 18:52:28 -0400 Subject: [PATCH 09/15] feat: proxy the web interface for stored logs --- config/root/etc/nginx/nginx.conf | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/config/root/etc/nginx/nginx.conf b/config/root/etc/nginx/nginx.conf index 521571fd5..b20ee2496 100644 --- a/config/root/etc/nginx/nginx.conf +++ b/config/root/etc/nginx/nginx.conf @@ -169,6 +169,36 @@ http { internal; alias /downloads/; } + + # Web server logs + location /ws { + proxy_pass http://127.0.0.1:23020/ws; + + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + + proxy_set_header Host $host; + proxy_set_header X-Forwarded-Host $x_forwarded_host; + proxy_set_header X-Forwarded-Port $x_forwarded_port; + proxy_set_header X-Forwarded-Proto $x_forwarded_proto; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Real-IP $remote_addr; + proxy_redirect off; + } + location /web-logs/ { + proxy_pass http://127.0.0.1:23020/; + + proxy_cache gunicorn; + + proxy_set_header Host $host; + proxy_set_header X-Forwarded-Host $x_forwarded_host; + proxy_set_header X-Forwarded-Port $x_forwarded_port; + proxy_set_header X-Forwarded-Proto $x_forwarded_proto; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Real-IP $remote_addr; + proxy_redirect off; + } } # YouTube JavaScript Cipher Challenge Solving Server 
From e358bea2cbb39588c6ccd185c517f2de298a3c6f Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 13 May 2026 18:52:31 -0400 Subject: [PATCH 10/15] Create hat-syslog_tool.py --- tubesync/hat-syslog_tool.py | 389 ++++++++++++++++++++++++++++++++++++ 1 file changed, 389 insertions(+) create mode 100755 tubesync/hat-syslog_tool.py diff --git a/tubesync/hat-syslog_tool.py b/tubesync/hat-syslog_tool.py new file mode 100755 index 000000000..6a0b2aff4 --- /dev/null +++ b/tubesync/hat-syslog_tool.py @@ -0,0 +1,389 @@ +import argparse +import asyncio +import hashlib +import io +import json +import lzma +import os +import random +import re +import sqlite3 +import sys +import time +from dataclasses import dataclass +from datetime import datetime, timezone +from typing import (Any, BinaryIO, Callable, Dict, Iterable, Optional, + TextIO, Union) +from urllib.parse import urljoin + +# --- THIRD PARTY LIBRARIES (Requires Installation) --- +# pip install aiohttp hat-syslog +import aiohttp +try: + from hat.syslog.common import Facility, Severity +except ImportError: + Facility = None + Severity = None + + +@dataclass(frozen=True) +class _Settings: + """Unified configuration for easy review and adjustment.""" + BATCH_SIZE: int = 250 + BUSY_TIMEOUT: float = 30.0 + COOPERATIVE_SLEEP: float = 0.5 + DEFAULT_RETRIES: int = 100 + HASH_CHUNK_SIZE: int = (1024) * 32 # KiB + INITIAL_BACKOFF: float = 0.2 + MAX_BACKOFF: float = 15.0 + USER_AGENT: str = 'hat-syslog_tool/1.0' + + +@dataclass(frozen=True) +class _SqlTemplates: + """Consolidated SQL templates to maintain schema synchronization.""" + create_log: str = ''' + CREATE TABLE IF NOT EXISTS log ( + entry_timestamp REAL, facility INTEGER, severity INTEGER, + version INTEGER, msg_timestamp REAL, hostname TEXT, + app_name TEXT, procid TEXT, msgid TEXT, data TEXT, msg TEXT + );''' + create_index: str = 'CREATE INDEX IF NOT EXISTS idx_entry_ts ON log (entry_timestamp DESC);' + create_staging: str = ''' + CREATE TEMPORARY TABLE staging_rows ( + 
entry_timestamp REAL, facility INTEGER, severity INTEGER, + version INTEGER, msg_timestamp REAL, hostname TEXT, + app_name TEXT, procid TEXT, msgid TEXT, data TEXT, msg TEXT, + ext_id_ref INTEGER + );''' + create_tracker: str = ''' + CREATE TEMPORARY TABLE file_tracker ( + ext_id INTEGER PRIMARY KEY, + log_rowid INTEGER DEFAULT 0, + committed BOOLEAN DEFAULT 0 + );''' + check_file_id: str = 'SELECT 1 FROM file_tracker WHERE ext_id = ? LIMIT 1' + check_log_exists: str = 'SELECT rowid FROM log WHERE entry_timestamp = ? AND msg = ? LIMIT 1' + insert_staging: str = 'INSERT INTO staging_rows VALUES (?,?,?,?,?,?,?,?,?,?,?,?)' + insert_tracker: str = 'INSERT INTO file_tracker (ext_id, committed) VALUES (?, 0)' + insert_tracker_skip: str = 'INSERT INTO file_tracker (ext_id, log_rowid, committed) VALUES (?, ?, 1)' + move_staging_to_log: str = ''' + INSERT INTO log ( + entry_timestamp, facility, severity, version, msg_timestamp, + hostname, app_name, procid, msgid, data, msg + ) SELECT + entry_timestamp, facility, severity, version, msg_timestamp, + hostname, app_name, procid, msgid, data, msg + FROM staging_rows''' + update_tracker_committed: str = ''' + UPDATE file_tracker + SET log_rowid = ( + SELECT l.rowid FROM log l + JOIN staging_rows s ON l.entry_timestamp = s.entry_timestamp AND l.msg = s.msg + WHERE s.ext_id_ref = file_tracker.ext_id + ), committed = 1 + WHERE committed = 0 + AND EXISTS ( + SELECT 1 FROM log l + JOIN staging_rows s ON l.entry_timestamp = s.entry_timestamp AND l.msg = s.msg + WHERE s.ext_id_ref = file_tracker.ext_id + )''' + clear_staging: str = 'DELETE FROM staging_rows' + count_log: str = 'SELECT COUNT(*) FROM log' + count_verified: str = 'SELECT COUNT(*) FROM file_tracker WHERE committed = 1 AND log_rowid > 0' + + +# Module-level instances +_CFG = _Settings() +_SQL = _SqlTemplates() + + +class OutputManager: + """Uses class attributes to lock in original streams at module load time.""" + _ORIGINAL_STDOUT: TextIO = sys.stdout + _ORIGINAL_STDERR: 
TextIO = sys.stderr + + def _base_print(self, lines: Union[str, Iterable[str]], stream: TextIO) -> None: + if isinstance(lines, str): + lines = (lines,) + for line in lines: + print(line, file=stream) + + def stdout_print(self, lines: Union[str, Iterable[str]]) -> None: + self._base_print(lines, self._ORIGINAL_STDOUT) + + def stderr_print(self, lines: Union[str, Iterable[str]]) -> None: + self._base_print(lines, self._ORIGINAL_STDERR) + + +def get_fac_sev_mappers() -> tuple[Callable[[str], int], Callable[[str], int]]: + """Returns mapping functions for facility and severity strings.""" + if Facility is not None and Severity is not None: + return lambda label: Facility[label].value, lambda label: Severity[label].value + + f_labels = ( + 'KERN', 'USER', 'MAIL', 'DAEMON', 'AUTH', 'SYSLOG', 'LPR', 'NEWS', + 'UUCP', 'CRON', 'AUTHPRIV', 'FTP', 'NTP', 'AUDIT', 'ALERT', 'CLOCK', + 'LOCAL0', 'LOCAL1', 'LOCAL2', 'LOCAL3', 'LOCAL4', 'LOCAL5', 'LOCAL6', 'LOCAL7' + ) + s_labels = ( + 'EMERGENCY', 'ALERT', 'CRITICAL', 'ERROR', + 'WARNING', 'NOTICE', 'INFORMATIONAL', 'DEBUG' + ) + f_map = {k: i for i, k in enumerate(f_labels)} + s_map = {k: i for i, k in enumerate(s_labels)} + return lambda label: f_map.get(label, 1), lambda label: s_map.get(label, 6) + + +def fetch_scalar(cur: sqlite3.Cursor, query: str, params: Iterable[Any] = ()) -> Any: + """Safely executes a query and returns the first column or None.""" + res = cur.execute(query, params).fetchone() + if res is not None: + return res[0] + return None + + +def execute_with_backoff(retries: int, func: Callable[..., Any], *args: Any) -> Any: + backoff = _CFG.INITIAL_BACKOFF + max_attempts = 1 if 0 >= retries else retries + for i in range(max_attempts): + try: + return func(*args) + except sqlite3.OperationalError as e: + if 0 == retries or max_attempts - 1 == i: + raise + if 'locked' in str(e).lower() or 'busy' in str(e).lower(): + time.sleep(backoff + random.uniform(0, 0.1)) + if _CFG.MAX_BACKOFF > backoff: + backoff *= 2 + 
continue + raise + raise sqlite3.OperationalError(f'Database locked after {max_attempts} attempts.') + + +def init_db(db_path: str, clean_requested: bool = False) -> sqlite3.Connection: + if clean_requested and os.path.exists(db_path): + raise FileExistsError(f"Safety Error: '{db_path}' exists. Specify a name that doesn't exist.") + conn = sqlite3.connect(db_path, timeout=_CFG.BUSY_TIMEOUT) + with conn: + conn.execute(_SQL.create_log) + conn.execute(_SQL.create_index) + return conn + + +def commit_batch(cur: sqlite3.Cursor) -> None: + cur.execute(_SQL.move_staging_to_log) + cur.execute(_SQL.update_tracker_committed) + cur.execute(_SQL.clear_staging) + cur.connection.commit() + + +def convert_mode(stream: BinaryIO, db_path: str, retries: int, clean: bool, show_stats: bool) -> None: + start_time = time.time() + out = OutputManager() + get_fac, get_sev = get_fac_sev_mappers() + + class ByteTracker(io.BufferedIOBase): + def __init__(self, raw: BinaryIO): + self._raw = raw + self.bytes_read: int = 0 + def read(self, size: int = -1) -> bytes: + b = self._raw.read(size) + self.bytes_read += len(b) + return b + + head = stream.read(6) + is_xz = head.startswith(b'\xfd7zXZ') + data_io = ByteTracker(io.BytesIO(head + stream.read())) + f = lzma.open(data_io, 'rt') if is_xz else io.TextIOWrapper(data_io, encoding='utf-8') + stats: Dict[str, Any] = {'new': 0, 'skipped': 0, 'errors': 0, 'uncompressed_bytes': 0} + + with execute_with_backoff(retries, init_db, db_path, clean) as conn: + execute_with_backoff(retries, lambda: conn.executescript(f'{_SQL.create_staging}{_SQL.create_tracker}')) + try: + cur = conn.cursor() + execute_with_backoff(retries, lambda: cur.execute('BEGIN IMMEDIATE')) + for line in f: + if not line.strip(): continue + stats['uncompressed_bytes'] += len(line.encode('utf-8')) + try: + raw = json.loads(line) + ext_id, ts, m_obj = raw.get('id'), raw.get('timestamp'), raw.get('msg', {}) + msg_text = m_obj.get('msg', '') + if ext_id is not None: + if 
fetch_scalar(cur, _SQL.check_file_id, (ext_id,)) is not None: + stats['skipped'] += 1 + continue + if ts is not None: + row_id = fetch_scalar(cur, _SQL.check_log_exists, (ts, msg_text)) + if row_id is not None: + if ext_id is not None: + cur.execute(_SQL.insert_tracker_skip, (ext_id, row_id)) + stats['skipped'] += 1 + continue + cur.execute(_SQL.insert_staging, ( + ts, get_fac(m_obj.get('facility')), get_sev(m_obj.get('severity')), + m_obj.get('version'), m_obj.get('timestamp'), m_obj.get('hostname'), + m_obj.get('app_name'), m_obj.get('procid'), m_obj.get('msgid'), + json.dumps(m_obj.get('data')), msg_text, ext_id + )) + if ext_id is not None: + cur.execute(_SQL.insert_tracker, (ext_id,)) + stats['new'] += 1 + if 0 == stats['new'] % _CFG.BATCH_SIZE: + commit_batch(cur) + time.sleep(_CFG.COOPERATIVE_SLEEP) + execute_with_backoff(retries, lambda: cur.execute('BEGIN IMMEDIATE')) + except Exception: + stats['errors'] += 1 + continue + commit_batch(cur) + total_rows = fetch_scalar(cur, _SQL.count_log) + stats['committed_tracker'] = fetch_scalar(cur, _SQL.count_verified) + except Exception as e: + conn.rollback() + out.stderr_print(f'[-] Fatal conversion error: {e}') + sys.exit(1) + + if show_stats: + duration = time.time() - start_time + raw_total = data_io.bytes_read + ratio = (stats['uncompressed_bytes'] / raw_total) if 0 < raw_total else 0.0 + tput = (stats['new'] / duration) if 0 < duration else 0.0 + out.stdout_print([ + '\n[+] Statistics Report:', f" New Rows: {stats['new']}", + f" Skipped: {stats['skipped']}", f" Errors: {stats['errors']}", + f" Database Total: {total_rows}", f" Tracker Verified: {stats['committed_tracker']}", + f" Duration: {duration:.2f}s ({tput:.1f} rows/s)", + f" Data Processed: {stats['uncompressed_bytes'] / 1024 / 1024:.2f} MiB", + f" Source Processed: {raw_total / 1024 / 1024:.2f} MiB", + ]) + if is_xz: out.stdout_print(f" Expansion Ratio: {ratio:.2f}x") + + if (stats['skipped'] + stats['new']) != stats.get('committed_tracker', 0): + 
out.stderr_print([ + '', '!' * 80, '!!! CRITICAL INTEGRITY FAILURE DETECTED !!!'.center(80), '!' * 80, + f'\nExpected committed rows: {stats["new"]}', f'Verified committed rows: {stats.get("committed_tracker", 0)}', + '\nSITUATION: Tracker count does not match database. Inconsistency likely.', '!' * 80, '' + ]) + sys.exit(2) + + +def verify_mode(stream: BinaryIO, filename: Optional[str]) -> tuple[bool, BinaryIO]: + out = OutputManager() + hasher = hashlib.sha512() + buffer = io.BytesIO() + while chunk := stream.read(_CFG.HASH_CHUNK_SIZE): + hasher.update(chunk) + buffer.write(chunk) + digest = hasher.hexdigest() + buffer.seek(0) + if filename and '-' != filename: + if match := re.search(r'\.([a-f0-9]{6})\.([a-f0-9]{6})\.', filename): + start, end = match.groups() + if not (digest.startswith(start) and digest.endswith(end)): + out.stderr_print(f"[-] Integrity Failure: Hash ({digest[:6]}..{digest[-6:]}) != tag ({start}.{end})") + return False, buffer + try: + head = buffer.read(6) + buffer.seek(0) + if head.startswith(b'\xfd7zXZ'): + with lzma.open(buffer, 'rt') as f: json.loads(f.readline()) + else: + wrapper = io.TextIOWrapper(buffer, encoding='utf-8') + json.loads(wrapper.readline()) + wrapper.detach() + buffer.seek(0) + except Exception as e: + out.stderr_print(f"[-] Format Failure: {e}") + return False, buffer + out.stdout_print(f"[+] Verification passed: {filename if filename else 'stdin'}") + return True, buffer + + +def get_file_hex_digest(filename: str) -> str: + """Reads a file in chunks and returns its SHA-512 hex digest.""" + hasher = hashlib.sha512() + with open(filename, 'rb') as f: + while chunk := f.read(_CFG.HASH_CHUNK_SIZE): + hasher.update(chunk) + return hasher.hexdigest() + + +async def backup_mode(url: str, output_dir: Optional[str]) -> None: + now_str = datetime.now(timezone.utc).strftime('%Y%m%dT%H%M%SZ') + out = OutputManager() + + filename = f"syslog_{now_str}.{{tag}}.jsonl.xz" + headers = {'User-Agent': _CFG.USER_AGENT} + target_url = 
urljoin(url.rsplit('/index.html', 1)[0] + '/', 'backup') + temp_path = path = os.path.join(output_dir, filename) if output_dir else filename + + if os.path.exists(path): + raise FileExistsError(f"Safety Error: '{path}' already exists.") + try: + async with aiohttp.ClientSession(headers=headers) as session: + async with session.get(target_url) as resp: + resp.raise_for_status() + with lzma.open(path, 'wb', preset=9) as xz_out: + out.stdout_print(f'[*] Streaming live to {path}...') + async for line in resp.content: xz_out.write(line) + # Now that the file is closed and flushed, hash the compressed file + # Build tag: 6 chars from start, 6 from end + digest = get_file_hex_digest(path) + tag = f'{digest[: 6]}.{digest[-6 :]}' + path = temp_path.format(tag=tag) + if os.path.exists(path): + raise FileExistsError(f"Safety Error: '{path}' already exists.") + os.rename(temp_path, path) + if os.path.exists(path): + out.stdout_print(f'[+] Backup saved to: {path}') + except Exception as e: + if os.path.exists(temp_path): + os.unlink(temp_path) + out.stderr_print(f'[-] Backup failed: {e}') + sys.exit(1) + + +def main() -> None: + p = argparse.ArgumentParser(description='hat-syslog Backup/Restoration Tool') + p.add_argument('input', help="URL for backup, file path, or '-'") + p.add_argument('--out', '-o', help='Database (.db) path for convert OR directory for backup') + p.add_argument('--backup', action='store_true', default=True) + p.add_argument('--convert', '--import', action='store_true') + p.add_argument('--verify', action='store_true') + p.add_argument('--clean', action='store_true', help='Error out if target database exists') + p.add_argument('--retries', type=int, default=_CFG.DEFAULT_RETRIES) + p.add_argument('--stats', action='store_true') + args = p.parse_args() + out = OutputManager() + + if args.backup and not args.input.startswith(('http://', 'https://')): + args.backup = False + + match args: + case _ if args.verify and not args.convert: + raw_buf = 
sys.stdin.buffer if '-' == args.input else open(args.input, 'rb') + success, _ = verify_mode(raw_buf, args.input) + sys.exit(0 if success else 1) + case _ if args.convert: + if not args.out: + out.stderr_print("[-] Error: --out is required for conversion.") + sys.exit(1) + try: + raw_buf = sys.stdin.buffer if '-' == args.input else open(args.input, 'rb') + success, v_buf = verify_mode(raw_buf, args.input) + if not success: sys.exit(1) + convert_mode(v_buf, args.out, args.retries, args.clean, args.stats) + except Exception as e: + out.stderr_print(f'[-] Error: {e}') + case _ if args.backup: + asyncio.run(backup_mode(args.input, args.out)) + case _: + out.stderr_print("[-] Error: Specify URL for backup or --convert for local files.") + sys.exit(1) + + +if '__main__' == __name__: + main() + From 97a6791d212f13a1a2707481e8209398183b158e Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 15 May 2026 16:13:05 -0400 Subject: [PATCH 11/15] Create older_formats.py --- patches/hat/syslog/older_formats.py | 111 ++++++++++++++++++++++++++++ 1 file changed, 111 insertions(+) create mode 100644 patches/hat/syslog/older_formats.py diff --git a/patches/hat/syslog/older_formats.py b/patches/hat/syslog/older_formats.py new file mode 100644 index 000000000..d25c0ea5c --- /dev/null +++ b/patches/hat/syslog/older_formats.py @@ -0,0 +1,111 @@ +import re +import socket +from datetime import datetime +from hat.syslog import common + + +KNOWN_HOSTNAME = socket.gethostname() +RE_HOSTNAME = re.escape(KNOWN_HOSTNAME) + +VALID_MONTHS = ( + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec', +) + +S = ' ' +EOL = r'\r?\n?' +PRI = '<(?P<prival>0|[1-9][0-9]?|1[0-8][0-9]|19[0-1])>' +MONTH = f'(?P<month>{"|".join(map(re.escape, VALID_MONTHS))})' +DAY = f'(?P<day>{S}[1-9]|[1-2][0-9]|3[0-1])' +HOUR = '(?P<hour>[0-1][0-9]|2[0-3])' +MINUTE = ':(?P<minute>[0-5][0-9])' +SECOND = ':(?P<second>[0-5][0-9])' +HOST_STRICT = f'(?:{S}{RE_HOSTNAME}(?={S}))?'
+NOT_HOST = f'(?!{RE_HOSTNAME})' +PID = r'(?:[\[](?P<procid>[0-9]+)[\]])' +TAG_PID = f'(?P<app_name>.+?){PID}?:' +MSG_BODY = '(?P<msg>(?s:.)+)' + +formats = [ + # Generic with optional hostname and PID: + # Begins with a partial (5-15) ctime() date string. + # Does not accept hostnames other than this one. + # Remote logs that do not include a hostname are accepted. + {'parts': ( + PRI, MONTH, S, DAY, S, HOUR, MINUTE, SECOND, + HOST_STRICT, S, NOT_HOST, TAG_PID, S, + MSG_BODY, EOL, + )}, + # Support for `gunicorn` logs: + # No date or hostname. + # Also, non-standard PID placement. + {'parts': ( + PRI, + '(?P<app_name>.+?):', S, PID, S, + MSG_BODY, EOL, + )}, +] +for _dict in formats: + _dict['regex'] = re.compile(''.join(_dict['parts'])) + + +def msg_from_rfc3164_str(msg_str: str) -> common.Msg: + """RFC 3164 parser. Raises ValueError on any deviation.""" + now = datetime.now() + + for _dict in formats: + _format_ = _dict['regex'] + match_obj = _format_.fullmatch(msg_str) + if match_obj is not None: + break + + if match_obj is None: + raise ValueError(f'No formats matched: {msg_str.encode()!r}') + + m = match_obj.groupdict() + + if 'month' in m: + day_val = m['day'].replace(' ', '0') + time_str = f'{m["hour"]}:{m["minute"]}:{m["second"]}' + ts_str = f'{now.year} {m["month"]} {day_val} {time_str}' + + dt = datetime.strptime(ts_str, '%Y %b %d %H:%M:%S') + # The skew should be zero when logging from the same host. + if now < dt: + dt = dt.replace(year=now.year - 1) + else: + # The matched format did not include the date and time.
+ dt = now + + prival = int(str(m['prival']), 10) + procid = m.get('procid', None) + tag_str = m['app_name'] + tag_ends_with_brackets = ( + ']' == tag_str[-1] and + tag_str.rsplit('[')[-1][:-1] and + tag_str[-1] != tag_str.rsplit('[')[-1][:-1] + ) + if procid is None and tag_ends_with_brackets: + procid = tag_str.rsplit('[')[-1][:-1] + if procid is not None: + try: + _pid = int(str(procid), 10) + if 0 >= _pid: + raise ValueError('too low') + elif 4_194_304 < _pid: # read from /proc instead? + raise ValueError('too high') + except Exception as e: + raise ValueError(f'Invalid process ID: {e}') + + return common.Msg( + facility=common.Facility(prival // 8), + severity=common.Severity(prival % 8), + version=None, + timestamp=dt.timestamp(), + hostname=KNOWN_HOSTNAME, + app_name=m['app_name'], + procid=m.get('procid', None), + msgid=None, + data=None, + msg=m['msg'] + ) From 75c59e04fc3c2d468598600e6536f593aab89cf0 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 15 May 2026 16:13:08 -0400 Subject: [PATCH 12/15] Create encoder.py --- patches/hat/syslog/encoder.py | 217 ++++++++++++++++++++++++++++++++++ 1 file changed, 217 insertions(+) create mode 100644 patches/hat/syslog/encoder.py diff --git a/patches/hat/syslog/encoder.py b/patches/hat/syslog/encoder.py new file mode 100644 index 000000000..b9fc6596c --- /dev/null +++ b/patches/hat/syslog/encoder.py @@ -0,0 +1,217 @@ +"""Syslog message encoder/decoder""" + +import datetime +import re + +from hat import json + +from hat.syslog import common +from hat.syslog import older_formats + + +def msg_to_str(msg: common.Msg) -> str: + """Create string representation of message according to RFC 5424""" + buff = [ + f'<{msg.facility.value * 8 + msg.severity.value}>{msg.version}', + _timestamp_to_str(msg.timestamp), + msg.hostname if msg.hostname else '-', + msg.app_name if msg.app_name else '-', + msg.procid if msg.procid else '-', + msg.msgid if msg.msgid else '-', + _data_to_str(msg.data)] + if msg.msg: + buff.append('BOM' + 
msg.msg) + return ' '.join(buff) + + +def msg_from_str(msg_str: str) -> common.Msg: + """Parse message string formatted according to RFC 5424""" + match = _msg_pattern.fullmatch(msg_str) + if match is None: + return older_formats.msg_from_rfc3164_str(msg_str) + match = match.groupdict() + prival = int(match['prival']) + return common.Msg( + facility=common.Facility(prival // 8), + severity=common.Severity(prival % 8), + version=int(match['version']), + timestamp=_parse_timestamp(match['timestamp']), + hostname=None if match['hostname'] == '-' else match['hostname'], + app_name=None if match['app_name'] == '-' else match['app_name'], + procid=None if match['procid'] == '-' else match['procid'], + msgid=None if match['msgid'] == '-' else match['msgid'], + data=_parse_data(match['data']), + msg=(match['msg'][3:] if match['msg'] and match['msg'][:3] == 'BOM' + else match['msg'])) + + +def msg_to_json(msg: common.Msg) -> json.Data: + """Convert message to json serializable data""" + return {'facility': msg.facility.name, + 'severity': msg.severity.name, + 'version': msg.version, + 'timestamp': msg.timestamp, + 'hostname': msg.hostname, + 'app_name': msg.app_name, + 'procid': msg.procid, + 'msgid': msg.msgid, + 'data': msg.data, + 'msg': msg.msg} + + +def msg_from_json(data: json.Data) -> common.Msg: + """Convert json serializable data to message""" + return common.Msg(facility=common.Facility[data['facility']], + severity=common.Severity[data['severity']], + version=data['version'], + timestamp=data['timestamp'], + hostname=data['hostname'], + app_name=data['app_name'], + procid=data['procid'], + msgid=data['msgid'], + data=data['data'], + msg=data['msg']) + + +_msg_pattern = re.compile(r''' + < (?P<prival> \d+) > + (?P<version> \d+) + \ (?P<timestamp> - | + [^ ]+) + \ (?P<hostname> - | + [^ ]+) + \ (?P<app_name> - | + [^ ]+) + \ (?P<procid> - | + [^ ]+) + \ (?P<msgid> - | + [^ ]+) + \ (?P<data> - | + (\[ + ((\\(\\\\)*\]) | + [^\]])* + \])+) + (\ (?P<msg> .*))?
+''', re.X | re.DOTALL) + +_timestamp_pattern = re.compile(r''' + (?P<year> \d{4}) + - + (?P<month> \d{2}) + - + (?P<day> \d{2}) + T + (?P<hour> \d{2}) + : + (?P<minute> \d{2}) + : + (?P<second> \d{2}) + (\. (?P<fraction> \d+))? + ((?P<tz_utc> Z) | + ((?P<tz_sign> \+ | + -) + (?P<tz_hour> \d{2}) + : + (?P<tz_min> \d{2}))) +''', re.X | re.DOTALL) + +_data_pattern = re.compile(r''' + \[ + (?P<id> [^ \]]+) + (?P<param> ((\\(\\\\)*\]) | + [^\]])*) + \] + (?P<rest> .*) +''', re.X | re.DOTALL) + +_param_pattern = re.compile(r''' + \ (?P<name> [^=\]]+) + =" + (?P<value> ((\\\\) | + (\\") | + (\\\]) | + [^"\]\\])*) + " + (?P<rest> .*) +''', re.X | re.DOTALL) + +_escape_pattern = re.compile(r'''((\\\\)|(\\")|(\\]))''') + + +def _timestamp_to_str(timestamp): + if not timestamp: + return '-' + return datetime.datetime.fromtimestamp( + timestamp, datetime.timezone.utc).replace( + tzinfo=None).isoformat() + 'Z' + + +def _data_to_str(data_json): + data = json.decode(data_json) if data_json else None + if not data: + return '-' + return ''.join(f'[{sd_id}{_param_to_str(param)}]' + for sd_id, param in data.items()) + + +def _param_to_str(param): + if not param: + return '' + return ' ' + ' '.join(f'{k}="{_escape_value(v)}"' + for k, v in param.items()) + + +def _parse_timestamp(timestamp_str): + if timestamp_str == '-': + return + match = _timestamp_pattern.fullmatch(timestamp_str).groupdict() + return datetime.datetime( + year=int(match['year']), + month=int(match['month']), + day=int(match['day']), + hour=int(match['hour']), + minute=int(match['minute']), + second=int(match['second']), + microsecond=(int(int(match['fraction']) * + pow(10, 6 - len(match['fraction']))) + if match['fraction'] else None), + tzinfo=(datetime.timezone.utc if match['tz_utc'] else + datetime.timezone(datetime.timedelta( + hours=((1 if match['tz_sign'] == '+' else -1) * + int(match['tz_hour'])), + minutes=int(match['tz_hour']))))).timestamp() + + +def _parse_data(data_str): + if data_str == '-': + return + data = {} + while data_str: + match = _data_pattern.fullmatch(data_str).groupdict() + data[match['id']] =
_parse_param(match['param']) + data_str = match['rest'] + data_json = json.encode(data) + return data_json + + +def _parse_param(param_str): + param = {} + while param_str: + match = _param_pattern.fullmatch(param_str).groupdict() + param[match['name']] = _unescape_value(match['value']) + param_str = match['rest'] + return param + + +def _escape_value(value): + return value.replace('\\', '\\\\').replace('"', '\\"').replace(']', '\\]') + + +def _unescape_value(value): + return re.sub(_escape_pattern, _unescape_value_char, value) + + +def _unescape_value_char(match): + return {r'\\': '\\', + r'\"': r'"', + r'\]': r']'}[match.group(0)] From 8dce2bccd66c26d48ebd47f46faf5556faae357e Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 15 May 2026 16:13:10 -0400 Subject: [PATCH 13/15] Add hat-syslog patches to Dockerfile --- Dockerfile | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/Dockerfile b/Dockerfile index 170c7bead..c6a977cbc 100644 --- a/Dockerfile +++ b/Dockerfile @@ -720,6 +720,10 @@ RUN --mount=type=tmpfs,target=/cache \ # Copy root COPY config/root / +# patch hat-syslog +COPY patches/hat/ \ + /usr/local/lib/python3/dist-packages/hat/ + # patch yt_dlp COPY patches/yt_dlp/ \ /usr/local/lib/python3/dist-packages/yt_dlp/ From 924ef361299c14acaf8281c885d0a7ae56363642 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 15 May 2026 16:13:13 -0400 Subject: [PATCH 14/15] feat: logging from Django to hat-syslog --- .../s6-overlay/s6-rc.d/hat-syslog-server/run | 2 +- .../dependencies.d/busybox-syslogd | 0 .../dependencies.d/busybox-syslogd | 0 .../dependencies.d/busybox-syslogd | 0 .../dependencies.d/busybox-syslogd | 0 .../dependencies.d/busybox-syslogd | 0 tubesync/common/huey_syslog.py | 248 ++++++++++++++++++ tubesync/common/logger.py | 18 +- tubesync/common/logging.py | 22 ++ tubesync/tubesync/settings.py | 114 ++++++++ 10 files changed, 394 insertions(+), 10 deletions(-) create mode 100644 
config/root/etc/s6-overlay/s6-rc.d/huey-database/dependencies.d/busybox-syslogd create mode 100644 config/root/etc/s6-overlay/s6-rc.d/huey-filesystem/dependencies.d/busybox-syslogd create mode 100644 config/root/etc/s6-overlay/s6-rc.d/huey-net-limited/dependencies.d/busybox-syslogd create mode 100644 config/root/etc/s6-overlay/s6-rc.d/huey-network/dependencies.d/busybox-syslogd create mode 100644 config/root/etc/s6-overlay/s6-rc.d/tubesync-migrations-init/dependencies.d/busybox-syslogd create mode 100644 tubesync/common/huey_syslog.py create mode 100644 tubesync/common/logging.py diff --git a/config/root/etc/s6-overlay/s6-rc.d/hat-syslog-server/run b/config/root/etc/s6-overlay/s6-rc.d/hat-syslog-server/run index 13b8c41bd..a6d7c7a7d 100755 --- a/config/root/etc/s6-overlay/s6-rc.d/hat-syslog-server/run +++ b/config/root/etc/s6-overlay/s6-rc.d/hat-syslog-server/run @@ -11,5 +11,5 @@ chown -R app:app "${DIR}" chmod 0700 "${DIR}" exec s6-setuidgid app \ - hat-syslog-server --log-level 'DEBUG' \ + hat-syslog-server --log-level 'INFO' \ --db-enable-archive --db-path "${DATABASE}" diff --git a/config/root/etc/s6-overlay/s6-rc.d/huey-database/dependencies.d/busybox-syslogd b/config/root/etc/s6-overlay/s6-rc.d/huey-database/dependencies.d/busybox-syslogd new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/huey-filesystem/dependencies.d/busybox-syslogd b/config/root/etc/s6-overlay/s6-rc.d/huey-filesystem/dependencies.d/busybox-syslogd new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/huey-net-limited/dependencies.d/busybox-syslogd b/config/root/etc/s6-overlay/s6-rc.d/huey-net-limited/dependencies.d/busybox-syslogd new file mode 100644 index 000000000..e69de29bb diff --git a/config/root/etc/s6-overlay/s6-rc.d/huey-network/dependencies.d/busybox-syslogd b/config/root/etc/s6-overlay/s6-rc.d/huey-network/dependencies.d/busybox-syslogd new file mode 100644 index 000000000..e69de29bb diff 
--git a/config/root/etc/s6-overlay/s6-rc.d/tubesync-migrations-init/dependencies.d/busybox-syslogd b/config/root/etc/s6-overlay/s6-rc.d/tubesync-migrations-init/dependencies.d/busybox-syslogd new file mode 100644 index 000000000..e69de29bb diff --git a/tubesync/common/huey_syslog.py b/tubesync/common/huey_syslog.py new file mode 100644 index 000000000..0c70f43e0 --- /dev/null +++ b/tubesync/common/huey_syslog.py @@ -0,0 +1,248 @@ +import collections +import contextlib +import logging +import os +import threading +import time + +from hat.syslog import common +from hat.syslog.handler import ( + SyslogHandler as hat_syslog_handler_SyslogHandler, + _ThreadState, + _logging_handler_thread, + _record_to_msg, +) + + +logger = logging.getLogger(__name__) + + +class SyslogHandler(hat_syslog_handler_SyslogHandler): + """ + A process-safe wrapper for hat.syslog.handler.SyslogHandler. + + Bypasses immutable NamedTuple state constraints on fork boundaries, + avoids thread-lock corruption from os.fork(), and short-circuits + the time-blocking flush/close loops during a Huey graceful shutdown. 
+ """ + def __init__(self, host, port, comm_type, queue_size=1024, reconnect_delay=5, *args, **kwargs): + super().__init__(host, port, comm_type, queue_size, reconnect_delay, *args, **kwargs) + + state = self._get_parent_attr('__state') + + if state: + state.closed.set() + thread = self._get_parent_attr('__thread') + if thread and thread.is_alive(): + with state.cv, contextlib.suppress(Exception): + state.cv.notify_all() + + self.__state = _ThreadState( + host=host, + port=port, + comm_type=self._determine_comm_type(comm_type), + queue=collections.deque(), + queue_size=queue_size, + reconnect_delay=reconnect_delay, + cv=threading.Condition(), + closed=threading.Event(), + dropped=[0], + ) + + self.__thread = None + self._initial_pid = os.getpid() + self._is_shutting_down = False + + def _alive_thread(self): + if not (self.__thread and self.__thread.is_alive()): + return False + + with self.__state.cv: + if self.__thread and self.__thread.is_alive(): + self.__thread._scoreboard['alive'] = (time.time(), time.monotonic_ns(),) + return True + return False + + def _after_fork(self, current_pid=None): + # Detect if we crossed the Unix fork boundary into Huey's process worker. + if current_pid is None: + current_pid = os.getpid() + + if current_pid == self._initial_pid: + # Return early when we have not forked. 
+ return + + self._initial_pid = current_pid + + state = self.__state + new_state = _ThreadState( + host=state.host, + port=state.port, + comm_type=state.comm_type, + queue=state.queue, + queue_size=state.queue_size, + reconnect_delay=state.reconnect_delay, + cv=threading.Condition(), + closed=state.closed, + dropped=state.dropped, + ) + self.__state = new_state + self.__thread = None + + def _create_thread(self): + self._after_fork() + + if self._is_shutting_down or self.__state.closed.is_set() or self._alive_thread(): + return + + initial = self.__thread is None + with self.__state.cv: + previous = self.__thread + + self.__thread = threading.Thread( + target=_logging_handler_thread, + args=(self.__state,), + daemon=True + ) + + self.__thread._scoreboard = {'start': (time.time(), time.monotonic_ns(),)} + scoreboard = self.__thread._scoreboard + + if initial: + scoreboard['alive'] = scoreboard['start'] + scoreboard['initialized'] = scoreboard['start'] + elif previous and hasattr(previous, '_scoreboard'): + logger.debug(previous._scoreboard) + scoreboard['alive'] = previous._scoreboard.get('alive') + scoreboard['initialized'] = previous._scoreboard.get('initialized') + + self.__thread.start() + + def _determine_comm_type(self, comm_type): + if isinstance(comm_type, str): + needle = comm_type + haystack = frozenset(common.CommType.__members__) + vary = lambda x: { + x, x.upper(), + x.casefold(), x.casefold().upper(), + x.lower(), x.lower().upper(), + } + found = vary(needle).intersection(haystack) + if found: + member = tuple(found)[0] + return common.CommType[member] + else: + raise ValueError(f'Specify a valid comm_type from this list: {list(haystack)}') + + if not isinstance(comm_type, common.CommType): + raise ValueError('Invalid comm_type argument') + + def _parent_class_name(self): + return hat_syslog_handler_SyslogHandler.__name__ + + def _mangled_name(self, attr_name): + parent_class_name = self._parent_class_name() + return f'_{parent_class_name}{attr_name}' 
+ + def _get_parent_attr(self, attr_name): + """Computes and gets mangled attributes from the super class.""" + mangled_name = self._mangled_name(attr_name) + return getattr(self, mangled_name, None) + + def _set_parent_attr(self, attr_name, value): + """Computes and sets mangled attributes on the super class.""" + mangled_name = self._mangled_name(attr_name) + setattr(self, mangled_name, value) + + def emit(self, record): + if self._is_shutting_down: + with contextlib.suppress(Exception): + logger.handle(record) + return + + self._create_thread() + + if not self._alive_thread(): + with contextlib.suppress(Exception): + logger.handle(record) + return + + with self.__state.cv: + if self.__state.closed.is_set(): + self._is_shutting_down = True + logger.warning('Closed in emit') + with contextlib.suppress(Exception): + logger.handle(record) + return + + msg = _record_to_msg(record) + self.__state.queue.append(msg) + + while len(self.__state.queue) > self.__state.queue_size: + self.__state.queue.popleft() + self.__state.dropped += 1 + logger.warning('Dropped a msg in emit') + + with contextlib.suppress(Exception): + # workaround for errors/0001.txt + self.__state.cv.notify_all() + + def flush(self): + self._create_thread() + + if not self._alive_thread(): + return + + with self.__state.cv: + try: + # workaround for errors/0001.txt + self.__state.cv.notify_all() + except RuntimeError: + pass + + self.__state.cv.wait_for( + lambda: (self.__state.closed.is_set() or not len(self.__state.queue)), + timeout=(self.__state.reconnect_delay ** 2), + ) + + with contextlib.suppress(Exception): + # workaround for errors/0001.txt + self.__state.cv.notify_all() + + def close(self): + """ + Cleans up the queue, flags the state as closed, and shuts down + the background thread without allowing new ones to be generated. 
+ """ + + # Align/verify the background worker thread state immediately on exit + self._create_thread() + + # Gate the entire lock block: only wait if the thread is alive and logs exist + if 0 < len(self.__state.queue) and self._alive_thread(): + with self.__state.cv: + try: + self.__state.cv.notify_all() + except RuntimeError: + # Narrow capture: Only catch the specific lock-state breakdown + # that happens if the interpreter tears down mid-operation. + pass + + logger.debug('Flushing queue in close') + self.__state.cv.wait_for( + lambda: 0 >= len(self.__state.queue), + timeout=(3.0 * self.__state.reconnect_delay), + ) + + # Immediately trip the closed flag to unblock internal loops + self.__state.closed.set() + self._is_shutting_down = True + + if self._alive_thread(): + # Wake up any threads resting on the condition variable + with self.__state.cv, contextlib.suppress(Exception): + # workaround for errors/0001.txt + self.__state.cv.notify_all() + + super(hat_syslog_handler_SyslogHandler, self).close() + diff --git a/tubesync/common/logger.py b/tubesync/common/logger.py index 4a3c68293..12d77c253 100644 --- a/tubesync/common/logger.py +++ b/tubesync/common/logger.py @@ -1,22 +1,22 @@ import logging from django.conf import settings +##from .logging import default_handler, syslog_handler from .utils import getenv -logging_level = logging.DEBUG if settings.DEBUG else logging.INFO -default_formatter = logging.Formatter( - '%(asctime)s [%(name)s/%(levelname)s] %(message)s' -) -default_sh = logging.StreamHandler() -default_sh.setFormatter(default_formatter) -default_sh.setLevel(logging_level) +##if settings.DEBUG: +## default_handler.setLevel(logging.DEBUG) app_name = getenv('DJANGO_SETTINGS_MODULE') first_part = app_name.split('.', 1)[0] log = app_logger = logging.getLogger(first_part) -app_logger.addHandler(default_sh) -app_logger.setLevel(logging_level) +##app_logger.propagate = False +##app_logger.addHandler(default_handler) +##app_logger.addHandler(syslog_handler) 
+app_logger.setLevel(logging.INFO) +if settings.DEBUG: + app_logger.setLevel(logging.DEBUG) if ( hasattr(settings, 'DATABASES') and diff --git a/tubesync/common/logging.py b/tubesync/common/logging.py new file mode 100644 index 000000000..4b22d92dc --- /dev/null +++ b/tubesync/common/logging.py @@ -0,0 +1,22 @@ +import logging +from logging.handlers import SysLogHandler + + +default_formatter = logging.Formatter( + '%(asctime)s [%(name)s/%(levelname)s] %(message)s' +) +default_handler = logging.StreamHandler() +default_handler.setFormatter(default_formatter) +default_handler.setLevel(logging.INFO) + +syslog_formatter = logging.Formatter( + '%(asctime)s %(name)s: %(message)s', + '%b %d %H:%M:%S', +) +syslog_handler = SysLogHandler( + address='/dev/log', + facility=SysLogHandler.LOG_LOCAL0, +) +syslog_handler.setFormatter(syslog_formatter) +syslog_handler.setLevel(logging.DEBUG) + diff --git a/tubesync/tubesync/settings.py b/tubesync/tubesync/settings.py index 0d0642cf4..f24439deb 100644 --- a/tubesync/tubesync/settings.py +++ b/tubesync/tubesync/settings.py @@ -1,4 +1,5 @@ from django import VERSION as DJANGO_VERSION +from logging.handlers import SysLogHandler from pathlib import Path from common.huey import sqlite_tasks from common.utils import getenv @@ -73,6 +74,119 @@ consumer['verbose'] = DJANGO_HUEY.get('verbose', False) +LOGGING = { + 'version': 1, + 'disable_existing_loggers': False, + 'formatters': { + 'default': {}, + 'syslog': { + 'format': '%(asctime)s %(name)s: %(message)s', + 'datefmt': '%b %d %H:%M:%S', + }, + 'common': { + 'format': '%(asctime)s [%(name)s/%(levelname)s] %(message)s', + 'datefmt': None, + }, + 'consumer_simple': { + 'format': '%(asctime)s %(message)s', + 'datefmt': '%H:%M:%S', + }, + 'worker_process': { + 'format': '[%(asctime)s] %(levelname)s:%(name)s:%(process)d:%(message)s', + 'datefmt': None, + }, + 'worker_thread': { + 'format': '[%(asctime)s] %(levelname)s:%(name)s:%(process)d:%(threadName)s:%(message)s', + 'datefmt': None, + }, 
+ }, + 'handlers': { + 'hat_syslog': { + 'class': 'common.huey_syslog.SyslogHandler', + 'host': '127.0.0.1', + 'port': 6514, + 'comm_type': 'TCP', + 'level': 'DEBUG', + 'formatter': 'default', + }, + 'hat_syslog_worker_process': { + 'class': 'common.huey_syslog.SyslogHandler', + 'host': '127.0.0.1', + 'port': 6514, + 'comm_type': 'TCP', + 'level': 'DEBUG', + 'formatter': 'worker_process', + }, + 'hat_syslog_worker_thread': { + 'class': 'common.huey_syslog.SyslogHandler', + 'host': '127.0.0.1', + 'port': 6514, + 'comm_type': 'TCP', + 'level': 'DEBUG', + 'formatter': 'worker_thread', + }, + 'stderr': { + 'class': 'logging.StreamHandler', + 'level': 'DEBUG' if DEBUG else 'INFO', + 'formatter': 'common', + }, + 'stderr_worker_process': { + 'class': 'logging.StreamHandler', + 'level': 'DEBUG' if DEBUG else 'INFO', + 'formatter': 'worker_process', + }, + 'stderr_worker_thread': { + 'class': 'logging.StreamHandler', + 'level': 'DEBUG' if DEBUG else 'INFO', + 'formatter': 'worker_thread', + }, + 'syslog': { + 'class': SysLogHandler, + 'address': '/dev/log', + 'facility': SysLogHandler.LOG_LOCAL0, + 'level': 'DEBUG', + 'formatter': 'syslog', + }, + }, + 'root': { + 'handlers': ['hat_syslog', 'stderr'], + 'level': 'DEBUG', + }, + 'loggers': { + 'common.huey_syslog': { + 'handlers': ['syslog', 'stderr'], + 'level': 'DEBUG', + 'propagate': False, + }, + 'django_huey.management.commands.djangohuey': { + 'handlers': ['syslog'], + 'level': 'DEBUG', + 'propagate': False, + }, + 'hat.syslog.handler': { + 'handlers': ['syslog', 'stderr'], + 'level': 'DEBUG', + 'propagate': False, + }, + 'huey': { + 'handlers': ['hat_syslog', 'stderr'], + 'level': 'INFO' if DJANGO_HUEY.get('verbose', False) is None else 'WARNING', + 'propagate': False, + }, + 'huey.consumer.worker.process': { + 'handlers': ['hat_syslog_worker_process', 'stderr_worker_process'], + 'level': 'INFO' if DJANGO_HUEY.get('verbose', False) is None else 'WARNING', + 'propagate': False, + }, + 'huey.consumer.worker.thread': { 
+ 'handlers': ['hat_syslog_worker_thread', 'stderr_worker_thread'], + 'level': 'INFO' if DJANGO_HUEY.get('verbose', False) is None else 'WARNING', + 'propagate': False, + }, + }, +} + + TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', From 641aa0f7157b34b40e93c8c40b37d9185f2422ea Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 15 May 2026 16:17:10 -0400 Subject: [PATCH 15/15] fixup! Create hat_busybox.py --- tubesync/common/hat_busybox.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/common/hat_busybox.py b/tubesync/common/hat_busybox.py index e21d08aa6..7c2dc7db8 100644 --- a/tubesync/common/hat_busybox.py +++ b/tubesync/common/hat_busybox.py @@ -26,7 +26,7 @@ NOT_HOST = f'(?!{RE_HOSTNAME})' PID = r'(?:[\[](?P[0-9]+)[\]])' TAG_PID = f'(?P.+?){PID}?:' -MSG_BODY = '(?P.+)' +MSG_BODY = '(?P(?s:.)+)' formats = [ {'parts': (