Initial commit: AbletonMCP-AI complete system

- MCP Server with audio fallback, sample management
- Song generator with bus routing
- Reference listener and audio resampler
- Vector-based sample search
- Master chain with limiter and calibration
- Fix: Audio fallback now works without M4L
- Fix: Full song detection in sample loader

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
renato97
2026-03-28 22:53:10 -03:00
commit 6ec8663954
120 changed files with 59101 additions and 0 deletions

View File

@@ -0,0 +1,18 @@
# systemd unit: runs the AbletonMCP_AI GLM/Codex task-queue loop under WSL.
# Installed by automation/wsl/install_glm_loop_service (copied to
# /etc/systemd/system and enabled at multi-user.target).
[Unit]
Description=AbletonMCP_AI autonomous GLM/Codex queue
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
User=ren
# Project lives on the Windows drive; paths contain spaces ("Live 12 Suite").
WorkingDirectory=/mnt/c/ProgramData/Ableton/Live 12 Suite/Resources/MIDI Remote Scripts/AbletonMCP_AI
Environment=LOCAL_ENV_FILE=/mnt/c/ProgramData/Ableton/Live 12 Suite/Resources/MIDI Remote Scripts/AbletonMCP_AI/automation/wsl.local.env
ExecStart=/bin/bash /mnt/c/ProgramData/Ableton/Live 12 Suite/Resources/MIDI Remote Scripts/AbletonMCP_AI/automation/wsl/run_task_queue.sh
# The runner is expected to stay alive; restart it after 15s on any exit.
Restart=always
RestartSec=15
# Both streams append to the same log file for a single chronological record.
StandardOutput=append:/mnt/c/ProgramData/Ableton/Live 12 Suite/Resources/MIDI Remote Scripts/AbletonMCP_AI/automation/wsl_runtime/logs/service.log
StandardError=append:/mnt/c/ProgramData/Ableton/Live 12 Suite/Resources/MIDI Remote Scripts/AbletonMCP_AI/automation/wsl_runtime/logs/service.log
[Install]
WantedBy=multi-user.target

View File

@@ -0,0 +1,53 @@
#!/usr/bin/env bash
#
# Bootstrap the WSL runtime: seed the local Codex home, write its config,
# and generate the runner env file (wsl.local.env).
#
# SECURITY FIX: all secrets (Anthropic token, Telegram credentials) are now
# taken from the caller's environment instead of being hard-coded here.
# This script is committed to version control, so literal tokens in it are
# leaked credentials and must never be reintroduced.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
RUNTIME_DIR="$PROJECT_ROOT/automation/wsl_runtime"
CODEX_HOME_DIR="$RUNTIME_DIR/codex_home"
WINDOWS_CODEX_HOME="/mnt/c/Users/ren/.codex"
ENV_FILE="$PROJECT_ROOT/automation/wsl.local.env"
OPENAI_API_KEY_VALUE=""
mkdir -p "$CODEX_HOME_DIR" "$RUNTIME_DIR/logs"
# Seed the Linux-side Codex auth from the Windows install on first run only.
if [[ -f "$WINDOWS_CODEX_HOME/auth.json" && ! -f "$CODEX_HOME_DIR/auth.json" ]]; then
cp "$WINDOWS_CODEX_HOME/auth.json" "$CODEX_HOME_DIR/auth.json"
fi
if [[ -f "$CODEX_HOME_DIR/auth.json" ]]; then
OPENAI_API_KEY_VALUE="$(jq -r '.OPENAI_API_KEY // empty' "$CODEX_HOME_DIR/auth.json" 2>/dev/null || true)"
fi
cat > "$CODEX_HOME_DIR/config.toml" <<'EOF'
model = "gpt-5.4"
[sandbox_workspace_write]
network_access = true
EOF
# Secrets pass through from the environment; defaults stay empty so a missing
# token is an explicit configuration gap rather than a committed literal.
cat > "$ENV_FILE" <<EOF
export ANTHROPIC_BASE_URL='${ANTHROPIC_BASE_URL:-https://coding-intl.dashscope.aliyuncs.com/apps/anthropic}'
export ANTHROPIC_AUTH_TOKEN='${ANTHROPIC_AUTH_TOKEN:-}'
export GLM_MODEL='${GLM_MODEL:-glm-5}'
export CODEX_MODEL='${CODEX_MODEL:-gpt-5.4}'
export TELEGRAM_BOT_TOKEN='${TELEGRAM_BOT_TOKEN:-}'
export TELEGRAM_CHAT_ID='${TELEGRAM_CHAT_ID:-}'
export CODEX_HOME='$CODEX_HOME_DIR'
export GLM_AGENTS_FILE='$PROJECT_ROOT/automation/glm_agents.team.json'
export POLL_SECONDS='30'
export WATCH='1'
export CONTINUE_ON_ERROR='1'
EOF
if [[ -z "${ANTHROPIC_AUTH_TOKEN:-}" ]]; then
echo "WARNING: ANTHROPIC_AUTH_TOKEN is not set; fill it in manually in $ENV_FILE" >&2
fi
if [[ -n "$OPENAI_API_KEY_VALUE" ]]; then
printf "export OPENAI_API_KEY='%s'\n" "$OPENAI_API_KEY_VALUE" >> "$ENV_FILE"
fi
# Both files may hold credentials; restrict to the owner. auth.json may not
# exist yet, hence the best-effort chmod.
chmod 600 "$ENV_FILE" "$CODEX_HOME_DIR/auth.json" 2>/dev/null || true
chmod +x "$SCRIPT_DIR/"*.sh
echo "WSL runtime bootstrapped"
echo "Runtime dir: $RUNTIME_DIR"
echo "Env file: $ENV_FILE"
echo "Codex home: $CODEX_HOME_DIR"

View File

@@ -0,0 +1,163 @@
# Local infrastructure stack for AbletonMCP_AI:
#   postgres - shared database server for Gitea and n8n
#   redis    - password-protected cache/queue backend
#   gitea    - self-hosted git service (rootless image)
#   n8n      - workflow automation, with the project mounted at /project
# All services talk over the private bridge network "internal".
# NOTE: nesting reconstructed — the committed rendering had lost YAML
# indentation, which is a hard parse error for docker compose.
services:
  postgres:
    image: postgres:16-alpine
    container_name: abletonmcp-postgres
    restart: unless-stopped
    environment:
      POSTGRES_USER: ${POSTGRES_USER:-postgres}
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-changeme}
      POSTGRES_DB: ${POSTGRES_BOOTSTRAP_DB:-postgres}
      PGDATA: /var/lib/postgresql/data/pgdata
      # Consumed by ./initdb scripts to create the per-app databases.
      GITEA_DB_NAME: ${GITEA_DB_NAME:-gitea}
      N8N_DB_NAME: ${N8N_DB_NAME:-n8n}
    volumes:
      - postgres-data:/var/lib/postgresql/data
      - ./initdb:/docker-entrypoint-initdb.d:ro
    ports:
      - "${POSTGRES_PORT:-5432}:5432"
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-postgres} -d ${POSTGRES_BOOTSTRAP_DB:-postgres}"]
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 20s
    networks:
      - internal

  redis:
    image: redis:7-alpine
    container_name: abletonmcp-redis
    restart: unless-stopped
    # List form avoids shell quoting surprises around the password.
    command:
      - redis-server
      - --requirepass
      - ${REDIS_PASSWORD:-changeme}
      - --appendonly
      - "yes"
      - --save
      - "60"
      - "1000"
    volumes:
      - redis-data:/data
    ports:
      - "${REDIS_PORT:-6379}:6379"
    healthcheck:
      test: ["CMD-SHELL", "redis-cli -a ${REDIS_PASSWORD:-changeme} ping | grep -q PONG"]
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 10s
    networks:
      - internal

  gitea:
    image: gitea/gitea:1.21-rootless
    container_name: abletonmcp-gitea
    restart: unless-stopped
    environment:
      # Numeric values quoted: compose environment entries are strings.
      USER_UID: "1000"
      USER_GID: "1000"
      GITEA__database__DB_TYPE: postgres
      GITEA__database__HOST: "postgres:5432"
      GITEA__database__NAME: ${GITEA_DB_NAME:-gitea}
      GITEA__database__USER: ${POSTGRES_USER:-postgres}
      GITEA__database__PASSWD: ${POSTGRES_PASSWORD:-changeme}
      GITEA__server__DOMAIN: ${GITEA_DOMAIN:-localhost}
      GITEA__server__ROOT_URL: ${GITEA_ROOT_URL:-http://localhost:3000}
      GITEA__server__HTTP_PORT: "3000"
      GITEA__server__SSH_DOMAIN: ${GITEA_SSH_DOMAIN:-localhost}
      GITEA__server__SSH_PORT: ${GITEA_SSH_PORT:-222}
      GITEA__server__START_SSH_SERVER: "true"
      GITEA__server__SSH_LISTEN_PORT: "222"
      GITEA__security__INSTALL_LOCK: ${GITEA_SECURITY_INSTALL_LOCK:-true}
      GITEA__service__DISABLE_REGISTRATION: "true"
      GITEA__server__OFFLINE_MODE: ${GITEA_OFFLINE_MODE:-true}
    volumes:
      - gitea-data:/var/lib/gitea
      - gitea-config:/etc/gitea
      - gitea-logs:/var/log/gitea
    ports:
      - "${GITEA_HTTP_PORT:-3000}:3000"
      - "${GITEA_SSH_PORT:-222}:222"
    healthcheck:
      test: ["CMD-SHELL", "wget -q --spider http://localhost:3000/api/healthz || exit 1"]
      interval: 15s
      timeout: 5s
      retries: 10
      start_period: 45s
    depends_on:
      postgres:
        condition: service_healthy
    networks:
      - internal

  n8n:
    image: n8nio/n8n:latest
    container_name: abletonmcp-n8n
    restart: unless-stopped
    environment:
      DB_TYPE: postgresdb
      DB_POSTGRESDB_HOST: postgres
      DB_POSTGRESDB_PORT: "5432"
      DB_POSTGRESDB_DATABASE: ${N8N_DB_NAME:-n8n}
      DB_POSTGRESDB_USER: ${POSTGRES_USER:-postgres}
      DB_POSTGRESDB_PASSWORD: ${POSTGRES_PASSWORD:-changeme}
      N8N_PORT: "5678"
      N8N_PROTOCOL: http
      N8N_HOST: ${N8N_HOST:-localhost}
      N8N_PATH: ${N8N_PATH:-/}
      N8N_ENCRYPTION_KEY: ${N8N_ENCRYPTION_KEY:-changeme-change-this}
      N8N_LOG_LEVEL: ${N8N_LOG_LEVEL:-info}
      N8N_EXECUTIONS_MODE: ${N8N_EXECUTIONS_MODE:-regular}
      N8N_BASIC_AUTH_ACTIVE: ${N8N_BASIC_AUTH_ACTIVE:-true}
      N8N_BASIC_AUTH_USER: ${N8N_BASIC_AUTH_USER:-admin}
      N8N_BASIC_AUTH_PASSWORD: ${N8N_BASIC_AUTH_PASSWORD:-changeme}
      N8N_COOKIE_POLICY: ${N8N_COOKIE_POLICY:-lax}
      N8N_HOST_ALLOW_LIST: ${N8N_HOST_ALLOW_LIST:-localhost,127.0.0.1}
      N8N_WEBHOOK_URL: ${N8N_WEBHOOK_URL:-http://localhost:5678/}
      N8N_EDITOR_BASE_URL: ${N8N_EDITOR_BASE_URL:-http://localhost:5678}
      GENERIC_TIMEZONE: ${TZ:-UTC}
      TZ: ${TZ:-UTC}
      N8N_DIAGNOSTICS_ENABLED: ${N8N_DIAGNOSTICS_ENABLED:-false}
      N8N_VERSION_NOTIFICATIONS_ENABLED: ${N8N_VERSION_NOTIFICATIONS_ENABLED:-false}
    volumes:
      - n8n-data:/home/node/.n8n
      - n8n-logs:/home/node/.npm/_logs
      # Project tree mounted read-write for workflows that edit the repo;
      # the workflow definitions themselves are mounted read-only.
      - ${PROJECT_PATH:-/mnt/c/ProgramData/Ableton/Live 12 Suite/Resources/MIDI Remote Scripts/AbletonMCP_AI}:/project:rw
      - ${PROJECT_PATH:-/mnt/c/ProgramData/Ableton/Live 12 Suite/Resources/MIDI Remote Scripts/AbletonMCP_AI}/automation/workflows:/workflows:ro
    ports:
      - "${N8N_PORT:-5678}:5678"
    healthcheck:
      test: ["CMD-SHELL", "wget -q --spider http://localhost:5678/healthz || exit 1"]
      interval: 15s
      timeout: 5s
      retries: 10
      start_period: 45s
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy
    networks:
      - internal

networks:
  internal:
    name: abletonmcp-network
    driver: bridge

volumes:
  postgres-data:
    name: abletonmcp-postgres-data
  gitea-data:
    name: abletonmcp-gitea-data
  gitea-config:
    name: abletonmcp-gitea-config
  gitea-logs:
    name: abletonmcp-gitea-logs
  redis-data:
    name: abletonmcp-redis-data
  n8n-data:
    name: abletonmcp-n8n-data
  n8n-logs:
    name: abletonmcp-n8n-logs

View File

@@ -0,0 +1,18 @@
#!/bin/sh
# Runs inside the postgres container on first initialization (mounted into
# /docker-entrypoint-initdb.d by docker-compose). Creates the Gitea and n8n
# databases if their names are provided via the environment.
set -eu
# Idempotently create database $1: the SELECT emits a CREATE DATABASE
# statement only when no such database exists, and \gexec executes whatever
# the query produced. <<-EOSQL strips leading tabs (not spaces) only.
create_db() {
db_name="$1"
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "${POSTGRES_BOOTSTRAP_DB:-postgres}" <<-EOSQL
SELECT 'CREATE DATABASE "${db_name}"'
WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = '${db_name}')\gexec
EOSQL
}
# Database names come from the compose environment; skip silently when unset.
if [ -n "${GITEA_DB_NAME:-}" ]; then
create_db "$GITEA_DB_NAME"
fi
if [ -n "${N8N_DB_NAME:-}" ]; then
create_db "$N8N_DB_NAME"
fi

View File

@@ -0,0 +1,12 @@
#!/usr/bin/env bash
# Install and (re)start the ableton-glm-loop systemd unit that drives the
# autonomous GLM/Codex queue.
set -euo pipefail
unit_name="ableton-glm-loop.service"
here="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
sudo cp "$here/$unit_name" "/etc/systemd/system/$unit_name"
sudo systemctl daemon-reload
sudo systemctl enable "$unit_name"
sudo systemctl restart "$unit_name"
# Status output is purely informational; never fail the install on it.
sudo systemctl status --no-pager "$unit_name" || true

View File

@@ -0,0 +1,59 @@
#!/usr/bin/env bash
#
# run_glm_codex_loop.sh <task_file> <report_file>
#
# One full automation cycle: run the GLM worker on the task, then (unless
# SKIP_CODEX_REVIEW=1) hand the result to a Codex review that executes on the
# Windows side via PowerShell. Telegram messages mark each milestone.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
LOCAL_ENV_FILE="${LOCAL_ENV_FILE:-$PROJECT_ROOT/automation/wsl.local.env}"
if [[ -f "$LOCAL_ENV_FILE" ]]; then
# shellcheck disable=SC1090
source "$LOCAL_ENV_FILE"
fi
TASK_FILE="${1:?task file is required}"
REPORT_FILE="${2:?report file is required}"
# GLM_MODEL is not consumed here directly; kept for env symmetry with the
# sibling run_glm_cycle.sh, which derives the same default.
GLM_MODEL="${GLM_MODEL:-glm-5}"
CODEX_MODEL="${CODEX_MODEL:-gpt-5.4}"
SKIP_CODEX_REVIEW="${SKIP_CODEX_REVIEW:-0}"
CODEX_HOME="${CODEX_HOME:-$PROJECT_ROOT/automation/wsl_runtime/codex_home}"
export CODEX_HOME
if [[ -n "${OPENAI_API_KEY:-}" ]]; then
export OPENAI_API_KEY
fi
# Per-run scratch directory, timestamped for later inspection.
RUN_DIR="$PROJECT_ROOT/automation/runs/loop_$(date +%Y%m%d_%H%M%S)"
CODEX_STDOUT_PATH="$RUN_DIR/codex_stdout.txt"
CODEX_MESSAGE_PATH="$RUN_DIR/codex_last_message.txt"
mkdir -p "$RUN_DIR"
# Best-effort Telegram notification; never fail the loop over it.
notify() {
"$SCRIPT_DIR/send_telegram.sh" "$1" || true
}
notify "GLM/Codex loop started: $(basename "$TASK_FILE")"
"$SCRIPT_DIR/run_glm_cycle.sh" "$TASK_FILE" "$REPORT_FILE"
if [[ "$SKIP_CODEX_REVIEW" == "1" ]]; then
notify "GLM/Codex loop finished without Codex review: $(basename "$TASK_FILE")"
exit 0
fi
notify "Codex review started: $(basename "$TASK_FILE")"
# The review runs on Windows, so every path must be translated with wslpath.
WIN_TASK_FILE="$(wslpath -w "$TASK_FILE")"
WIN_REPORT_FILE="$(wslpath -w "$REPORT_FILE")"
WIN_PROJECT_ROOT="$(wslpath -w "$PROJECT_ROOT")"
WIN_CODEX_MESSAGE_PATH="$(wslpath -w "$CODEX_MESSAGE_PATH")"
WIN_REVIEW_SCRIPT="$(wslpath -w "$PROJECT_ROOT/automation/invoke_codex_review.ps1")"
# With pipefail, the pipeline status reflects powershell.exe, not tee.
if ! /mnt/c/Windows/System32/WindowsPowerShell/v1.0/powershell.exe -NoProfile -ExecutionPolicy Bypass -File "$WIN_REVIEW_SCRIPT" -TaskFile "$WIN_TASK_FILE" -ReportFile "$WIN_REPORT_FILE" -ProjectRoot "$WIN_PROJECT_ROOT" -OutputFile "$WIN_CODEX_MESSAGE_PATH" -CodexModel "$CODEX_MODEL" 2>&1 | tee "$CODEX_STDOUT_PATH"; then
notify "Codex review failed: $(basename "$TASK_FILE")"
exit 1
fi
notify "GLM/Codex loop finished: $(basename "$TASK_FILE")"
echo "Loop finished"
echo "Task: $TASK_FILE"
echo "GLM report: $REPORT_FILE"
echo "Codex note: $CODEX_MESSAGE_PATH"
# Fixed: missing space after the colon (inconsistent with the lines above).
echo "Codex stdout: $CODEX_STDOUT_PATH"

View File

@@ -0,0 +1,92 @@
#!/usr/bin/env bash
# run_glm_cycle.sh <task_file> <report_file>
# Runs the GLM worker via the `claude` CLI pointed at an Anthropic-compatible
# endpoint, feeding it a fixed prompt that references the task markdown, and
# requires the worker to leave a report file behind as proof of work.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
LOCAL_ENV_FILE="${LOCAL_ENV_FILE:-$PROJECT_ROOT/automation/wsl.local.env}"
if [[ -f "$LOCAL_ENV_FILE" ]]; then
# shellcheck disable=SC1090
source "$LOCAL_ENV_FILE"
fi
TASK_FILE="${1:?task file is required}"
REPORT_FILE="${2:?report file is required}"
GLM_MODEL="${GLM_MODEL:-glm-5}"
GLM_AGENTS_FILE="${GLM_AGENTS_FILE:-$PROJECT_ROOT/automation/glm_agents.team.json}"
# Route all Anthropic-protocol traffic to the GLM endpoint. The token must
# come from the environment / wsl.local.env — hard failure when missing.
export ANTHROPIC_BASE_URL="${ANTHROPIC_BASE_URL:-https://coding-intl.dashscope.aliyuncs.com/apps/anthropic}"
export ANTHROPIC_AUTH_TOKEN="${ANTHROPIC_AUTH_TOKEN:?ANTHROPIC_AUTH_TOKEN is required}"
export CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC="1"
# Pin every model tier the CLI may select to the same GLM model.
export ANTHROPIC_MODEL="$GLM_MODEL"
export ANTHROPIC_SMALL_FAST_MODEL="$GLM_MODEL"
export ANTHROPIC_DEFAULT_HAIKU_MODEL="$GLM_MODEL"
export ANTHROPIC_DEFAULT_SONNET_MODEL="$GLM_MODEL"
export ANTHROPIC_DEFAULT_OPUS_MODEL="$GLM_MODEL"
# Per-run scratch directory, timestamped for later inspection.
RUN_DIR="$PROJECT_ROOT/automation/runs/glm_$(date +%Y%m%d_%H%M%S)"
STDOUT_PATH="$RUN_DIR/glm_stdout.txt"
mkdir -p "$RUN_DIR" "$(dirname "$REPORT_FILE")"
# Best-effort Telegram notification; failures are ignored.
notify() {
"$SCRIPT_DIR/send_telegram.sh" "$1" || true
}
# Unquoted EOF: the heredoc interpolates the project/task/report paths.
PROMPT=$(cat <<EOF
You are running as the GLM worker on this Linux repository.
Repository root:
$PROJECT_ROOT
Task file to follow exactly:
$TASK_FILE
You must:
1. Read the task markdown and implement the requested changes in the repository.
2. Run the validations requested by the task.
3. Create or overwrite this report file with a truthful report:
$REPORT_FILE
4. Do not overclaim. If something is incomplete, say so explicitly in the report.
5. Keep the diff focused.
6. If custom agents are available, use them aggressively and in parallel where safe:
- planner first
- implementer_core and implementer_aux for disjoint work
- validator before finishing
- retrieval_reviewer or runtime_guard when relevant
- reporter last
Open and follow the task markdown from disk instead of asking for the task again.
EOF
)
ARGS=(
-p
--dangerously-skip-permissions
--effort max
--model "$GLM_MODEL"
--add-dir "$PROJECT_ROOT"
)
# Pass the custom agent-team definition when present.
if [[ -f "$GLM_AGENTS_FILE" ]]; then
AGENTS_JSON="$(cat "$GLM_AGENTS_FILE")"
ARGS+=(--agents "$AGENTS_JSON")
fi
notify "GLM worker started: $(basename "$TASK_FILE")"
# With pipefail, the pipeline status reflects `claude`, not tee.
if ! printf '%s\n' "$PROMPT" | claude "${ARGS[@]}" 2>&1 | tee "$STDOUT_PATH"; then
notify "GLM worker failed: $(basename "$TASK_FILE")"
exit 1
fi
# The report file is the worker's proof of work; its absence is a failure
# even when the CLI itself exited cleanly.
if [[ ! -f "$REPORT_FILE" ]]; then
notify "GLM worker failed: missing report for $(basename "$TASK_FILE")"
echo "missing report: $REPORT_FILE" >&2
exit 1
fi
notify "GLM worker finished: $(basename "$TASK_FILE")"
echo "GLM cycle finished"
echo "Task: $TASK_FILE"
echo "Report: $REPORT_FILE"
echo "Stdout: $STDOUT_PATH"

View File

@@ -0,0 +1,106 @@
#!/usr/bin/env bash
# run_task_queue.sh - autonomous queue runner. Picks enabled+pending tasks
# from task_queue.json and hands each to run_glm_codex_loop.sh, updating the
# task's status/timestamps as it goes. With WATCH=1 it polls forever.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
QUEUE_FILE="${QUEUE_FILE:-$PROJECT_ROOT/automation/task_queue.json}"
LOCAL_ENV_FILE="${LOCAL_ENV_FILE:-$PROJECT_ROOT/automation/wsl.local.env}"
# Defaults captured before sourcing the env file, which may override them.
POLL_SECONDS="${POLL_SECONDS:-30}"
WATCH="${WATCH:-1}"
CONTINUE_ON_ERROR="${CONTINUE_ON_ERROR:-1}"
if [[ -f "$LOCAL_ENV_FILE" ]]; then
# shellcheck disable=SC1090
source "$LOCAL_ENV_FILE"
fi
# Best-effort Telegram notification; failures are ignored.
notify() {
"$SCRIPT_DIR/send_telegram.sh" "$1" || true
}
# Exit 0 when at least one enabled task is still pending (jq -e sets the
# exit status from whether the filter produced output).
queue_has_pending() {
jq -e '.tasks[] | select(.enabled == true and .status == "pending")' "$QUEUE_FILE" >/dev/null
}
# Emit the first enabled+pending task as base64-encoded JSON, or nothing when
# the queue is drained. Uses jq's first() instead of `| head -n 1`: under
# `set -o pipefail`, head closing the pipe early can kill jq with SIGPIPE
# (exit 141) once the queue grows past the pipe buffer, which would abort the
# whole runner on a healthy queue.
read_next_task() {
jq -r 'first(.tasks[] | select(.enabled == true and .status == "pending")) | @base64' "$QUEUE_FILE"
}
# Set .status plus one extra field (e.g. started_at/completed_at) on the task
# whose .id matches $1. The queue file is rewritten atomically: jq writes to
# a mktemp file in the same directory, then mv replaces the original.
update_task_status() {
local task_id="$1"
local status="$2"
local field="$3"
local value="$4"
local tmp
local queue_dir
queue_dir="$(dirname "$QUEUE_FILE")"
tmp="$(mktemp "$queue_dir/.task_queue.tmp.XXXXXX")"
jq --arg id "$task_id" --arg status "$status" --arg field "$field" --arg value "$value" '
.tasks |= map(
if .id == $id then
.status = $status | .[$field] = $value
else
.
end
)' "$QUEUE_FILE" > "$tmp"
mv "$tmp" "$QUEUE_FILE"
}
# Record an error message on the task whose .id matches $1. Mirrors
# update_task_status: the queue file is replaced atomically via a temp file
# created in the same directory (so mv is a same-filesystem rename).
set_task_error() {
local id="$1"
local reason="$2"
local queue_dir tmp_file
queue_dir="$(dirname "$QUEUE_FILE")"
tmp_file="$(mktemp "$queue_dir/.task_queue.tmp.XXXXXX")"
jq --arg id "$id" --arg msg "$reason" '
.tasks |= map(
if .id == $id then
.error = $msg
else
.
end
)' "$QUEUE_FILE" > "$tmp_file"
mv "$tmp_file" "$QUEUE_FILE"
}
notify "AbletonMCP_AI queue runner started on $(date '+%Y-%m-%d %H:%M:%S')"
# Main loop: drain pending tasks one at a time; in WATCH mode keep polling.
while true; do
if ! queue_has_pending; then
if [[ "$WATCH" == "1" ]]; then
sleep "$POLL_SECONDS"
continue
fi
break
fi
task_b64="$(read_next_task)"
if [[ -z "$task_b64" ]]; then
sleep "$POLL_SECONDS"
continue
fi
# Decode the base64-wrapped task object and pull out the fields we need.
task_json="$(printf '%s' "$task_b64" | base64 -d)"
task_id="$(printf '%s' "$task_json" | jq -r '.id')"
task_title="$(printf '%s' "$task_json" | jq -r '.title')"
task_file_rel="$(printf '%s' "$task_json" | jq -r '.task_file')"
report_file_rel="$(printf '%s' "$task_json" | jq -r '.report_file')"
# Queue entries may use Windows backslashes; normalize to forward slashes.
task_file="$PROJECT_ROOT/${task_file_rel//\\//}"
report_file="$PROJECT_ROOT/${report_file_rel//\\//}"
update_task_status "$task_id" "running" "started_at" "$(date -Iseconds)"
notify "Queue task started: [$task_id] $task_title"
if "$SCRIPT_DIR/run_glm_codex_loop.sh" "$task_file" "$report_file"; then
update_task_status "$task_id" "completed" "completed_at" "$(date -Iseconds)"
notify "Queue task completed: [$task_id] $task_title"
else
# Mark the failure but keep processing later tasks unless told otherwise.
update_task_status "$task_id" "failed" "failed_at" "$(date -Iseconds)"
set_task_error "$task_id" "task runner failed"
notify "Queue task failed: [$task_id] $task_title"
if [[ "$CONTINUE_ON_ERROR" != "1" ]]; then
exit 1
fi
fi
done

View File

@@ -0,0 +1,281 @@
#!/usr/bin/env bash
#
# install.sh - Install Docker, Docker Compose, and local Python runtime on Ubuntu 24.04 WSL2
# Idempotent: safe to run multiple times
#
set -euo pipefail
# ANSI colors for the log helpers below.
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly NC='\033[0m'
log_info() { echo -e "${GREEN}[INFO]${NC} $*"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $*"; }
log_error() { echo -e "${RED}[ERROR]${NC} $*"; }
# Path layout derived from this script's location:
# .../automation/wsl/<scripts> -> wsl dir -> automation dir -> repo root.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
WSL_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
AUTOMATION_DIR="$(cd "$WSL_DIR/.." && pwd)"
PROJECT_ROOT="$(cd "$AUTOMATION_DIR/.." && pwd)"
RUNTIME_DIR="$AUTOMATION_DIR/wsl_runtime"
VENV_DIR="$RUNTIME_DIR/venv"
# Refuse to run as root; individual commands escalate with sudo as needed.
check_sudo() {
if (( EUID == 0 )); then
log_error "This script should not be run as root. It will use sudo when needed."
exit 1
fi
}
# Identify the distro from /etc/os-release. Hard-fails only when the file is
# missing; a non-Ubuntu distro just gets a warning.
detect_ubuntu() {
if [[ ! -f /etc/os-release ]]; then
log_error "Cannot detect OS version. /etc/os-release not found."
exit 1
fi
# shellcheck disable=SC1091
source /etc/os-release
if [[ "${ID:-}" != "ubuntu" ]]; then
log_warn "This script is designed for Ubuntu. Detected: ${ID:-unknown}"
fi
log_info "Detected Ubuntu ${VERSION_ID:-unknown}"
}
# Warn (never fail) when we do not appear to be running inside WSL, detected
# via the "microsoft" marker in /proc/version.
check_wsl2() {
if [[ ! -f /proc/version ]]; then
log_warn "Cannot verify WSL environment"
return
fi
if grep -qi microsoft /proc/version; then
log_info "Running in WSL environment"
return
fi
log_warn "Not running in WSL. This script is designed for WSL2."
}
# Install Docker Engine + Compose plugin from Docker's official apt repo
# (skipped when docker is already on PATH), add the current user to the
# docker group, and enable/start the daemon via systemd.
install_docker() {
log_info "Checking Docker installation..."
if command -v docker >/dev/null 2>&1; then
log_info "Docker already installed: $(docker --version)"
else
log_info "Installing Docker..."
sudo apt-get update -q
sudo apt-get install -y \
ca-certificates \
curl \
gnupg \
lsb-release \
software-properties-common
sudo install -m 0755 -d /etc/apt/keyrings
# Fetch Docker's signing key once; later runs reuse the existing keyring.
if [[ ! -f /etc/apt/keyrings/docker.gpg ]]; then
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg
sudo chmod a+r /etc/apt/keyrings/docker.gpg
fi
local codename
codename=$(. /etc/os-release && echo "$VERSION_CODENAME")
sudo tee /etc/apt/sources.list.d/docker.list >/dev/null <<EOF
deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu $codename stable
EOF
sudo apt-get update -q
sudo apt-get install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
fi
# Group membership takes effect on the next login; warn so it's not a surprise.
if ! groups "$USER" | grep -q '\bdocker\b'; then
log_info "Adding user $USER to docker group..."
sudo usermod -aG docker "$USER"
log_warn "A new login session may be needed for docker group membership."
fi
# NOTE(review): requires systemd enabled in WSL (/etc/wsl.conf) — confirm.
sudo systemctl enable docker
sudo systemctl start docker
}
# Ensure python3 plus pip/venv tooling is present; apt work is skipped when
# an interpreter is already on PATH.
install_python() {
log_info "Checking Python installation..."
if command -v python3 >/dev/null 2>&1; then
log_info "Python already installed: $(python3 --version)"
return
fi
sudo apt-get update -q
sudo apt-get install -y python3 python3-pip python3-venv python3-full
}
# Install the CLI utilities the automation scripts rely on (jq is required
# by the queue runner; the rest are general quality-of-life tools).
install_utilities() {
log_info "Installing system utilities..."
sudo apt-get update -q
sudo apt-get install -y \
jq \
git \
curl \
wget \
rsync \
net-tools \
dnsutils \
htop \
ncdu \
tree \
unzip \
zip \
httpie \
python3-rich \
pipx
}
# Write a first-time /etc/docker/daemon.json (log rotation, containerd
# snapshotter) and append a DOCKER_HOST helper to ~/.bashrc. An existing
# daemon.json is left untouched so manual edits survive re-runs.
configure_docker_wsl2() {
log_info "Configuring Docker for WSL..."
local docker_config_dir="/etc/docker"
local docker_config_file="$docker_config_dir/daemon.json"
if [[ ! -f "$docker_config_file" ]]; then
sudo mkdir -p "$docker_config_dir"
# NOTE(review): "iptables": false disables Docker's own firewall rules —
# container networking then depends on external NAT setup; confirm this
# is intended for this WSL configuration.
sudo tee "$docker_config_file" >/dev/null <<'EOF'
{
"log-driver": "json-file",
"log-opts": {
"max-size": "10m",
"max-file": "3"
},
"features": {
"containerd-snapshotter": true
},
"iptables": false
}
EOF
sudo systemctl restart docker
fi
# Append the helper exactly once, keyed on the marker comment.
local bashrc_file="$HOME/.bashrc"
if ! grep -q 'WSL Docker helpers' "$bashrc_file" 2>/dev/null; then
cat >> "$bashrc_file" <<'EOF'
# WSL Docker helpers
export DOCKER_HOST=unix:///var/run/docker.sock
EOF
fi
}
# Expose the project under a short, space-free path in $HOME (the real root
# under /mnt/c contains spaces). FIX: if a regular file/directory already
# occupies the link path, `ln -sfn` would fail (non-empty directory) and
# abort the whole install under `set -e` — detect that case and warn instead.
handle_windows_paths() {
log_info "Ensuring project symlink exists..."
local link="$HOME/ableton-mcp-ai"
if [[ -L "$link" ]]; then
return
fi
if [[ -e "$link" ]]; then
log_warn "$link exists and is not a symlink; skipping"
return
fi
ln -sfn "$PROJECT_ROOT" "$link"
}
# Create (or reuse) the project virtualenv under wsl_runtime/ and install
# every requirements.txt found in the known locations. Warns when none exist.
install_python_dependencies() {
log_info "Preparing local virtual environment..."
mkdir -p "$RUNTIME_DIR"
if [[ ! -d "$VENV_DIR" ]]; then
python3 -m venv "$VENV_DIR"
fi
# shellcheck disable=SC1091
source "$VENV_DIR/bin/activate"
python -m pip install --upgrade pip
local found_req=false
local requirements_files=(
"$PROJECT_ROOT/MCP_Server/requirements.txt"
"$PROJECT_ROOT/requirements.txt"
)
for req_file in "${requirements_files[@]}"; do
if [[ -f "$req_file" ]]; then
log_info "Installing dependencies from: $req_file"
python -m pip install -r "$req_file"
found_req=true
fi
done
if [[ "$found_req" == "false" ]]; then
log_warn "No requirements.txt files found"
fi
# Leave the caller's shell environment untouched.
deactivate
}
# Check every dependency installed above (docker, compose, python3, the venv,
# jq); log each result and return non-zero if anything is missing.
verify_installation() {
log_info "Verifying installation..."
local all_good=true
if command -v docker >/dev/null 2>&1; then
log_info "OK Docker: $(docker --version)"
else
log_error "FAIL Docker not found"
all_good=false
fi
if docker compose version >/dev/null 2>&1; then
log_info "OK Docker Compose: $(docker compose version)"
else
log_error "FAIL Docker Compose not found"
all_good=false
fi
if command -v python3 >/dev/null 2>&1; then
log_info "OK Python: $(python3 --version)"
else
log_error "FAIL Python3 not found"
all_good=false
fi
if [[ -x "$VENV_DIR/bin/python" ]]; then
log_info "OK Venv: $VENV_DIR"
else
log_error "FAIL Venv not found at $VENV_DIR"
all_good=false
fi
if command -v jq >/dev/null 2>&1; then
log_info "OK jq installed"
else
log_error "FAIL jq not found"
all_good=false
fi
if [[ "$all_good" == "true" ]]; then
log_info "All dependencies installed successfully"
return 0
fi
log_error "Some dependencies failed to install"
return 1
}
# Orchestrate the full install: sanity checks, package installs, Docker/WSL
# configuration, Python venv setup, then a final verification pass (whose
# non-zero return propagates as this script's exit status via set -e).
main() {
log_info "Starting AbletonMCP-AI WSL installation..."
echo
check_sudo
detect_ubuntu
check_wsl2
echo
install_docker
install_python
install_utilities
configure_docker_wsl2
handle_windows_paths
install_python_dependencies
echo
verify_installation
echo
log_info "Installation complete"
log_info "Next step: run ./setup.sh and then ./start.sh"
}
main "$@"

View File

@@ -0,0 +1,21 @@
#!/usr/bin/env bash
# Install the project's systemd units from automation/wsl/systemd/ and enable
# the default pair (stack + queue runner). Must be run with sudo.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
WSL_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
SYSTEMD_DIR="$WSL_DIR/systemd"
if [[ $EUID -ne 0 ]]; then
echo "Run with sudo"
exit 1
fi
# FIX: without nullglob, an empty systemd/ directory leaves the literal
# pattern '*.service' in the loop and cp aborts under set -e with a
# confusing "No such file" error. Fail explicitly instead.
shopt -s nullglob
unit_files=("$SYSTEMD_DIR"/*.service)
if (( ${#unit_files[@]} == 0 )); then
echo "No .service files found in $SYSTEMD_DIR" >&2
exit 1
fi
for service_file in "${unit_files[@]}"; do
cp "$service_file" /etc/systemd/system/"$(basename "$service_file")"
done
systemctl daemon-reload
systemctl enable abletonmcp-stack.service abletonmcp-queue-runner.service
echo "Installed systemd units"
echo "Enabled by default: abletonmcp-stack.service, abletonmcp-queue-runner.service"
echo "Optional unit left disabled: abletonmcp-glm-runner.service"

View File

@@ -0,0 +1,39 @@
#!/usr/bin/env bash
# logs.sh [all|docker|queue] - follow the stack's logs: compose logs, the
# queue-runner log file, or both interleaved (default).
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
WSL_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
AUTOMATION_DIR="$(cd "$WSL_DIR/.." && pwd)"
DOCKER_ENV_FILE="$WSL_DIR/.env"
COMPOSE_FILE="$WSL_DIR/docker-compose.yml"
LOGS_DIR="$AUTOMATION_DIR/wsl_runtime/logs"
follow="${1:-all}"
# All compose invocations share the same env file and compose file.
compose_cmd() {
docker compose --env-file "$DOCKER_ENV_FILE" -f "$COMPOSE_FILE" "$@"
}
case "$follow" in
docker)
compose_cmd logs -f
;;
queue)
tail -f "$LOGS_DIR/queue-runner.log"
;;
all)
# Run both followers in the background and wait, so Ctrl-C stops both.
compose_cmd logs -f &
docker_pid=$!
if [[ -f "$LOGS_DIR/queue-runner.log" ]]; then
tail -f "$LOGS_DIR/queue-runner.log" &
tail_pid=$!
wait "$docker_pid" "$tail_pid"
else
wait "$docker_pid"
fi
;;
*)
echo "Usage: $0 [all|docker|queue]"
exit 1
;;
esac

View File

@@ -0,0 +1,8 @@
#!/usr/bin/env bash
# Restart the whole stack: full stop, a short settle pause, then start.
set -euo pipefail
here="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
"$here/stop.sh"
sleep 2
"$here/start.sh"

View File

@@ -0,0 +1,140 @@
#!/usr/bin/env bash
# setup.sh - one-time preparation: runtime directories, project symlink, and
# generated env files (the docker .env and the runner wsl.local.env), with
# random secrets minted on first run.
set -euo pipefail
# ANSI colors for the log helpers below.
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly NC='\033[0m'
log_info() { echo -e "${GREEN}[INFO]${NC} $*"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $*"; }
log_step() { echo -e "${BLUE}[STEP]${NC} $*"; }
# Path layout: automation/wsl/<scripts> -> wsl dir -> automation -> repo root.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
WSL_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
AUTOMATION_DIR="$(cd "$WSL_DIR/.." && pwd)"
PROJECT_ROOT="$(cd "$AUTOMATION_DIR/.." && pwd)"
RUNTIME_DIR="$AUTOMATION_DIR/wsl_runtime"
DOCKER_ENV_FILE="$WSL_DIR/.env"
RUNNER_ENV_FILE="$AUTOMATION_DIR/wsl.local.env"
PROJECT_LINK="$HOME/ableton-mcp-ai"
# Print $1 (default 16) bytes of cryptographically random hex. Prefers
# openssl; falls back to Python's secrets module when openssl is absent.
# BUG FIX: the previous fallback ignored the requested length and always
# produced 16 bytes, silently weakening e.g. generate_secret 32 (used for
# N8N_ENCRYPTION_KEY). The length is passed via the environment to avoid
# interpolating into Python source.
generate_secret() {
local nbytes="${1:-16}"
openssl rand -hex "$nbytes" 2>/dev/null || SECRET_BYTES="$nbytes" python3 -c 'import os, secrets; print(secrets.token_hex(int(os.environ["SECRET_BYTES"])))'
}
# Create every runtime/working directory the automation expects (mkdir -p is
# idempotent, so re-runs are harmless).
ensure_dirs() {
log_step "Creating runtime directories"
mkdir -p \
"$RUNTIME_DIR/logs" \
"$RUNTIME_DIR/pids" \
"$RUNTIME_DIR/data" \
"$AUTOMATION_DIR/reports" \
"$AUTOMATION_DIR/runs" \
"$AUTOMATION_DIR/tasks" \
"$AUTOMATION_DIR/workflows" \
"$WSL_DIR/initdb"
}
# Point the short home-directory link at the project root; no-op when the
# link is already in place.
ensure_symlink() {
[[ -L "$PROJECT_LINK" ]] || ln -sfn "$PROJECT_ROOT" "$PROJECT_LINK"
log_info "Project link: $PROJECT_LINK"
}
# Generate the docker compose .env on first run only (existing files are
# preserved so rotated/custom passwords survive re-runs). Secrets are minted
# with generate_secret; the file is chmod 600 since it holds credentials.
write_docker_env() {
if [[ -f "$DOCKER_ENV_FILE" ]]; then
log_info "Docker env already exists: $DOCKER_ENV_FILE"
return
fi
log_step "Generating docker env"
cat > "$DOCKER_ENV_FILE" <<EOF
PROJECT_PATH='$PROJECT_ROOT'
TZ='America/Buenos_Aires'
POSTGRES_USER='postgres'
POSTGRES_PASSWORD='$(generate_secret 16)'
POSTGRES_BOOTSTRAP_DB='postgres'
POSTGRES_PORT='5432'
GITEA_DOMAIN='localhost'
GITEA_ROOT_URL='http://localhost:3000'
GITEA_HTTP_PORT='3000'
GITEA_SSH_DOMAIN='localhost'
GITEA_SSH_PORT='222'
GITEA_ADMIN_USER='giteaadmin'
GITEA_ADMIN_PASSWORD='$(generate_secret 16)'
GITEA_ADMIN_EMAIL='admin@localhost'
GITEA_DB_NAME='gitea'
GITEA_SECURITY_INSTALL_LOCK='true'
GITEA_OFFLINE_MODE='true'
REDIS_PASSWORD='$(generate_secret 16)'
REDIS_PORT='6379'
N8N_HOST='localhost'
N8N_PORT='5678'
N8N_PATH='/'
N8N_WEBHOOK_URL='http://localhost:5678/'
N8N_EDITOR_BASE_URL='http://localhost:5678'
N8N_DB_NAME='n8n'
N8N_ENCRYPTION_KEY='$(generate_secret 32)'
N8N_BASIC_AUTH_ACTIVE='true'
N8N_BASIC_AUTH_USER='admin'
N8N_BASIC_AUTH_PASSWORD='$(generate_secret 16)'
N8N_HOST_ALLOW_LIST='localhost,127.0.0.1'
N8N_EXECUTIONS_MODE='regular'
N8N_LOG_LEVEL='info'
N8N_DIAGNOSTICS_ENABLED='false'
N8N_VERSION_NOTIFICATIONS_ENABLED='false'
N8N_COOKIE_POLICY='lax'
COMPOSE_PROJECT_NAME='abletonmcp'
EOF
chmod 600 "$DOCKER_ENV_FILE"
}
# Generate a skeleton wsl.local.env on first run only; the API tokens are
# deliberately left blank for the operator to fill in (never committed).
# chmod 600 because the filled-in file will contain credentials.
ensure_runner_env() {
if [[ -f "$RUNNER_ENV_FILE" ]]; then
log_info "Runner env already exists: $RUNNER_ENV_FILE"
return
fi
log_step "Generating runner env"
cat > "$RUNNER_ENV_FILE" <<EOF
export ANTHROPIC_BASE_URL=''
export ANTHROPIC_AUTH_TOKEN=''
export GLM_MODEL='glm-5'
export GLM_API_KEY=''
export CODEX_MODEL='gpt-5.4'
export TELEGRAM_BOT_TOKEN=''
export TELEGRAM_CHAT_ID=''
export CODEX_HOME='$AUTOMATION_DIR/wsl_runtime/codex_home'
export GLM_AGENTS_FILE='$AUTOMATION_DIR/glm_agents.team.json'
export POLL_SECONDS='30'
export WATCH='1'
export CONTINUE_ON_ERROR='1'
EOF
chmod 600 "$RUNNER_ENV_FILE"
}
# Run all preparation steps, then print the follow-up checklist.
main() {
log_info "Preparing AbletonMCP_AI WSL stack"
ensure_dirs
ensure_symlink
write_docker_env
ensure_runner_env
echo
log_info "Files ready:"
echo " - $DOCKER_ENV_FILE"
echo " - $RUNNER_ENV_FILE"
echo
log_info "Next:"
echo " 1. Review tokens in $RUNNER_ENV_FILE"
echo " 2. Review service passwords in $DOCKER_ENV_FILE"
echo " 3. Run ./install.sh if Docker is not installed"
echo " 4. Run ./start.sh"
}
main "$@"

View File

@@ -0,0 +1,143 @@
#!/usr/bin/env bash
# start.sh - bring up the Docker stack (postgres, redis, gitea, n8n) and the
# background queue runner. Env files generated by setup.sh are sourced first.
set -euo pipefail
# ANSI colors for the log helpers below.
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly NC='\033[0m'
log_info() { echo -e "${GREEN}[INFO]${NC} $*"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $*"; }
log_error() { echo -e "${RED}[ERROR]${NC} $*"; }
log_step() { echo -e "${BLUE}[STEP]${NC} $*"; }
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
WSL_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
AUTOMATION_DIR="$(cd "$WSL_DIR/.." && pwd)"
PROJECT_ROOT="$(cd "$AUTOMATION_DIR/.." && pwd)"
RUNNER_ENV_FILE="$AUTOMATION_DIR/wsl.local.env"
DOCKER_ENV_FILE="$WSL_DIR/.env"
COMPOSE_FILE="$WSL_DIR/docker-compose.yml"
RUNTIME_DIR="$AUTOMATION_DIR/wsl_runtime"
LOGS_DIR="$RUNTIME_DIR/logs"
PID_DIR="$RUNTIME_DIR/pids"
START_QUEUE_RUNNER="${START_QUEUE_RUNNER:-1}"
mkdir -p "$LOGS_DIR" "$PID_DIR"
if [[ -f "$RUNNER_ENV_FILE" ]]; then
# shellcheck disable=SC1090
source "$RUNNER_ENV_FILE"
fi
# set -a auto-exports the docker .env variables so the helper functions
# below (healthchecks, admin creation) can read them.
if [[ -f "$DOCKER_ENV_FILE" ]]; then
# shellcheck disable=SC1090
set -a
source "$DOCKER_ENV_FILE"
set +a
fi
# All compose invocations share the same env file and compose file.
compose_cmd() {
docker compose --env-file "$DOCKER_ENV_FILE" -f "$COMPOSE_FILE" "$@"
}
# Fail fast with a specific message when docker, the compose plugin, the
# daemon, or either config file is missing.
check_prerequisites() {
log_step "Checking prerequisites"
command -v docker >/dev/null || { log_error "Docker is not installed"; exit 1; }
docker compose version >/dev/null || { log_error "Docker Compose plugin is not available"; exit 1; }
docker info >/dev/null || { log_error "Docker daemon is not running"; exit 1; }
[[ -f "$DOCKER_ENV_FILE" ]] || { log_error "Missing docker env: $DOCKER_ENV_FILE"; exit 1; }
[[ -f "$COMPOSE_FILE" ]] || { log_error "Missing compose file: $COMPOSE_FILE"; exit 1; }
}
# Block until postgres answers pg_isready (up to 60 attempts, 2s apart);
# a timeout is fatal because everything downstream needs the database.
wait_for_postgres() {
log_info "Waiting for PostgreSQL"
local attempt=0
while (( attempt < 60 )); do
if compose_cmd exec -T postgres pg_isready -U "${POSTGRES_USER:-postgres}" -d "${POSTGRES_BOOTSTRAP_DB:-postgres}" >/dev/null 2>&1; then
return 0
fi
sleep 2
attempt=$((attempt + 1))
done
log_error "PostgreSQL did not become ready in time"
exit 1
}
# Poll an HTTP health endpoint ($2) for service $1 (60 attempts, 2s apart).
# Returns 1 with a warning instead of failing — web UIs are non-critical.
wait_for_service_http() {
local name="$1" endpoint="$2" tries
log_info "Waiting for $name"
for (( tries = 0; tries < 60; tries++ )); do
curl -fsS "$endpoint" >/dev/null 2>&1 && return 0
sleep 2
done
log_warn "$name is not healthy yet: $endpoint"
return 1
}
# Create database $1 inside the postgres container unless it already exists
# (existence checked against the pg_database catalog).
ensure_database() {
local db_name="$1"
if compose_cmd exec -T postgres psql -U "${POSTGRES_USER:-postgres}" -d "${POSTGRES_BOOTSTRAP_DB:-postgres}" -tAc "SELECT 1 FROM pg_database WHERE datname='${db_name}'" | grep -q 1; then
return 0
fi
compose_cmd exec -T postgres psql -U "${POSTGRES_USER:-postgres}" -d "${POSTGRES_BOOTSTRAP_DB:-postgres}" -c "CREATE DATABASE \"${db_name}\""
}
# Create the Gitea admin account inside the container unless the username
# already appears in `gitea admin user list`. Creation is best-effort: on
# failure we only warn so first-run UI setup remains possible.
# NOTE(review): the user/password are interpolated into the sh -c string, so
# a generated password containing a single quote would break the command —
# generate_secret only emits hex today, but confirm if that ever changes.
ensure_gitea_admin() {
local user="${GITEA_ADMIN_USER:-giteaadmin}"
local password="${GITEA_ADMIN_PASSWORD:-changeme}"
local email="${GITEA_ADMIN_EMAIL:-admin@localhost}"
if compose_cmd exec -T gitea sh -c "HOME=/tmp /usr/local/bin/gitea admin user list 2>/dev/null | awk 'NR > 1 && \$2 == \"${user}\" { found=1 } END { exit found ? 0 : 1 }'"; then
return 0
fi
compose_cmd exec -T gitea sh -c "HOME=/tmp /usr/local/bin/gitea admin user create --admin --username '${user}' --password '${password}' --email '${email}' --must-change-password=false" >/dev/null 2>&1 || log_warn "Could not auto-create Gitea admin user; complete first-run in UI if needed"
}
# Bring the stack up in dependency order: databases first, then the apps
# once their databases are guaranteed to exist. HTTP health waits are
# advisory (|| true) — app containers keep retrying on their own.
start_docker_stack() {
log_step "Starting Docker services"
compose_cmd up -d postgres redis
wait_for_postgres
ensure_database "${GITEA_DB_NAME:-gitea}"
ensure_database "${N8N_DB_NAME:-n8n}"
compose_cmd up -d gitea n8n
wait_for_service_http "Gitea" "http://localhost:${GITEA_HTTP_PORT:-3000}/api/healthz" || true
wait_for_service_http "n8n" "http://localhost:${N8N_PORT:-5678}/healthz" || true
ensure_gitea_admin
}
# Launch the queue runner in the background unless it is disabled, already
# running (live pid file), or already managed by systemd.
start_queue_runner() {
if [[ "$START_QUEUE_RUNNER" != "1" ]]; then
log_info "Queue runner startup skipped by START_QUEUE_RUNNER=$START_QUEUE_RUNNER"
return
fi
if command -v systemctl >/dev/null 2>&1 && systemctl is-active abletonmcp-queue-runner.service >/dev/null 2>&1; then
log_info "Queue runner already managed by systemd"
return
fi
# kill -0 probes whether the recorded PID is still alive.
local pid_file="$PID_DIR/queue-runner.pid"
if [[ -f "$pid_file" ]] && kill -0 "$(cat "$pid_file")" 2>/dev/null; then
log_info "Queue runner already running"
return
fi
log_step "Starting autonomous queue runner"
# nohup + background so the runner survives this shell exiting.
nohup bash "$WSL_DIR/run_task_queue.sh" > "$LOGS_DIR/queue-runner.log" 2>&1 &
echo $! > "$pid_file"
log_info "Queue runner PID: $(cat "$pid_file")"
}
# Entry point: verify prerequisites, start containers, start the runner,
# then print the local service URLs.
main() {
check_prerequisites
start_docker_stack
start_queue_runner
echo
log_info "Stack started"
echo " Gitea: http://localhost:${GITEA_HTTP_PORT:-3000}"
echo " n8n: http://localhost:${N8N_PORT:-5678}"
}
main "$@"

View File

@@ -0,0 +1,58 @@
#!/usr/bin/env bash
# status.sh - quick health overview: docker daemon, compose services, the
# queue runner (pid file or systemd), and the runtime log files.
set -euo pipefail
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly RED='\033[0;31m'
readonly NC='\033[0m'
ok() { echo -e "${GREEN}OK${NC} $*"; }
warn() { echo -e "${YELLOW}WARN${NC} $*"; }
fail() { echo -e "${RED}FAIL${NC} $*"; }
step() { echo -e "${BLUE}$*${NC}"; }
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
WSL_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
AUTOMATION_DIR="$(cd "$WSL_DIR/.." && pwd)"
DOCKER_ENV_FILE="$WSL_DIR/.env"
COMPOSE_FILE="$WSL_DIR/docker-compose.yml"
PID_DIR="$AUTOMATION_DIR/wsl_runtime/pids"
LOGS_DIR="$AUTOMATION_DIR/wsl_runtime/logs"
# All compose invocations share the same env file and compose file.
compose_cmd() {
docker compose --env-file "$DOCKER_ENV_FILE" -f "$COMPOSE_FILE" "$@"
}
step "Docker"
if command -v docker >/dev/null 2>&1 && docker info >/dev/null 2>&1; then
ok "docker daemon running"
else
fail "docker daemon unavailable"
fi
echo
step "Compose services"
if command -v docker >/dev/null 2>&1 && [[ -f "$COMPOSE_FILE" ]]; then
compose_cmd ps || true
else
warn "compose file or docker missing"
fi
echo
step "Queue runner"
# Either a live pid-file process or an active systemd unit counts as running.
if [[ -f "$PID_DIR/queue-runner.pid" ]] && kill -0 "$(cat "$PID_DIR/queue-runner.pid")" 2>/dev/null; then
ok "queue runner PID $(cat "$PID_DIR/queue-runner.pid")"
elif command -v systemctl >/dev/null 2>&1 && systemctl is-active abletonmcp-queue-runner.service >/dev/null 2>&1; then
ok "queue runner managed by systemd"
else
warn "queue runner not running"
fi
echo
step "Logs"
if [[ -d "$LOGS_DIR" ]]; then
ls -1 "$LOGS_DIR" | sed 's/^/ - /'
else
warn "no logs directory"
fi
View File

@@ -0,0 +1,52 @@
#!/usr/bin/env bash
# stop.sh - tear down the stack: kill the queue runner, then `compose down`
# (extra CLI args, e.g. -v, are forwarded to compose down).
set -euo pipefail
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly NC='\033[0m'
log_info() { echo -e "${GREEN}[INFO]${NC} $*"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $*"; }
log_step() { echo -e "${BLUE}[STEP]${NC} $*"; }
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
WSL_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
AUTOMATION_DIR="$(cd "$WSL_DIR/.." && pwd)"
DOCKER_ENV_FILE="$WSL_DIR/.env"
COMPOSE_FILE="$WSL_DIR/docker-compose.yml"
PID_DIR="$AUTOMATION_DIR/wsl_runtime/pids"
# All compose invocations share the same env file and compose file.
compose_cmd() {
docker compose --env-file "$DOCKER_ENV_FILE" -f "$COMPOSE_FILE" "$@"
}
# Terminate a background runner tracked by pid file $1: TERM first, then a
# KILL after a 2s grace period; the pid file is always removed afterwards.
stop_runner() {
local pid_file="$1" pid
[[ -f "$pid_file" ]] || return 0
pid="$(cat "$pid_file")"
if kill -0 "$pid" 2>/dev/null; then
kill -TERM "$pid" 2>/dev/null || true
sleep 2
kill -KILL "$pid" 2>/dev/null || true
fi
rm -f "$pid_file"
}
# Stop the runner first so it cannot launch new work while containers die;
# extra args (e.g. -v) are forwarded to `compose down`.
main() {
log_step "Stopping queue runner"
stop_runner "$PID_DIR/queue-runner.pid"
echo
log_step "Stopping Docker services"
if command -v docker >/dev/null 2>&1; then
compose_cmd down "$@" || true
else
log_warn "Docker not installed"
fi
log_info "Stack stopped"
}
main "$@"

View File

@@ -0,0 +1,28 @@
#!/usr/bin/env bash
#
# send_telegram.sh <message> - best-effort Telegram notification.
# Exits 0 silently when the message or the bot credentials are missing, so
# callers can invoke it unconditionally.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
LOCAL_ENV_FILE="${LOCAL_ENV_FILE:-$PROJECT_ROOT/automation/wsl.local.env}"
if [[ -f "$LOCAL_ENV_FILE" ]]; then
# shellcheck disable=SC1090
source "$LOCAL_ENV_FILE"
fi
MESSAGE="${1:-}"
if [[ -z "$MESSAGE" ]]; then
exit 0
fi
BOT_TOKEN="${TELEGRAM_BOT_TOKEN:-}"
CHAT_ID="${TELEGRAM_CHAT_ID:-}"
if [[ -z "$BOT_TOKEN" || -z "$CHAT_ID" ]]; then
exit 0
fi
# FIX: --max-time caps the request so a hung Telegram API can never stall
# the queue runner, which notifies synchronously through this script.
curl -fsS --max-time 10 -X POST "https://api.telegram.org/bot${BOT_TOKEN}/sendMessage" \
--data-urlencode "chat_id=${CHAT_ID}" \
--data-urlencode "text=${MESSAGE}" \
--data "disable_web_page_preview=true" >/dev/null