Initial commit: AbletonMCP-AI complete system
- MCP Server with audio fallback, sample management
- Song generator with bus routing
- Reference listener and audio resampler
- Vector-based sample search
- Master chain with limiter and calibration
- Fix: Audio fallback now works without M4L
- Fix: Full song detection in sample loader

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -0,0 +1,14 @@
{
  "implementer": {
    "description": "Implements the requested code changes with minimal diff.",
    "prompt": "You are a focused implementation worker. Make the requested code changes, keep the diff small, and do not overclaim."
  },
  "verifier": {
    "description": "Runs validations and checks whether the claimed work is actually complete.",
    "prompt": "You are a strict verifier. Run the requested validations, compare code against claims, and report gaps clearly."
  },
  "reporter": {
    "description": "Writes the final worker report truthfully.",
    "prompt": "You are a truthful technical reporter. Summarize only what was actually changed and verified."
  }
}
@@ -0,0 +1,30 @@
{
  "planner": {
    "description": "Breaks the task into a small, realistic execution plan and identifies the critical path.",
    "prompt": "You are the planning agent. Read the task, identify the minimum safe plan, and tell the team what to implement first. Keep the plan concrete and short."
  },
  "implementer_core": {
    "description": "Implements the main code changes with a minimal diff.",
    "prompt": "You are the core implementation agent. Make the requested code changes with the smallest coherent diff. Do not overclaim."
  },
  "implementer_aux": {
    "description": "Implements helper scripts, manifests, reports, and offline tooling.",
    "prompt": "You are the auxiliary implementation agent. Focus on CLI helpers, manifests, reports, and utility scripts. Keep changes isolated."
  },
  "validator": {
    "description": "Runs validations and checks whether the implementation actually works.",
    "prompt": "You are the validation agent. Run the required validations, inspect failures carefully, and report only what really passed."
  },
  "retrieval_reviewer": {
    "description": "Reviews retrieval/indexing logic for role contamination, cache compatibility, and data-shape issues.",
    "prompt": "You are the retrieval reviewer. Inspect role safety, cache compatibility, manifests, and offline retrieval quality. Flag contamination and schema mismatches."
  },
  "runtime_guard": {
    "description": "Protects the Ableton runtime and blocks risky unrelated changes.",
    "prompt": "You are the runtime guard. Prevent unnecessary edits to the Remote Script, runtime socket behavior, or generation path when the task does not require it."
  },
  "reporter": {
    "description": "Writes the final task report truthfully and concisely.",
    "prompt": "You are the reporting agent. Write a technical report that only claims what was truly changed and verified."
  }
}
@@ -0,0 +1,94 @@
# Runs a Codex review/correction pass over a worker task and its GLM report.
# Required inputs must already exist on disk; the output file is created by codex.
param(
    [Parameter(Mandatory = $true)]
    [string]$TaskFile,

    [Parameter(Mandatory = $true)]
    [string]$ReportFile,

    [Parameter(Mandatory = $true)]
    [string]$ProjectRoot,

    [Parameter(Mandatory = $true)]
    [string]$OutputFile,

    [string]$CodexModel = ""
)

$ErrorActionPreference = "Stop"

# Locate the codex CLI. On Windows the npm shim is codex.cmd, so try it first
# and fall back to a bare "codex" (e.g. a native binary on PATH).
function Resolve-CodexCommand() {
    foreach ($candidate in @("codex.cmd", "codex")) {
        $cmd = Get-Command $candidate -ErrorAction SilentlyContinue
        if ($cmd) {
            return $cmd.Source
        }
    }
    throw "Command not found: codex"
}

# Resolve-Path throws (ErrorActionPreference = Stop) if any input is missing.
$taskPath = (Resolve-Path -LiteralPath $TaskFile).Path
$reportPath = (Resolve-Path -LiteralPath $ReportFile).Path
$projectPath = (Resolve-Path -LiteralPath $ProjectRoot).Path
# The output file may not exist yet, so normalize it without touching disk.
$outputPath = [System.IO.Path]::GetFullPath($OutputFile)
$codexCommand = Resolve-CodexCommand

$reviewPrompt = @"
Read this worker task file:
$taskPath

Read this GLM report:
$reportPath

Your job:
1. Inspect the real diff in the repository.
2. Verify whether GLM actually implemented what the report claims.
3. Fix anything incorrect, incomplete, or unsafe.
4. Run the relevant validations mentioned by the task/report.
5. Leave the repository in the best corrected state you can reach in one pass.
6. Write a concise final summary to the output file configured by the CLI.

Be strict about overclaims. The code is the source of truth, not the report.
"@

$codexArgs = @(
    "exec",
    "--dangerously-bypass-approvals-and-sandbox",
    "-C", $projectPath,
    "-o", $outputPath
)

if (-not [string]::IsNullOrWhiteSpace($CodexModel)) {
    $codexArgs += @("-m", $CodexModel)
}

$codexArgs += $reviewPrompt

# Capture stdout/stderr to temp files next to the output file so they can be
# echoed back after the run regardless of success or failure.
$outputDir = [System.IO.Path]::GetDirectoryName($outputPath)
$stdoutPath = [System.IO.Path]::Combine($outputDir, "codex_review_stdout.tmp.txt")
$stderrPath = [System.IO.Path]::Combine($outputDir, "codex_review_stderr.tmp.txt")

if (Test-Path -LiteralPath $stdoutPath) { Remove-Item -LiteralPath $stdoutPath -Force }
if (Test-Path -LiteralPath $stderrPath) { Remove-Item -LiteralPath $stderrPath -Force }

# Default to failure so a run that never reaches the assignment inside the try
# block is not mistaken for success when the exit code is inspected below.
# (The original left $exitCode unset in that case.)
$exitCode = 1

Push-Location $projectPath
try {
    & $codexCommand @codexArgs 1> $stdoutPath 2> $stderrPath
    $exitCode = $LASTEXITCODE
}
finally {
    Pop-Location
}

# Echo the captured streams, then remove the temp files (the original version
# leaked them on disk after every run).
if (Test-Path -LiteralPath $stdoutPath) {
    Get-Content -LiteralPath $stdoutPath
    Remove-Item -LiteralPath $stdoutPath -Force -ErrorAction SilentlyContinue
}
if (Test-Path -LiteralPath $stderrPath) {
    Get-Content -LiteralPath $stderrPath
    Remove-Item -LiteralPath $stderrPath -Force -ErrorAction SilentlyContinue
}

if ($exitCode -ne 0) {
    throw "Codex exited with code $exitCode"
}
@@ -0,0 +1,401 @@
|
||||
{
|
||||
"benchmark_info": {
|
||||
"library_dir": "C:\\ProgramData\\Ableton\\Live 12 Suite\\Resources\\MIDI Remote Scripts\\librerias\\all_tracks",
|
||||
"top_n": 3,
|
||||
"roles": [
|
||||
"kick",
|
||||
"snare",
|
||||
"hat",
|
||||
"bass_loop",
|
||||
"vocal_loop",
|
||||
"top_loop"
|
||||
],
|
||||
"timestamp": "2026-03-20T16:36:16",
|
||||
"device": "directml"
|
||||
},
|
||||
"references": [
|
||||
{
|
||||
"file_name": "Mr. Pauer, Goyo - Química (Video Oficial).mp3",
|
||||
"path": "C:\\ProgramData\\Ableton\\Live 12 Suite\\Resources\\MIDI Remote Scripts\\sample\\Mr. Pauer, Goyo - Química (Video Oficial).mp3",
|
||||
"analysis_time_seconds": 3.09,
|
||||
"reference_info": {
|
||||
"tempo": 123.047,
|
||||
"key": "Cm",
|
||||
"duration": 145.31,
|
||||
"rms_mean": 0.17201,
|
||||
"onset_mean": 1.956218,
|
||||
"spectral_centroid": 2465.478
|
||||
},
|
||||
"sections": [
|
||||
{
|
||||
"kind": "verse",
|
||||
"start": 0.0,
|
||||
"end": 14.954,
|
||||
"bars": 8
|
||||
},
|
||||
{
|
||||
"kind": "build",
|
||||
"start": 14.954,
|
||||
"end": 37.779,
|
||||
"bars": 12
|
||||
},
|
||||
{
|
||||
"kind": "verse",
|
||||
"start": 37.779,
|
||||
"end": 46.811,
|
||||
"bars": 5
|
||||
},
|
||||
{
|
||||
"kind": "verse",
|
||||
"start": 46.811,
|
||||
"end": 54.822,
|
||||
"bars": 4
|
||||
},
|
||||
{
|
||||
"kind": "drop",
|
||||
"start": 54.822,
|
||||
"end": 62.833,
|
||||
"bars": 4
|
||||
},
|
||||
{
|
||||
"kind": "build",
|
||||
"start": 62.833,
|
||||
"end": 70.844,
|
||||
"bars": 4
|
||||
},
|
||||
{
|
||||
"kind": "verse",
|
||||
"start": 70.844,
|
||||
"end": 92.415,
|
||||
"bars": 11
|
||||
},
|
||||
{
|
||||
"kind": "build",
|
||||
"start": 92.415,
|
||||
"end": 101.03,
|
||||
"bars": 4
|
||||
},
|
||||
{
|
||||
"kind": "verse",
|
||||
"start": 101.03,
|
||||
"end": 109.041,
|
||||
"bars": 4
|
||||
},
|
||||
{
|
||||
"kind": "build",
|
||||
"start": 109.041,
|
||||
"end": 117.098,
|
||||
"bars": 4
|
||||
},
|
||||
{
|
||||
"kind": "outro",
|
||||
"start": 117.098,
|
||||
"end": 125.109,
|
||||
"bars": 4
|
||||
},
|
||||
{
|
||||
"kind": "outro",
|
||||
"start": 125.109,
|
||||
"end": 133.422,
|
||||
"bars": 4
|
||||
},
|
||||
{
|
||||
"kind": "outro",
|
||||
"start": 133.422,
|
||||
"end": 141.433,
|
||||
"bars": 4
|
||||
}
|
||||
],
|
||||
"role_candidates": {
|
||||
"kick": {
|
||||
"total_available": 16,
|
||||
"top_candidates": [
|
||||
{
|
||||
"rank": 1,
|
||||
"file_name": "BBH - Primer Impacto - Kick 5.wav",
|
||||
"path": "C:\\ProgramData\\Ableton\\Live 12 Suite\\Resources\\MIDI Remote Scripts\\librerias\\all_tracks\\BBH - Primer Impacto - Kick 5.wav",
|
||||
"score": 0.658173,
|
||||
"cosine": 0.677478,
|
||||
"segment_score": 0.807539,
|
||||
"catalog_score": 0.540981,
|
||||
"tempo": 117.454,
|
||||
"key": "Gm",
|
||||
"duration": 0.5
|
||||
},
|
||||
{
|
||||
"rank": 2,
|
||||
"file_name": "BBH - Primer Impacto - Kick 1.wav",
|
||||
"path": "C:\\ProgramData\\Ableton\\Live 12 Suite\\Resources\\MIDI Remote Scripts\\librerias\\all_tracks\\BBH - Primer Impacto - Kick 1.wav",
|
||||
"score": 0.650067,
|
||||
"cosine": 0.633787,
|
||||
"segment_score": 0.771427,
|
||||
"catalog_score": 0.540981,
|
||||
"tempo": 117.454,
|
||||
"key": "Am",
|
||||
"duration": 0.5
|
||||
},
|
||||
{
|
||||
"rank": 3,
|
||||
"file_name": "BBH - Primer Impacto - Kick 8.wav",
|
||||
"path": "C:\\ProgramData\\Ableton\\Live 12 Suite\\Resources\\MIDI Remote Scripts\\librerias\\all_tracks\\BBH - Primer Impacto - Kick 8.wav",
|
||||
"score": 0.642297,
|
||||
"cosine": 0.689128,
|
||||
"segment_score": 0.809562,
|
||||
"catalog_score": 0.5,
|
||||
"tempo": 258.398,
|
||||
"key": "Fm",
|
||||
"duration": 0.484
|
||||
}
|
||||
]
|
||||
},
|
||||
"snare": {
|
||||
"total_available": 28,
|
||||
"top_candidates": [
|
||||
{
|
||||
"rank": 1,
|
||||
"file_name": "MT Clap & Snare Hit 05.wav",
|
||||
"path": "C:\\ProgramData\\Ableton\\Live 12 Suite\\Resources\\MIDI Remote Scripts\\librerias\\all_tracks\\MT Clap & Snare Hit 05.wav",
|
||||
"score": 0.642515,
|
||||
"cosine": 0.742869,
|
||||
"segment_score": 0.87862,
|
||||
"catalog_score": 0.529168,
|
||||
"tempo": 258.398,
|
||||
"key": "Dm",
|
||||
"duration": 0.72
|
||||
},
|
||||
{
|
||||
"rank": 2,
|
||||
"file_name": "MT Clap & Snare Hit 15.wav",
|
||||
"path": "C:\\ProgramData\\Ableton\\Live 12 Suite\\Resources\\MIDI Remote Scripts\\librerias\\all_tracks\\MT Clap & Snare Hit 15.wav",
|
||||
"score": 0.623005,
|
||||
"cosine": 0.754711,
|
||||
"segment_score": 0.800798,
|
||||
"catalog_score": 0.518602,
|
||||
"tempo": 234.908,
|
||||
"key": "Dm",
|
||||
"duration": 0.642
|
||||
},
|
||||
{
|
||||
"rank": 3,
|
||||
"file_name": "BBH - Primer Impacto - Clap 1.wav",
|
||||
"path": "C:\\ProgramData\\Ableton\\Live 12 Suite\\Resources\\MIDI Remote Scripts\\librerias\\all_tracks\\BBH - Primer Impacto - Clap 1.wav",
|
||||
"score": 0.621014,
|
||||
"cosine": 0.780775,
|
||||
"segment_score": 0.805699,
|
||||
"catalog_score": 0.528549,
|
||||
"tempo": 117.454,
|
||||
"key": "A#m",
|
||||
"duration": 0.545
|
||||
}
|
||||
]
|
||||
},
|
||||
"hat": {
|
||||
"total_available": 32,
|
||||
"top_candidates": [
|
||||
{
|
||||
"rank": 1,
|
||||
"file_name": "BBH - Primer Impacto - Open Hat 2.wav",
|
||||
"path": "C:\\ProgramData\\Ableton\\Live 12 Suite\\Resources\\MIDI Remote Scripts\\librerias\\all_tracks\\BBH - Primer Impacto - Open Hat 2.wav",
|
||||
"score": 0.602448,
|
||||
"cosine": 0.750913,
|
||||
"segment_score": 0.789455,
|
||||
"catalog_score": 0.539635,
|
||||
"tempo": 258.398,
|
||||
"key": "Cm",
|
||||
"duration": 0.625
|
||||
},
|
||||
{
|
||||
"rank": 2,
|
||||
"file_name": "BBH - Primer Impacto - Open Hat 9.wav",
|
||||
"path": "C:\\ProgramData\\Ableton\\Live 12 Suite\\Resources\\MIDI Remote Scripts\\librerias\\all_tracks\\BBH - Primer Impacto - Open Hat 9.wav",
|
||||
"score": 0.592739,
|
||||
"cosine": 0.764186,
|
||||
"segment_score": 0.682635,
|
||||
"catalog_score": 0.5,
|
||||
"tempo": 258.398,
|
||||
"key": "Gm",
|
||||
"duration": 0.38
|
||||
},
|
||||
{
|
||||
"rank": 3,
|
||||
"file_name": "MT Hat Hit 04.wav",
|
||||
"path": "C:\\ProgramData\\Ableton\\Live 12 Suite\\Resources\\MIDI Remote Scripts\\librerias\\all_tracks\\MT Hat Hit 04.wav",
|
||||
"score": 0.55811,
|
||||
"cosine": 0.747485,
|
||||
"segment_score": 0.747228,
|
||||
"catalog_score": 0.5,
|
||||
"tempo": 135.999,
|
||||
"key": "G",
|
||||
"duration": 0.233
|
||||
}
|
||||
]
|
||||
},
|
||||
"bass_loop": {
|
||||
"total_available": 37,
|
||||
"top_candidates": [
|
||||
{
|
||||
"rank": 1,
|
||||
"file_name": "Bass_Loop_03_G#m_125.wav",
|
||||
"path": "C:\\ProgramData\\Ableton\\Live 12 Suite\\Resources\\MIDI Remote Scripts\\librerias\\all_tracks\\Bass_Loop_03_G#m_125.wav",
|
||||
"score": 0.877488,
|
||||
"cosine": 0.803278,
|
||||
"segment_score": 0.883592,
|
||||
"catalog_score": 0.617711,
|
||||
"tempo": 123.047,
|
||||
"key": "Cm",
|
||||
"duration": 7.68
|
||||
},
|
||||
{
|
||||
"rank": 2,
|
||||
"file_name": "BBH - Primer Impacto - Bass Loop 06 Dmin.wav",
|
||||
"path": "C:\\ProgramData\\Ableton\\Live 12 Suite\\Resources\\MIDI Remote Scripts\\librerias\\all_tracks\\BBH - Primer Impacto - Bass Loop 06 Dmin.wav",
|
||||
"score": 0.82587,
|
||||
"cosine": 0.698374,
|
||||
"segment_score": 0.799662,
|
||||
"catalog_score": 0.890835,
|
||||
"tempo": 123.047,
|
||||
"key": "Dm",
|
||||
"duration": 3.84
|
||||
},
|
||||
{
|
||||
"rank": 3,
|
||||
"file_name": "Bass_Loop_05_Cm_125.wav",
|
||||
"path": "C:\\ProgramData\\Ableton\\Live 12 Suite\\Resources\\MIDI Remote Scripts\\librerias\\all_tracks\\Bass_Loop_05_Cm_125.wav",
|
||||
"score": 0.818811,
|
||||
"cosine": 0.695605,
|
||||
"segment_score": 0.883218,
|
||||
"catalog_score": 0.617711,
|
||||
"tempo": 63.024,
|
||||
"key": "C",
|
||||
"duration": 7.68
|
||||
}
|
||||
]
|
||||
},
|
||||
"vocal_loop": {
|
||||
"total_available": 24,
|
||||
"top_candidates": [
|
||||
{
|
||||
"rank": 1,
|
||||
"file_name": "MT Vocal Loop 12 125.wav",
|
||||
"path": "C:\\ProgramData\\Ableton\\Live 12 Suite\\Resources\\MIDI Remote Scripts\\librerias\\all_tracks\\MT Vocal Loop 12 125.wav",
|
||||
"score": 0.932334,
|
||||
"cosine": 0.827361,
|
||||
"segment_score": 0.923902,
|
||||
"catalog_score": 0.999437,
|
||||
"tempo": 123.047,
|
||||
"key": "D#",
|
||||
"duration": 1.92
|
||||
},
|
||||
{
|
||||
"rank": 2,
|
||||
"file_name": "MT Vocal Loop 11 125.wav",
|
||||
"path": "C:\\ProgramData\\Ableton\\Live 12 Suite\\Resources\\MIDI Remote Scripts\\librerias\\all_tracks\\MT Vocal Loop 11 125.wav",
|
||||
"score": 0.921701,
|
||||
"cosine": 0.832834,
|
||||
"segment_score": 0.920162,
|
||||
"catalog_score": 0.948909,
|
||||
"tempo": 123.047,
|
||||
"key": "D#m",
|
||||
"duration": 1.92
|
||||
},
|
||||
{
|
||||
"rank": 3,
|
||||
"file_name": "MT Vocal Loop 02 128.wav",
|
||||
"path": "C:\\ProgramData\\Ableton\\Live 12 Suite\\Resources\\MIDI Remote Scripts\\librerias\\all_tracks\\MT Vocal Loop 02 128.wav",
|
||||
"score": 0.862394,
|
||||
"cosine": 0.845787,
|
||||
"segment_score": 0.954025,
|
||||
"catalog_score": 0.882953,
|
||||
"tempo": 123.047,
|
||||
"key": "G#m",
|
||||
"duration": 3.75
|
||||
}
|
||||
]
|
||||
},
|
||||
"top_loop": {
|
||||
"total_available": 144,
|
||||
"top_candidates": [
|
||||
{
|
||||
"rank": 1,
|
||||
"file_name": "Top_Loop_11_Any_125.wav",
|
||||
"path": "C:\\ProgramData\\Ableton\\Live 12 Suite\\Resources\\MIDI Remote Scripts\\librerias\\all_tracks\\Top_Loop_11_Any_125.wav",
|
||||
"score": 0.906089,
|
||||
"cosine": 0.752537,
|
||||
"segment_score": 0.768995,
|
||||
"catalog_score": 0.859437,
|
||||
"tempo": 123.047,
|
||||
"key": "Cm",
|
||||
"duration": 7.68
|
||||
},
|
||||
{
|
||||
"rank": 2,
|
||||
"file_name": "drum_loop_21_am_125.wav",
|
||||
"path": "c:\\programdata\\ableton\\live 12 suite\\resources\\midi remote scripts\\librerias\\all_tracks\\drum_loop_21_am_125.wav",
|
||||
"score": 0.893566,
|
||||
"cosine": 0.813975,
|
||||
"segment_score": 0.954219,
|
||||
"catalog_score": 0.799711,
|
||||
"tempo": 123.047,
|
||||
"key": "A#m",
|
||||
"duration": 7.68
|
||||
},
|
||||
{
|
||||
"rank": 3,
|
||||
"file_name": "drum_loop_23_am_125.wav",
|
||||
"path": "c:\\programdata\\ableton\\live 12 suite\\resources\\midi remote scripts\\librerias\\all_tracks\\drum_loop_23_am_125.wav",
|
||||
"score": 0.887869,
|
||||
"cosine": 0.822104,
|
||||
"segment_score": 0.94301,
|
||||
"catalog_score": 0.799711,
|
||||
"tempo": 123.047,
|
||||
"key": "A#m",
|
||||
"duration": 7.68
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"contamination_analysis": {
|
||||
"cross_role_files": [],
|
||||
"potential_mismatches": [],
|
||||
"role_score_stats": {
|
||||
"kick": {
|
||||
"min": 0.6423,
|
||||
"max": 0.6582,
|
||||
"avg": 0.6502,
|
||||
"count": 3
|
||||
},
|
||||
"snare": {
|
||||
"min": 0.621,
|
||||
"max": 0.6425,
|
||||
"avg": 0.6288,
|
||||
"count": 3
|
||||
},
|
||||
"hat": {
|
||||
"min": 0.5581,
|
||||
"max": 0.6024,
|
||||
"avg": 0.5844,
|
||||
"count": 3
|
||||
},
|
||||
"bass_loop": {
|
||||
"min": 0.8188,
|
||||
"max": 0.8775,
|
||||
"avg": 0.8407,
|
||||
"count": 3
|
||||
},
|
||||
"vocal_loop": {
|
||||
"min": 0.8624,
|
||||
"max": 0.9323,
|
||||
"avg": 0.9055,
|
||||
"count": 3
|
||||
},
|
||||
"top_loop": {
|
||||
"min": 0.8879,
|
||||
"max": 0.9061,
|
||||
"avg": 0.8958,
|
||||
"count": 3
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,157 @@
# Orchestrates one GLM worker cycle followed by a Codex review/correction pass.
# Telegram notifications are best-effort and never abort the loop.
param(
    [Parameter(Mandatory = $true)]
    [string]$TaskFile,

    [Parameter(Mandatory = $true)]
    [string]$ReportFile,

    [string]$ProjectRoot = (Resolve-Path (Join-Path $PSScriptRoot "..")).Path,
    [string]$GlmModel = "glm-5",
    [string]$GlmBaseUrl = $(if ($env:ANTHROPIC_BASE_URL) { $env:ANTHROPIC_BASE_URL } else { "https://coding-intl.dashscope.aliyuncs.com/apps/anthropic" }),
    [string]$GlmAuthToken = $env:ANTHROPIC_AUTH_TOKEN,
    [string]$GlmAgentsFile = "",
    [string]$CodexModel = "",
    [string]$TelegramBotToken = $env:TELEGRAM_BOT_TOKEN,
    [string]$TelegramChatId = $env:TELEGRAM_CHAT_ID,
    [string]$TelegramConfigPath = (Join-Path $PSScriptRoot "telegram.local.json"),
    [switch]$SkipCodexReview
)

$ErrorActionPreference = "Stop"

# Expand a path relative to the repository root; absolute paths pass through.
function Resolve-RepoPath([string]$BasePath, [string]$TargetPath) {
    if ([System.IO.Path]::IsPathRooted($TargetPath)) {
        return [System.IO.Path]::GetFullPath($TargetPath)
    }
    return [System.IO.Path]::GetFullPath((Join-Path $BasePath $TargetPath))
}

# Fill in missing Telegram credentials from telegram.local.json, if present.
function Resolve-TelegramSettings() {
    if (([string]::IsNullOrWhiteSpace($TelegramBotToken) -or [string]::IsNullOrWhiteSpace($TelegramChatId)) -and (Test-Path -LiteralPath $TelegramConfigPath)) {
        $config = Get-Content -LiteralPath $TelegramConfigPath -Raw | ConvertFrom-Json
        if ([string]::IsNullOrWhiteSpace($TelegramBotToken)) {
            $script:TelegramBotToken = $config.bot_token
        }
        if ([string]::IsNullOrWhiteSpace($TelegramChatId)) {
            $script:TelegramChatId = $config.chat_id
        }
    }
}

# Best-effort Telegram notification; a delivery failure never aborts the loop.
function Send-LoopNotification([string]$Message) {
    Resolve-TelegramSettings
    if ([string]::IsNullOrWhiteSpace($TelegramBotToken) -or [string]::IsNullOrWhiteSpace($TelegramChatId)) {
        return
    }

    $notifier = Join-Path $PSScriptRoot "send_telegram_notification.ps1"
    try {
        & $notifier -Message $Message -BotToken $TelegramBotToken -ChatId $TelegramChatId -ConfigPath $TelegramConfigPath
    }
    catch {
        Write-Warning ("Telegram notification failed: " + $_.Exception.Message)
    }
}

# Locate the codex CLI, preferring the Windows npm shim over a bare binary.
function Resolve-CodexCommand() {
    foreach ($candidate in @("codex.cmd", "codex")) {
        $cmd = Get-Command $candidate -ErrorAction SilentlyContinue
        if ($cmd) {
            return $cmd.Source
        }
    }
    throw "Command not found: codex"
}

$projectPath = (Resolve-Path -LiteralPath $ProjectRoot).Path
$taskPath = (Resolve-Path -LiteralPath $TaskFile).Path
$reportPath = Resolve-RepoPath $projectPath $ReportFile
$codexCommand = Resolve-CodexCommand

$timestamp = Get-Date -Format "yyyyMMdd_HHmmss"
# Single-quoted with single backslashes: the original used "\\" inside a
# double-quoted string, but backslash is not an escape character in
# PowerShell, so that produced a literal doubled separator in the path.
$runDir = Join-Path $projectPath ('automation\runs\loop_' + $timestamp)
New-Item -ItemType Directory -Force -Path $runDir | Out-Null
$codexStdoutPath = Join-Path $runDir "codex_stdout.txt"
$codexMessagePath = Join-Path $runDir "codex_last_message.txt"

$glmRunner = Join-Path $PSScriptRoot "run_glm_cycle.ps1"
Send-LoopNotification("GLM/Codex loop started: $(Split-Path -Leaf $taskPath)")
& $glmRunner `
    -TaskFile $taskPath `
    -ReportFile $reportPath `
    -ProjectRoot $projectPath `
    -Model $GlmModel `
    -BaseUrl $GlmBaseUrl `
    -AuthToken $GlmAuthToken `
    -AgentsFile $GlmAgentsFile `
    -TelegramBotToken $TelegramBotToken `
    -TelegramChatId $TelegramChatId `
    -TelegramConfigPath $TelegramConfigPath

if ($SkipCodexReview) {
    Send-LoopNotification("GLM/Codex loop finished without Codex review: $(Split-Path -Leaf $taskPath)")
    Write-Host "GLM worker finished. Codex review skipped by flag."
    return
}

$reviewPrompt = @"
Read this worker task file:
$taskPath

Read this GLM report:
$reportPath

Your job:
1. Inspect the real diff in the repository.
2. Verify whether GLM actually implemented what the report claims.
3. Fix anything incorrect, incomplete, or unsafe.
4. Run the relevant validations mentioned by the task/report.
5. Leave the repository in the best corrected state you can reach in one pass.
6. Write a concise final summary to the output file configured by the CLI.

Be strict about overclaims. The code is the source of truth, not the report.
"@

$codexArgs = @(
    "exec",
    "--dangerously-bypass-approvals-and-sandbox",
    "-C", $projectPath,
    "-o", $codexMessagePath
)

if (-not [string]::IsNullOrWhiteSpace($CodexModel)) {
    $codexArgs += @("-m", $CodexModel)
}

$codexArgs += $reviewPrompt

Write-Host ""
Write-Host "Running Codex review/correction pass..."
Send-LoopNotification("Codex review started: $(Split-Path -Leaf $taskPath)")

try {
    & $codexCommand @codexArgs 2>&1 | Tee-Object -FilePath $codexStdoutPath
}
catch {
    Send-LoopNotification("Codex review failed: $(Split-Path -Leaf $taskPath)`n$($_.Exception.Message)")
    throw
}

# A non-zero exit from an external command does not raise a PowerShell error,
# so the catch above never fires for a failed codex run; the original script
# therefore reported success after a failed review. Check explicitly.
if ($LASTEXITCODE -ne 0) {
    Send-LoopNotification("Codex review failed: $(Split-Path -Leaf $taskPath)`nExit code: $LASTEXITCODE")
    throw "Codex exited with code $LASTEXITCODE"
}

Send-LoopNotification("GLM/Codex loop finished: $(Split-Path -Leaf $taskPath)`nReport: $(Split-Path -Leaf $reportPath)`nCodex note: $(Split-Path -Leaf $codexMessagePath)")

Write-Host ""
Write-Host "Loop finished."
Write-Host "Task: $taskPath"
Write-Host "GLM report: $reportPath"
Write-Host "Codex note: $codexMessagePath"
Write-Host "Codex stdout: $codexStdoutPath"
162
AbletonMCP_AI_BAK_20260328_200801/automation/run_glm_cycle.ps1
Normal file
162
AbletonMCP_AI_BAK_20260328_200801/automation/run_glm_cycle.ps1
Normal file
@@ -0,0 +1,162 @@
# Runs a single GLM worker cycle: routes the Claude Code CLI at the GLM
# endpoint, feeds it the task markdown, and verifies it produced a report.
param(
    [Parameter(Mandatory = $true)]
    [string]$TaskFile,

    [Parameter(Mandatory = $true)]
    [string]$ReportFile,

    [string]$ProjectRoot = (Resolve-Path (Join-Path $PSScriptRoot "..")).Path,
    [string]$Model = "glm-5",
    [string]$BaseUrl = $(if ($env:ANTHROPIC_BASE_URL) { $env:ANTHROPIC_BASE_URL } else { "https://coding-intl.dashscope.aliyuncs.com/apps/anthropic" }),
    [string]$AuthToken = $env:ANTHROPIC_AUTH_TOKEN,
    [string]$AgentsFile = (Join-Path $PSScriptRoot "glm_agents.team.json"),
    [string]$TelegramBotToken = $env:TELEGRAM_BOT_TOKEN,
    [string]$TelegramChatId = $env:TELEGRAM_CHAT_ID,
    [string]$TelegramConfigPath = (Join-Path $PSScriptRoot "telegram.local.json"),
    [switch]$VerboseLogs
)

$ErrorActionPreference = "Stop"

# Fail fast if a required executable is not on PATH.
function Require-Command([string]$Name) {
    if (-not (Get-Command $Name -ErrorAction SilentlyContinue)) {
        throw "Command not found: $Name"
    }
}

# Fail fast if a required file is missing.
function Require-File([string]$PathValue, [string]$Label) {
    if (-not (Test-Path -LiteralPath $PathValue)) {
        throw "$Label not found: $PathValue"
    }
}

# Expand a path relative to the repository root; absolute paths pass through.
function Resolve-RepoPath([string]$BasePath, [string]$TargetPath) {
    if ([System.IO.Path]::IsPathRooted($TargetPath)) {
        return [System.IO.Path]::GetFullPath($TargetPath)
    }
    return [System.IO.Path]::GetFullPath((Join-Path $BasePath $TargetPath))
}

# Fill in missing Telegram credentials from telegram.local.json, if present.
function Resolve-TelegramSettings() {
    if (([string]::IsNullOrWhiteSpace($TelegramBotToken) -or [string]::IsNullOrWhiteSpace($TelegramChatId)) -and (Test-Path -LiteralPath $TelegramConfigPath)) {
        $config = Get-Content -LiteralPath $TelegramConfigPath -Raw | ConvertFrom-Json
        if ([string]::IsNullOrWhiteSpace($TelegramBotToken)) {
            $script:TelegramBotToken = $config.bot_token
        }
        if ([string]::IsNullOrWhiteSpace($TelegramChatId)) {
            $script:TelegramChatId = $config.chat_id
        }
    }
}

# Best-effort Telegram notification; a delivery failure never aborts the run.
function Send-RunNotification([string]$Message) {
    Resolve-TelegramSettings
    if ([string]::IsNullOrWhiteSpace($TelegramBotToken) -or [string]::IsNullOrWhiteSpace($TelegramChatId)) {
        return
    }

    $notifier = Join-Path $PSScriptRoot "send_telegram_notification.ps1"
    try {
        & $notifier -Message $Message -BotToken $TelegramBotToken -ChatId $TelegramChatId -ConfigPath $TelegramConfigPath
    }
    catch {
        Write-Warning ("Telegram notification failed: " + $_.Exception.Message)
    }
}

Require-Command "claude"
Require-File $TaskFile "Task file"

if ([string]::IsNullOrWhiteSpace($BaseUrl)) {
    throw "ANTHROPIC_BASE_URL is not set. Pass -BaseUrl or export the env var first."
}
if ([string]::IsNullOrWhiteSpace($AuthToken)) {
    throw "ANTHROPIC_AUTH_TOKEN is not set. Pass -AuthToken or export the env var first."
}

# Point every Claude Code model alias at the GLM endpoint for this process.
$env:ANTHROPIC_BASE_URL = $BaseUrl
$env:ANTHROPIC_AUTH_TOKEN = $AuthToken
$env:CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC = "1"
$env:ANTHROPIC_MODEL = $Model
$env:ANTHROPIC_SMALL_FAST_MODEL = $Model
$env:ANTHROPIC_DEFAULT_HAIKU_MODEL = $Model
$env:ANTHROPIC_DEFAULT_SONNET_MODEL = $Model
$env:ANTHROPIC_DEFAULT_OPUS_MODEL = $Model

$taskPath = (Resolve-Path -LiteralPath $TaskFile).Path
$projectPath = (Resolve-Path -LiteralPath $ProjectRoot).Path
$reportPath = Resolve-RepoPath $projectPath $ReportFile
$reportDir = Split-Path -Parent $reportPath
New-Item -ItemType Directory -Force -Path $reportDir | Out-Null

$timestamp = Get-Date -Format "yyyyMMdd_HHmmss"
# Single-quoted with single backslashes: the original used "\\" inside a
# double-quoted string, but backslash is not an escape character in
# PowerShell, so that produced a literal doubled separator in the path.
$runDir = Join-Path $projectPath ('automation\runs\glm_' + $timestamp)
New-Item -ItemType Directory -Force -Path $runDir | Out-Null
$stdoutPath = Join-Path $runDir "glm_stdout.txt"

$prompt = @"
You are running as the GLM worker on this Windows repository.

Repository root:
$projectPath

Task file to follow exactly:
$taskPath

You must:
1. Read the task markdown and implement the requested changes in the repository.
2. Run the validations requested by the task.
3. Create or overwrite this report file with a truthful report:
$reportPath
4. Do not overclaim. If something is incomplete, say so explicitly in the report.
5. Keep the diff focused.
6. If custom agents are available, use them aggressively and in parallel where safe:
- planner first
- implementer_core and implementer_aux for disjoint work
- validator before finishing
- retrieval_reviewer or runtime_guard when relevant
- reporter last

Open and follow the task markdown from disk instead of asking for the task again.
"@

$claudeArgs = @(
    "-p",
    "--dangerously-skip-permissions",
    "--effort", "max",
    "--model", $Model,
    "--add-dir", $projectPath
)

if (-not [string]::IsNullOrWhiteSpace($AgentsFile)) {
    $agentsPath = (Resolve-Path -LiteralPath $AgentsFile).Path
    $claudeArgs += @("--agents", (Get-Content -LiteralPath $agentsPath -Raw))
}

if ($VerboseLogs) {
    $claudeArgs += "--verbose"
}

Write-Host "Running GLM worker with model $Model..."
Send-RunNotification("GLM worker started: $(Split-Path -Leaf $taskPath)")

try {
    $prompt | & claude @claudeArgs 2>&1 | Tee-Object -FilePath $stdoutPath
}
catch {
    Send-RunNotification("GLM worker failed: $(Split-Path -Leaf $taskPath)`n$($_.Exception.Message)")
    throw
}

# A non-zero exit from the external claude process does not raise a PowerShell
# error, so the catch above never fires for it. Without this check, a stale
# report file left over from an earlier run could mask a failed worker.
if ($LASTEXITCODE -ne 0) {
    Send-RunNotification("GLM worker failed: $(Split-Path -Leaf $taskPath)`nExit code: $LASTEXITCODE")
    throw "claude exited with code $LASTEXITCODE"
}

if (-not (Test-Path -LiteralPath $reportPath)) {
    Send-RunNotification("GLM worker failed: missing report for $(Split-Path -Leaf $taskPath)")
    throw "GLM finished but did not create the expected report file: $reportPath"
}

Send-RunNotification("GLM worker finished: $(Split-Path -Leaf $taskPath)`nReport: $(Split-Path -Leaf $reportPath)")

Write-Host ""
Write-Host "GLM cycle finished."
Write-Host "Task: $taskPath"
Write-Host "Report: $reportPath"
Write-Host "Stdout: $stdoutPath"
141
AbletonMCP_AI_BAK_20260328_200801/automation/run_task_queue.ps1
Normal file
141
AbletonMCP_AI_BAK_20260328_200801/automation/run_task_queue.ps1
Normal file
@@ -0,0 +1,141 @@
|
||||
# run_task_queue.ps1 - drains a JSON task queue: each enabled pending task is
# handed to run_glm_codex_loop.ps1 and its status transitions are persisted
# back into the queue file. Optionally keeps polling in -Watch mode.
param(
    [string]$QueueFile = (Join-Path $PSScriptRoot "task_queue.json"),
    [string]$ProjectRoot = (Resolve-Path (Join-Path $PSScriptRoot "..")).Path,
    [string]$GlmModel = "glm-5",
    [string]$GlmBaseUrl = $(if ($env:ANTHROPIC_BASE_URL) { $env:ANTHROPIC_BASE_URL } else { "https://coding-intl.dashscope.aliyuncs.com/apps/anthropic" }),
    [string]$GlmAuthToken = $env:ANTHROPIC_AUTH_TOKEN,
    [string]$GlmAgentsFile = (Join-Path $PSScriptRoot "glm_agents.team.json"),
    [string]$CodexModel = "",
    [string]$TelegramBotToken = $env:TELEGRAM_BOT_TOKEN,
    [string]$TelegramChatId = $env:TELEGRAM_CHAT_ID,
    [string]$TelegramConfigPath = (Join-Path $PSScriptRoot "telegram.local.json"),
    [int]$PollSeconds = 30,
    [switch]$Watch,
    [switch]$ContinueOnError
)

$ErrorActionPreference = "Stop"

# Resolve TargetPath against BasePath unless it is already absolute.
function Resolve-RepoPath([string]$BasePath, [string]$TargetPath) {
    if ([System.IO.Path]::IsPathRooted($TargetPath)) {
        return [System.IO.Path]::GetFullPath($TargetPath)
    }
    return [System.IO.Path]::GetFullPath((Join-Path $BasePath $TargetPath))
}

# NOTE(review): ConvertFrom-Json only gained -Depth in PowerShell 6.2+, so this
# script assumes pwsh rather than Windows PowerShell 5.1 - confirm the host.
function Load-Queue([string]$PathValue) {
    return Get-Content -LiteralPath $PathValue -Raw | ConvertFrom-Json -Depth 20
}

function Save-Queue([string]$PathValue, $QueueObject) {
    $QueueObject | ConvertTo-Json -Depth 20 | Set-Content -LiteralPath $PathValue -Encoding UTF8
}

# Backfill Telegram credentials from the optional local config file.
# Intentionally mutates the script-scoped parameter variables.
function Resolve-TelegramSettings() {
    if (([string]::IsNullOrWhiteSpace($TelegramBotToken) -or [string]::IsNullOrWhiteSpace($TelegramChatId)) -and (Test-Path -LiteralPath $TelegramConfigPath)) {
        $config = Get-Content -LiteralPath $TelegramConfigPath -Raw | ConvertFrom-Json
        if ([string]::IsNullOrWhiteSpace($TelegramBotToken)) {
            $script:TelegramBotToken = $config.bot_token
        }
        if ([string]::IsNullOrWhiteSpace($TelegramChatId)) {
            $script:TelegramChatId = $config.chat_id
        }
    }
}

# Best-effort notification: missing credentials or a send failure never
# interrupt the queue run.
function Send-QueueNotification([string]$Message) {
    Resolve-TelegramSettings
    if ([string]::IsNullOrWhiteSpace($TelegramBotToken) -or [string]::IsNullOrWhiteSpace($TelegramChatId)) {
        return
    }

    $notifier = Join-Path $PSScriptRoot "send_telegram_notification.ps1"
    try {
        & $notifier -Message $Message -BotToken $TelegramBotToken -ChatId $TelegramChatId -ConfigPath $TelegramConfigPath
    }
    catch {
        Write-Warning ("Telegram notification failed: " + $_.Exception.Message)
    }
}

# First task that is enabled and still pending, or $null when the queue is drained.
function Find-NextTask($QueueObject) {
    foreach ($task in $QueueObject.tasks) {
        if ($task.enabled -and $task.status -eq "pending") {
            return $task
        }
    }
    return $null
}

$projectPath = (Resolve-Path -LiteralPath $ProjectRoot).Path
$queuePath = Resolve-RepoPath $projectPath $QueueFile
$loopRunner = Join-Path $PSScriptRoot "run_glm_codex_loop.ps1"
# FIX: previously "automation\\runs\\queue" - PowerShell does not treat the
# backslash as an escape character, so that literal contained doubled "\\"
# separators in the resulting path. Build the subpath with Join-Path instead.
$historyDir = Join-Path $projectPath (Join-Path "automation" (Join-Path "runs" "queue"))
New-Item -ItemType Directory -Force -Path $historyDir | Out-Null

Send-QueueNotification("AbletonMCP_AI queue runner started on $(Get-Date -Format 'yyyy-MM-dd HH:mm:ss'). Watching=$Watch ContinueOnError=$ContinueOnError")

do {
    # Re-read the queue every iteration so external edits are picked up.
    $queue = Load-Queue $queuePath
    $task = Find-NextTask $queue

    # Nothing runnable: poll again in watch mode, otherwise stop.
    if ($null -eq $task) {
        if ($Watch) {
            Start-Sleep -Seconds $PollSeconds
            continue
        }
        break
    }

    $taskPath = Resolve-RepoPath $projectPath $task.task_file
    $reportPath = Resolve-RepoPath $projectPath $task.report_file

    # Mark the task running before launching, so a crash leaves evidence.
    $task.status = "running"
    $task.started_at = (Get-Date).ToString("s")
    Save-Queue $queuePath $queue
    Send-QueueNotification("Queue task started: [$($task.id)] $($task.title)")

    try {
        & $loopRunner `
            -TaskFile $taskPath `
            -ReportFile $reportPath `
            -ProjectRoot $projectPath `
            -GlmModel $GlmModel `
            -GlmBaseUrl $GlmBaseUrl `
            -GlmAuthToken $GlmAuthToken `
            -GlmAgentsFile $GlmAgentsFile `
            -CodexModel $CodexModel `
            -TelegramBotToken $TelegramBotToken `
            -TelegramChatId $TelegramChatId `
            -TelegramConfigPath $TelegramConfigPath

        # Reload before mutating: the worker may have rewritten the queue file.
        $queue = Load-Queue $queuePath
        foreach ($item in $queue.tasks) {
            if ($item.id -eq $task.id) {
                $item.status = "completed"
                $item.completed_at = (Get-Date).ToString("s")
                break
            }
        }
        Save-Queue $queuePath $queue
        Send-QueueNotification("Queue task completed: [$($task.id)] $($task.title)")
    }
    catch {
        # Record the failure on the task; optionally keep draining the queue.
        $queue = Load-Queue $queuePath
        foreach ($item in $queue.tasks) {
            if ($item.id -eq $task.id) {
                $item.status = "failed"
                $item.failed_at = (Get-Date).ToString("s")
                $item.error = $_.Exception.Message
                break
            }
        }
        Save-Queue $queuePath $queue
        Send-QueueNotification("Queue task failed: [$($task.id)] $($task.title)`n$($_.Exception.Message)")

        if (-not $ContinueOnError) {
            throw
        }
    }
}
while ($true)
|
||||
@@ -0,0 +1,33 @@
|
||||
# send_telegram_notification.ps1 - posts a single message to the Telegram Bot
# API. Credentials come from parameters, the environment, or an optional local
# JSON config; with no credentials available the script silently does nothing.
param(
    [Parameter(Mandatory = $true)]
    [string]$Message,

    [string]$BotToken = $env:TELEGRAM_BOT_TOKEN,
    [string]$ChatId = $env:TELEGRAM_CHAT_ID,
    [string]$ConfigPath = (Join-Path $PSScriptRoot "telegram.local.json")
)

$ErrorActionPreference = "Stop"

# Backfill missing credentials from the optional local config file.
$credentialsMissing = [string]::IsNullOrWhiteSpace($BotToken) -or [string]::IsNullOrWhiteSpace($ChatId)
if ($credentialsMissing -and (Test-Path -LiteralPath $ConfigPath)) {
    $localConfig = Get-Content -LiteralPath $ConfigPath -Raw | ConvertFrom-Json
    if ([string]::IsNullOrWhiteSpace($BotToken)) { $BotToken = $localConfig.bot_token }
    if ([string]::IsNullOrWhiteSpace($ChatId)) { $ChatId = $localConfig.chat_id }
}

# Notifications are best-effort: still unconfigured means quietly succeed.
if ([string]::IsNullOrWhiteSpace($BotToken) -or [string]::IsNullOrWhiteSpace($ChatId)) {
    exit 0
}

$endpoint = "https://api.telegram.org/bot$BotToken/sendMessage"
$payload = @{
    chat_id                  = $ChatId
    text                     = $Message
    disable_web_page_preview = $true
}

# Discard the API response; a transport error throws under $ErrorActionPreference.
Invoke-RestMethod -Uri $endpoint -Method Post -Body $payload | Out-Null
|
||||
@@ -0,0 +1,18 @@
|
||||
# systemd unit for the autonomous GLM/Codex task queue runner (WSL, Windows mount).
[Unit]
Description=AbletonMCP_AI autonomous GLM/Codex queue
After=network-online.target
Wants=network-online.target

[Service]
Type=simple
User=ren
# WorkingDirectory consumes the rest of the line, so embedded spaces are fine here.
WorkingDirectory=/mnt/c/ProgramData/Ableton/Live 12 Suite/Resources/MIDI Remote Scripts/AbletonMCP_AI
# FIX: Environment= is whitespace-split into VAR=VALUE assignments and ExecStart=
# is whitespace-split into argv; the original unquoted paths containing
# "Live 12 Suite" / "MIDI Remote Scripts" were therefore parsed as several
# bogus tokens and the unit could not start. Double-quote both values.
Environment="LOCAL_ENV_FILE=/mnt/c/ProgramData/Ableton/Live 12 Suite/Resources/MIDI Remote Scripts/AbletonMCP_AI/automation/wsl.local.env"
ExecStart=/bin/bash "/mnt/c/ProgramData/Ableton/Live 12 Suite/Resources/MIDI Remote Scripts/AbletonMCP_AI/automation/wsl/run_task_queue.sh"
Restart=always
RestartSec=15
# The append: file specifiers take the remainder of the line, spaces included.
StandardOutput=append:/mnt/c/ProgramData/Ableton/Live 12 Suite/Resources/MIDI Remote Scripts/AbletonMCP_AI/automation/wsl_runtime/logs/service.log
StandardError=append:/mnt/c/ProgramData/Ableton/Live 12 Suite/Resources/MIDI Remote Scripts/AbletonMCP_AI/automation/wsl_runtime/logs/service.log

[Install]
WantedBy=multi-user.target
|
||||
@@ -0,0 +1,53 @@
|
||||
#!/usr/bin/env bash
# Bootstraps the WSL runtime: seeds a local Codex home from the Windows-side
# install, writes the Codex config.toml, and generates wsl.local.env, the
# environment file sourced by all the queue/loop scripts.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
RUNTIME_DIR="$PROJECT_ROOT/automation/wsl_runtime"
CODEX_HOME_DIR="$RUNTIME_DIR/codex_home"
WINDOWS_CODEX_HOME="/mnt/c/Users/ren/.codex"
ENV_FILE="$PROJECT_ROOT/automation/wsl.local.env"
OPENAI_API_KEY_VALUE=""

mkdir -p "$CODEX_HOME_DIR" "$RUNTIME_DIR/logs"

# Copy the Windows-side Codex credentials on first run only (never overwrite).
if [[ -f "$WINDOWS_CODEX_HOME/auth.json" && ! -f "$CODEX_HOME_DIR/auth.json" ]]; then
    cp "$WINDOWS_CODEX_HOME/auth.json" "$CODEX_HOME_DIR/auth.json"
fi

# Extract the API key (if present) so it can be re-exported in the env file.
if [[ -f "$CODEX_HOME_DIR/auth.json" ]]; then
    OPENAI_API_KEY_VALUE="$(jq -r '.OPENAI_API_KEY // empty' "$CODEX_HOME_DIR/auth.json" 2>/dev/null || true)"
fi

# Quoted heredoc delimiter: written verbatim, no expansion.
cat > "$CODEX_HOME_DIR/config.toml" <<'EOF'
model = "gpt-5.4"

[sandbox_workspace_write]
network_access = true
EOF

# SECURITY: the fallback tokens below are committed to the repository; rotate
# them and supply real values via the environment instead.
# FIX: the tokens were previously hard-coded unconditionally - an exported
# ANTHROPIC_AUTH_TOKEN / TELEGRAM_* / model override now takes precedence over
# the baked-in defaults (the generated file is unchanged when nothing is set).
cat > "$ENV_FILE" <<EOF
export ANTHROPIC_BASE_URL='${ANTHROPIC_BASE_URL:-https://coding-intl.dashscope.aliyuncs.com/apps/anthropic}'
export ANTHROPIC_AUTH_TOKEN='${ANTHROPIC_AUTH_TOKEN:-sk-sp-e87cea7b587c4af09e465726b084f41b}'
export GLM_MODEL='${GLM_MODEL:-glm-5}'
export CODEX_MODEL='${CODEX_MODEL:-gpt-5.4}'
export TELEGRAM_BOT_TOKEN='${TELEGRAM_BOT_TOKEN:-8444660361:AAECCo6oon0dbnQMzgaanZntYFOLgcZrcJ4}'
export TELEGRAM_CHAT_ID='${TELEGRAM_CHAT_ID:-692714536}'
export CODEX_HOME='/mnt/c/ProgramData/Ableton/Live 12 Suite/Resources/MIDI Remote Scripts/AbletonMCP_AI/automation/wsl_runtime/codex_home'
export GLM_AGENTS_FILE='/mnt/c/ProgramData/Ableton/Live 12 Suite/Resources/MIDI Remote Scripts/AbletonMCP_AI/automation/glm_agents.team.json'
export POLL_SECONDS='${POLL_SECONDS:-30}'
export WATCH='${WATCH:-1}'
export CONTINUE_ON_ERROR='${CONTINUE_ON_ERROR:-1}'
EOF

if [[ -n "$OPENAI_API_KEY_VALUE" ]]; then
    printf "export OPENAI_API_KEY='%s'\n" "$OPENAI_API_KEY_VALUE" >> "$ENV_FILE"
fi

# Both files may hold secrets: restrict to owner-only access, best-effort.
chmod 600 "$ENV_FILE" "$CODEX_HOME_DIR/auth.json" 2>/dev/null || true
chmod +x "$SCRIPT_DIR/"*.sh

echo "WSL runtime bootstrapped"
echo "Runtime dir: $RUNTIME_DIR"
echo "Env file: $ENV_FILE"
echo "Codex home: $CODEX_HOME_DIR"
||||
@@ -0,0 +1,163 @@
|
||||
# Local infrastructure stack for AbletonMCP-AI automation:
#   postgres - shared database server (backs both Gitea and n8n)
#   redis    - password-protected cache with AOF persistence
#   gitea    - self-hosted git service
#   n8n      - workflow automation engine with the project tree mounted
# Every ${VAR:-default} below is overridable from the sibling .env file.
services:
  postgres:
    image: postgres:16-alpine
    container_name: abletonmcp-postgres
    restart: unless-stopped
    environment:
      POSTGRES_USER: ${POSTGRES_USER:-postgres}
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-changeme}
      POSTGRES_DB: ${POSTGRES_BOOTSTRAP_DB:-postgres}
      PGDATA: /var/lib/postgresql/data/pgdata
      # Consumed by the ./initdb hook scripts to create the per-app databases.
      GITEA_DB_NAME: ${GITEA_DB_NAME:-gitea}
      N8N_DB_NAME: ${N8N_DB_NAME:-n8n}
    volumes:
      - postgres-data:/var/lib/postgresql/data
      # First-boot SQL/shell hooks (database creation), mounted read-only.
      - ./initdb:/docker-entrypoint-initdb.d:ro
    ports:
      - "${POSTGRES_PORT:-5432}:5432"
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-postgres} -d ${POSTGRES_BOOTSTRAP_DB:-postgres}"]
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 20s
    networks:
      - internal

  redis:
    image: redis:7-alpine
    container_name: abletonmcp-redis
    restart: unless-stopped
    # Require a password and persist via append-only file plus periodic RDB
    # snapshots (save after 60s if >= 1000 keys changed).
    command:
      - redis-server
      - --requirepass
      - ${REDIS_PASSWORD:-changeme}
      - --appendonly
      - "yes"
      - --save
      - "60"
      - "1000"
    volumes:
      - redis-data:/data
    ports:
      - "${REDIS_PORT:-6379}:6379"
    healthcheck:
      test: ["CMD-SHELL", "redis-cli -a ${REDIS_PASSWORD:-changeme} ping | grep -q PONG"]
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 10s
    networks:
      - internal

  gitea:
    image: gitea/gitea:1.21-rootless
    container_name: abletonmcp-gitea
    restart: unless-stopped
    environment:
      USER_UID: 1000
      USER_GID: 1000
      # Gitea config via GITEA__section__KEY environment overrides.
      GITEA__database__DB_TYPE: postgres
      GITEA__database__HOST: postgres:5432
      GITEA__database__NAME: ${GITEA_DB_NAME:-gitea}
      GITEA__database__USER: ${POSTGRES_USER:-postgres}
      GITEA__database__PASSWD: ${POSTGRES_PASSWORD:-changeme}
      GITEA__server__DOMAIN: ${GITEA_DOMAIN:-localhost}
      GITEA__server__ROOT_URL: ${GITEA_ROOT_URL:-http://localhost:3000}
      GITEA__server__HTTP_PORT: 3000
      GITEA__server__SSH_DOMAIN: ${GITEA_SSH_DOMAIN:-localhost}
      # Advertised SSH port (host side) vs. the in-container listen port below.
      GITEA__server__SSH_PORT: ${GITEA_SSH_PORT:-222}
      GITEA__server__START_SSH_SERVER: "true"
      GITEA__server__SSH_LISTEN_PORT: 222
      # Locked install + closed registration: single-operator deployment.
      GITEA__security__INSTALL_LOCK: ${GITEA_SECURITY_INSTALL_LOCK:-true}
      GITEA__service__DISABLE_REGISTRATION: "true"
      GITEA__server__OFFLINE_MODE: ${GITEA_OFFLINE_MODE:-true}
    volumes:
      - gitea-data:/var/lib/gitea
      - gitea-config:/etc/gitea
      - gitea-logs:/var/log/gitea
    ports:
      - "${GITEA_HTTP_PORT:-3000}:3000"
      - "${GITEA_SSH_PORT:-222}:222"
    healthcheck:
      test: ["CMD-SHELL", "wget -q --spider http://localhost:3000/api/healthz || exit 1"]
      interval: 15s
      timeout: 5s
      retries: 10
      start_period: 45s
    depends_on:
      postgres:
        condition: service_healthy
    networks:
      - internal

  n8n:
    image: n8nio/n8n:latest
    container_name: abletonmcp-n8n
    restart: unless-stopped
    environment:
      DB_TYPE: postgresdb
      DB_POSTGRESDB_HOST: postgres
      DB_POSTGRESDB_PORT: 5432
      DB_POSTGRESDB_DATABASE: ${N8N_DB_NAME:-n8n}
      DB_POSTGRESDB_USER: ${POSTGRES_USER:-postgres}
      DB_POSTGRESDB_PASSWORD: ${POSTGRES_PASSWORD:-changeme}
      N8N_PORT: 5678
      N8N_PROTOCOL: http
      N8N_HOST: ${N8N_HOST:-localhost}
      N8N_PATH: ${N8N_PATH:-/}
      # NOTE(review): the default encryption key is a placeholder; changing it
      # after credentials exist makes them undecryptable - set it once in .env.
      N8N_ENCRYPTION_KEY: ${N8N_ENCRYPTION_KEY:-changeme-change-this}
      N8N_LOG_LEVEL: ${N8N_LOG_LEVEL:-info}
      N8N_EXECUTIONS_MODE: ${N8N_EXECUTIONS_MODE:-regular}
      N8N_BASIC_AUTH_ACTIVE: ${N8N_BASIC_AUTH_ACTIVE:-true}
      N8N_BASIC_AUTH_USER: ${N8N_BASIC_AUTH_USER:-admin}
      N8N_BASIC_AUTH_PASSWORD: ${N8N_BASIC_AUTH_PASSWORD:-changeme}
      N8N_COOKIE_POLICY: ${N8N_COOKIE_POLICY:-lax}
      N8N_HOST_ALLOW_LIST: ${N8N_HOST_ALLOW_LIST:-localhost,127.0.0.1}
      N8N_WEBHOOK_URL: ${N8N_WEBHOOK_URL:-http://localhost:5678/}
      N8N_EDITOR_BASE_URL: ${N8N_EDITOR_BASE_URL:-http://localhost:5678}
      GENERIC_TIMEZONE: ${TZ:-UTC}
      TZ: ${TZ:-UTC}
      # Keep the instance quiet: no telemetry, no update nags.
      N8N_DIAGNOSTICS_ENABLED: ${N8N_DIAGNOSTICS_ENABLED:-false}
      N8N_VERSION_NOTIFICATIONS_ENABLED: ${N8N_VERSION_NOTIFICATIONS_ENABLED:-false}
    volumes:
      - n8n-data:/home/node/.n8n
      - n8n-logs:/home/node/.npm/_logs
      # Project tree (writable) and the bundled workflows (read-only); the
      # default path is the Windows mount as seen from WSL.
      - ${PROJECT_PATH:-/mnt/c/ProgramData/Ableton/Live 12 Suite/Resources/MIDI Remote Scripts/AbletonMCP_AI}:/project:rw
      - ${PROJECT_PATH:-/mnt/c/ProgramData/Ableton/Live 12 Suite/Resources/MIDI Remote Scripts/AbletonMCP_AI}/automation/workflows:/workflows:ro
    ports:
      - "${N8N_PORT:-5678}:5678"
    healthcheck:
      test: ["CMD-SHELL", "wget -q --spider http://localhost:5678/healthz || exit 1"]
      interval: 15s
      timeout: 5s
      retries: 10
      start_period: 45s
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy
    networks:
      - internal

networks:
  internal:
    name: abletonmcp-network
    driver: bridge

# Named volumes so the stack survives container recreation.
volumes:
  postgres-data:
    name: abletonmcp-postgres-data
  gitea-data:
    name: abletonmcp-gitea-data
  gitea-config:
    name: abletonmcp-gitea-config
  gitea-logs:
    name: abletonmcp-gitea-logs
  redis-data:
    name: abletonmcp-redis-data
  n8n-data:
    name: abletonmcp-n8n-data
  n8n-logs:
    name: abletonmcp-n8n-logs
|
||||
@@ -0,0 +1,18 @@
|
||||
#!/bin/sh
# Postgres init hook (runs from /docker-entrypoint-initdb.d on the first
# container start): creates the Gitea and n8n databases when the corresponding
# environment variables name them.
set -eu

# create_db NAME - create database NAME if it does not already exist.
# CREATE DATABASE has no IF NOT EXISTS clause, so the SELECT builds the
# statement text and psql's \gexec executes it only when a row was produced.
create_db() {
    db_name="$1"
    psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "${POSTGRES_BOOTSTRAP_DB:-postgres}" <<-EOSQL
SELECT 'CREATE DATABASE "${db_name}"'
WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = '${db_name}')\gexec
EOSQL
}

# Database names are injected through the compose file's environment block.
if [ -n "${GITEA_DB_NAME:-}" ]; then
    create_db "$GITEA_DB_NAME"
fi

if [ -n "${N8N_DB_NAME:-}" ]; then
    create_db "$N8N_DB_NAME"
fi
|
||||
@@ -0,0 +1,12 @@
|
||||
#!/usr/bin/env bash
# Installs the ableton-glm-loop systemd unit from this directory and
# (re)starts it. Each systemctl step requires root, hence the sudo calls.
set -euo pipefail

here="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
unit_name="ableton-glm-loop.service"
unit_source="$here/$unit_name"
unit_target="/etc/systemd/system/$unit_name"

sudo cp "$unit_source" "$unit_target"
sudo systemctl daemon-reload
sudo systemctl enable "$unit_name"
sudo systemctl restart "$unit_name"
# The status dump is informational only - never fail the install on it.
sudo systemctl status --no-pager "$unit_name" || true
|
||||
@@ -0,0 +1,59 @@
|
||||
#!/usr/bin/env bash
# run_glm_codex_loop.sh - one full cycle for a task: run the GLM worker
# (run_glm_cycle.sh), then optionally hand the result to a Windows-side Codex
# review. Usage: run_glm_codex_loop.sh TASK_FILE REPORT_FILE
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
LOCAL_ENV_FILE="${LOCAL_ENV_FILE:-$PROJECT_ROOT/automation/wsl.local.env}"

# Optional machine-local overrides (tokens, model names, ...).
if [[ -f "$LOCAL_ENV_FILE" ]]; then
    # shellcheck disable=SC1090
    source "$LOCAL_ENV_FILE"
fi

TASK_FILE="${1:?task file is required}"
REPORT_FILE="${2:?report file is required}"
# NOTE: GLM_MODEL is defaulted for symmetry but not exported here;
# run_glm_cycle.sh derives its own value from the environment.
GLM_MODEL="${GLM_MODEL:-glm-5}"
CODEX_MODEL="${CODEX_MODEL:-gpt-5.4}"
SKIP_CODEX_REVIEW="${SKIP_CODEX_REVIEW:-0}"
CODEX_HOME="${CODEX_HOME:-$PROJECT_ROOT/automation/wsl_runtime/codex_home}"
export CODEX_HOME
if [[ -n "${OPENAI_API_KEY:-}" ]]; then
    export OPENAI_API_KEY
fi

# Per-run artifact directory, timestamped for uniqueness.
RUN_DIR="$PROJECT_ROOT/automation/runs/loop_$(date +%Y%m%d_%H%M%S)"
CODEX_STDOUT_PATH="$RUN_DIR/codex_stdout.txt"
CODEX_MESSAGE_PATH="$RUN_DIR/codex_last_message.txt"
mkdir -p "$RUN_DIR"

# Best-effort Telegram notification; failures never abort the loop.
notify() {
    "$SCRIPT_DIR/send_telegram.sh" "$1" || true
}

notify "GLM/Codex loop started: $(basename "$TASK_FILE")"
"$SCRIPT_DIR/run_glm_cycle.sh" "$TASK_FILE" "$REPORT_FILE"

if [[ "$SKIP_CODEX_REVIEW" == "1" ]]; then
    notify "GLM/Codex loop finished without Codex review: $(basename "$TASK_FILE")"
    exit 0
fi

notify "Codex review started: $(basename "$TASK_FILE")"

# The Codex review runs on the Windows side, so translate all paths first.
WIN_TASK_FILE="$(wslpath -w "$TASK_FILE")"
WIN_REPORT_FILE="$(wslpath -w "$REPORT_FILE")"
WIN_PROJECT_ROOT="$(wslpath -w "$PROJECT_ROOT")"
WIN_CODEX_MESSAGE_PATH="$(wslpath -w "$CODEX_MESSAGE_PATH")"
WIN_REVIEW_SCRIPT="$(wslpath -w "$PROJECT_ROOT/automation/invoke_codex_review.ps1")"

# pipefail (set above) lets `if !` observe powershell.exe's status, not tee's.
if ! /mnt/c/Windows/System32/WindowsPowerShell/v1.0/powershell.exe -NoProfile -ExecutionPolicy Bypass -File "$WIN_REVIEW_SCRIPT" -TaskFile "$WIN_TASK_FILE" -ReportFile "$WIN_REPORT_FILE" -ProjectRoot "$WIN_PROJECT_ROOT" -OutputFile "$WIN_CODEX_MESSAGE_PATH" -CodexModel "$CODEX_MODEL" 2>&1 | tee "$CODEX_STDOUT_PATH"; then
    notify "Codex review failed: $(basename "$TASK_FILE")"
    exit 1
fi

notify "GLM/Codex loop finished: $(basename "$TASK_FILE")"
echo "Loop finished"
echo "Task: $TASK_FILE"
echo "GLM report: $REPORT_FILE"
echo "Codex note: $CODEX_MESSAGE_PATH"
# FIX: was "Codex stdout:$CODEX_STDOUT_PATH" - missing the space after the
# colon, inconsistent with every other summary line above.
echo "Codex stdout: $CODEX_STDOUT_PATH"
|
||||
@@ -0,0 +1,92 @@
|
||||
#!/usr/bin/env bash
# run_glm_cycle.sh - run the GLM worker (claude CLI against a GLM-compatible
# endpoint) once for a task file, and require that it wrote the report file.
# Usage: run_glm_cycle.sh TASK_FILE REPORT_FILE
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
LOCAL_ENV_FILE="${LOCAL_ENV_FILE:-$PROJECT_ROOT/automation/wsl.local.env}"

# Optional machine-local overrides (tokens, model names, ...).
if [[ -f "$LOCAL_ENV_FILE" ]]; then
    # shellcheck disable=SC1090
    source "$LOCAL_ENV_FILE"
fi

TASK_FILE="${1:?task file is required}"
REPORT_FILE="${2:?report file is required}"
GLM_MODEL="${GLM_MODEL:-glm-5}"
GLM_AGENTS_FILE="${GLM_AGENTS_FILE:-$PROJECT_ROOT/automation/glm_agents.team.json}"

# Point the claude CLI at the GLM-compatible Anthropic endpoint and pin every
# model tier (haiku/sonnet/opus aliases included) to the configured GLM model.
export ANTHROPIC_BASE_URL="${ANTHROPIC_BASE_URL:-https://coding-intl.dashscope.aliyuncs.com/apps/anthropic}"
export ANTHROPIC_AUTH_TOKEN="${ANTHROPIC_AUTH_TOKEN:?ANTHROPIC_AUTH_TOKEN is required}"
export CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC="1"
export ANTHROPIC_MODEL="$GLM_MODEL"
export ANTHROPIC_SMALL_FAST_MODEL="$GLM_MODEL"
export ANTHROPIC_DEFAULT_HAIKU_MODEL="$GLM_MODEL"
export ANTHROPIC_DEFAULT_SONNET_MODEL="$GLM_MODEL"
export ANTHROPIC_DEFAULT_OPUS_MODEL="$GLM_MODEL"

# Per-run artifact directory; also pre-create the report's parent directory.
RUN_DIR="$PROJECT_ROOT/automation/runs/glm_$(date +%Y%m%d_%H%M%S)"
STDOUT_PATH="$RUN_DIR/glm_stdout.txt"
mkdir -p "$RUN_DIR" "$(dirname "$REPORT_FILE")"

# Best-effort Telegram notification; failures never abort the cycle.
notify() {
    "$SCRIPT_DIR/send_telegram.sh" "$1" || true
}

# Unquoted heredoc: $PROJECT_ROOT / $TASK_FILE / $REPORT_FILE expand into the
# prompt text handed to the worker.
PROMPT=$(cat <<EOF
You are running as the GLM worker on this Linux repository.

Repository root:
$PROJECT_ROOT

Task file to follow exactly:
$TASK_FILE

You must:
1. Read the task markdown and implement the requested changes in the repository.
2. Run the validations requested by the task.
3. Create or overwrite this report file with a truthful report:
$REPORT_FILE
4. Do not overclaim. If something is incomplete, say so explicitly in the report.
5. Keep the diff focused.
6. If custom agents are available, use them aggressively and in parallel where safe:
- planner first
- implementer_core and implementer_aux for disjoint work
- validator before finishing
- retrieval_reviewer or runtime_guard when relevant
- reporter last

Open and follow the task markdown from disk instead of asking for the task again.
EOF
)

# claude CLI flags: -p (non-interactive print mode), skip permission prompts,
# maximum effort, pinned model, and the repository added as a working dir.
ARGS=(
    -p
    --dangerously-skip-permissions
    --effort max
    --model "$GLM_MODEL"
    --add-dir "$PROJECT_ROOT"
)

# Attach the custom agent-team definition when it exists on disk.
if [[ -f "$GLM_AGENTS_FILE" ]]; then
    AGENTS_JSON="$(cat "$GLM_AGENTS_FILE")"
    ARGS+=(--agents "$AGENTS_JSON")
fi

notify "GLM worker started: $(basename "$TASK_FILE")"

# pipefail (set above) lets `if !` observe claude's status rather than tee's.
if ! printf '%s\n' "$PROMPT" | claude "${ARGS[@]}" 2>&1 | tee "$STDOUT_PATH"; then
    notify "GLM worker failed: $(basename "$TASK_FILE")"
    exit 1
fi

# The worker is contractually required to write the report; absence = failure.
if [[ ! -f "$REPORT_FILE" ]]; then
    notify "GLM worker failed: missing report for $(basename "$TASK_FILE")"
    echo "missing report: $REPORT_FILE" >&2
    exit 1
fi

notify "GLM worker finished: $(basename "$TASK_FILE")"
echo "GLM cycle finished"
echo "Task: $TASK_FILE"
echo "Report: $REPORT_FILE"
echo "Stdout: $STDOUT_PATH"
|
||||
@@ -0,0 +1,106 @@
|
||||
#!/usr/bin/env bash
# run_task_queue.sh - drains the JSON task queue inside WSL: each enabled
# pending task is run through run_glm_codex_loop.sh, with status transitions
# persisted back into the queue file via jq.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
QUEUE_FILE="${QUEUE_FILE:-$PROJECT_ROOT/automation/task_queue.json}"
LOCAL_ENV_FILE="${LOCAL_ENV_FILE:-$PROJECT_ROOT/automation/wsl.local.env}"
POLL_SECONDS="${POLL_SECONDS:-30}"
WATCH="${WATCH:-1}"
CONTINUE_ON_ERROR="${CONTINUE_ON_ERROR:-1}"

# Optional machine-local overrides (tokens, model names, ...).
if [[ -f "$LOCAL_ENV_FILE" ]]; then
    # shellcheck disable=SC1090
    source "$LOCAL_ENV_FILE"
fi

# Best-effort Telegram notification; failures never abort the runner.
notify() {
    "$SCRIPT_DIR/send_telegram.sh" "$1" || true
}

# True (exit 0) when at least one enabled task is still pending.
queue_has_pending() {
    jq -e '.tasks[] | select(.enabled == true and .status == "pending")' "$QUEUE_FILE" >/dev/null
}

# Emit the first enabled+pending task as base64(JSON) so it survives word
# splitting in the shell.
read_next_task() {
    jq -r '.tasks[] | select(.enabled == true and .status == "pending") | @base64' "$QUEUE_FILE" | head -n 1
}

# apply_queue_filter JQ_ARGS... FILTER
# Rewrite the queue file through jq atomically (temp file + rename in the same
# directory), so an interrupted write can never truncate the queue.
# FIX(consistency): this helper replaces the tmpfile/jq/mv boilerplate that was
# duplicated verbatim in update_task_status and set_task_error.
apply_queue_filter() {
    local tmp
    local queue_dir
    queue_dir="$(dirname "$QUEUE_FILE")"
    tmp="$(mktemp "$queue_dir/.task_queue.tmp.XXXXXX")"
    jq "$@" "$QUEUE_FILE" > "$tmp"
    mv "$tmp" "$QUEUE_FILE"
}

# update_task_status ID STATUS FIELD VALUE - set .status plus one extra field
# (a timestamp such as started_at/completed_at/failed_at) on the matching task.
update_task_status() {
    local task_id="$1"
    local status="$2"
    local field="$3"
    local value="$4"
    apply_queue_filter --arg id "$task_id" --arg status "$status" --arg field "$field" --arg value "$value" '
    .tasks |= map(
      if .id == $id then
        .status = $status | .[$field] = $value
      else
        .
      end
    )'
}

# set_task_error ID MESSAGE - record the failure message on the matching task.
set_task_error() {
    local task_id="$1"
    local message="$2"
    apply_queue_filter --arg id "$task_id" --arg msg "$message" '
    .tasks |= map(
      if .id == $id then
        .error = $msg
      else
        .
      end
    )'
}

notify "AbletonMCP_AI queue runner started on $(date '+%Y-%m-%d %H:%M:%S')"

while true; do
    # No runnable work: keep polling in watch mode, otherwise exit cleanly.
    if ! queue_has_pending; then
        if [[ "$WATCH" == "1" ]]; then
            sleep "$POLL_SECONDS"
            continue
        fi
        break
    fi

    task_b64="$(read_next_task)"
    if [[ -z "$task_b64" ]]; then
        sleep "$POLL_SECONDS"
        continue
    fi

    task_json="$(printf '%s' "$task_b64" | base64 -d)"
    task_id="$(printf '%s' "$task_json" | jq -r '.id')"
    task_title="$(printf '%s' "$task_json" | jq -r '.title')"
    task_file_rel="$(printf '%s' "$task_json" | jq -r '.task_file')"
    report_file_rel="$(printf '%s' "$task_json" | jq -r '.report_file')"
    # Queue entries may use Windows-style backslashes; normalise to slashes.
    task_file="$PROJECT_ROOT/${task_file_rel//\\//}"
    report_file="$PROJECT_ROOT/${report_file_rel//\\//}"

    update_task_status "$task_id" "running" "started_at" "$(date -Iseconds)"
    notify "Queue task started: [$task_id] $task_title"

    if "$SCRIPT_DIR/run_glm_codex_loop.sh" "$task_file" "$report_file"; then
        update_task_status "$task_id" "completed" "completed_at" "$(date -Iseconds)"
        notify "Queue task completed: [$task_id] $task_title"
    else
        update_task_status "$task_id" "failed" "failed_at" "$(date -Iseconds)"
        set_task_error "$task_id" "task runner failed"
        notify "Queue task failed: [$task_id] $task_title"
        if [[ "$CONTINUE_ON_ERROR" != "1" ]]; then
            exit 1
        fi
    fi
done
|
||||
@@ -0,0 +1,281 @@
|
||||
#!/usr/bin/env bash
#
# install.sh - Install Docker, Docker Compose, and local Python runtime on Ubuntu 24.04 WSL2
# Idempotent: safe to run multiple times
#

set -euo pipefail

# ANSI colors for the log helpers below.
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly NC='\033[0m'

log_info() { echo -e "${GREEN}[INFO]${NC} $*"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $*"; }
log_error() { echo -e "${RED}[ERROR]${NC} $*"; }

# Layout: <project>/automation/wsl/<this script>; derive the tree from here.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
WSL_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
AUTOMATION_DIR="$(cd "$WSL_DIR/.." && pwd)"
PROJECT_ROOT="$(cd "$AUTOMATION_DIR/.." && pwd)"
RUNTIME_DIR="$AUTOMATION_DIR/wsl_runtime"
VENV_DIR="$RUNTIME_DIR/venv"

# Refuse to run as root: the steps that need elevation use sudo themselves.
check_sudo() {
    if [[ $EUID -eq 0 ]]; then
        log_error "This script should not be run as root. It will use sudo when needed."
        exit 1
    fi
}

# Warn (but do not abort) when the distro is not Ubuntu.
detect_ubuntu() {
    if [[ ! -f /etc/os-release ]]; then
        log_error "Cannot detect OS version. /etc/os-release not found."
        exit 1
    fi

    # shellcheck disable=SC1091
    source /etc/os-release
    if [[ "${ID:-}" != "ubuntu" ]]; then
        log_warn "This script is designed for Ubuntu. Detected: ${ID:-unknown}"
    fi

    log_info "Detected Ubuntu ${VERSION_ID:-unknown}"
}

# Warn when not running under WSL (detected via /proc/version).
check_wsl2() {
    if [[ ! -f /proc/version ]]; then
        log_warn "Cannot verify WSL environment"
        return
    fi

    if grep -qi microsoft /proc/version; then
        log_info "Running in WSL environment"
    else
        log_warn "Not running in WSL. This script is designed for WSL2."
    fi
}

# Install Docker Engine + Compose plugin from Docker's apt repository,
# add the user to the docker group, and start the daemon.
install_docker() {
    log_info "Checking Docker installation..."

    if command -v docker >/dev/null 2>&1; then
        log_info "Docker already installed: $(docker --version)"
    else
        log_info "Installing Docker..."
        sudo apt-get update -q
        sudo apt-get install -y \
            ca-certificates \
            curl \
            gnupg \
            lsb-release \
            software-properties-common

        # Docker's signing key, installed once.
        sudo install -m 0755 -d /etc/apt/keyrings
        if [[ ! -f /etc/apt/keyrings/docker.gpg ]]; then
            curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg
            sudo chmod a+r /etc/apt/keyrings/docker.gpg
        fi

        local codename
        codename=$(. /etc/os-release && echo "$VERSION_CODENAME")
        sudo tee /etc/apt/sources.list.d/docker.list >/dev/null <<EOF
deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu $codename stable
EOF

        sudo apt-get update -q
        sudo apt-get install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
    fi

    # docker group membership so the user can talk to the daemon without sudo.
    if ! groups "$USER" | grep -q '\bdocker\b'; then
        log_info "Adding user $USER to docker group..."
        sudo usermod -aG docker "$USER"
        log_warn "A new login session may be needed for docker group membership."
    fi

    sudo systemctl enable docker
    sudo systemctl start docker
}

# Install Python 3 with pip/venv support if it is missing.
install_python() {
    log_info "Checking Python installation..."

    if command -v python3 >/dev/null 2>&1; then
        log_info "Python already installed: $(python3 --version)"
    else
        sudo apt-get update -q
        sudo apt-get install -y python3 python3-pip python3-venv python3-full
    fi
}

# Baseline CLI tooling used by the automation scripts (jq is mandatory).
install_utilities() {
    log_info "Installing system utilities..."

    sudo apt-get update -q
    sudo apt-get install -y \
        jq \
        git \
        curl \
        wget \
        rsync \
        net-tools \
        dnsutils \
        htop \
        ncdu \
        tree \
        unzip \
        zip \
        httpie \
        python3-rich \
        pipx
}

# First-time daemon.json (log rotation, containerd snapshotter, no iptables
# management - WSL networking) plus a DOCKER_HOST export in ~/.bashrc.
configure_docker_wsl2() {
    log_info "Configuring Docker for WSL..."

    local docker_config_dir="/etc/docker"
    local docker_config_file="$docker_config_dir/daemon.json"

    # Only write the config once; never clobber local daemon customisations.
    if [[ ! -f "$docker_config_file" ]]; then
        sudo mkdir -p "$docker_config_dir"
        sudo tee "$docker_config_file" >/dev/null <<'EOF'
{
  "log-driver": "json-file",
  "log-opts": {
    "max-size": "10m",
    "max-file": "3"
  },
  "features": {
    "containerd-snapshotter": true
  },
  "iptables": false
}
EOF
        sudo systemctl restart docker
    fi

    local bashrc_file="$HOME/.bashrc"
    if ! grep -q 'WSL Docker helpers' "$bashrc_file" 2>/dev/null; then
        cat >> "$bashrc_file" <<'EOF'

# WSL Docker helpers
export DOCKER_HOST=unix:///var/run/docker.sock
EOF
    fi
}

# Symlink ~/ableton-mcp-ai to the project tree on the Windows mount.
handle_windows_paths() {
    log_info "Ensuring project symlink exists..."
    # FIX: the previous `[[ ! -L ... ]]` guard skipped ln entirely when a
    # symlink already existed, so a stale link pointing at an old location was
    # never repaired. `ln -sfn` is idempotent - run it unconditionally.
    ln -sfn "$PROJECT_ROOT" "$HOME/ableton-mcp-ai"
}

# Create/refresh the runtime venv and install any requirements files found.
install_python_dependencies() {
    log_info "Preparing local virtual environment..."
    mkdir -p "$RUNTIME_DIR"

    if [[ ! -d "$VENV_DIR" ]]; then
        python3 -m venv "$VENV_DIR"
    fi

    # shellcheck disable=SC1091
    source "$VENV_DIR/bin/activate"
    python -m pip install --upgrade pip

    local found_req=false
    local requirements_files=(
        "$PROJECT_ROOT/MCP_Server/requirements.txt"
        "$PROJECT_ROOT/requirements.txt"
    )

    for req_file in "${requirements_files[@]}"; do
        if [[ -f "$req_file" ]]; then
            log_info "Installing dependencies from: $req_file"
            python -m pip install -r "$req_file"
            found_req=true
        fi
    done

    if [[ "$found_req" == "false" ]]; then
        log_warn "No requirements.txt files found"
    fi

    deactivate
}

# Post-install smoke checks; returns non-zero when anything is missing.
verify_installation() {
    log_info "Verifying installation..."

    local all_good=true

    if command -v docker >/dev/null 2>&1; then
        log_info "OK Docker: $(docker --version)"
    else
        log_error "FAIL Docker not found"
        all_good=false
    fi

    if docker compose version >/dev/null 2>&1; then
        log_info "OK Docker Compose: $(docker compose version)"
    else
        log_error "FAIL Docker Compose not found"
        all_good=false
    fi

    if command -v python3 >/dev/null 2>&1; then
        log_info "OK Python: $(python3 --version)"
    else
        log_error "FAIL Python3 not found"
        all_good=false
    fi

    if [[ -x "$VENV_DIR/bin/python" ]]; then
        log_info "OK Venv: $VENV_DIR"
    else
        log_error "FAIL Venv not found at $VENV_DIR"
        all_good=false
    fi

    if command -v jq >/dev/null 2>&1; then
        log_info "OK jq installed"
    else
        log_error "FAIL jq not found"
        all_good=false
    fi

    if [[ "$all_good" == "true" ]]; then
        log_info "All dependencies installed successfully"
        return 0
    fi

    log_error "Some dependencies failed to install"
    return 1
}

# Orchestrates the full install; individual steps are idempotent.
main() {
    log_info "Starting AbletonMCP-AI WSL installation..."
    echo

    check_sudo
    detect_ubuntu
    check_wsl2
    echo

    install_docker
    install_python
    install_utilities
    configure_docker_wsl2
    handle_windows_paths
    install_python_dependencies
    echo

    verify_installation
    echo

    log_info "Installation complete"
    log_info "Next step: run ./setup.sh and then ./start.sh"
}

main "$@"
|
||||
@@ -0,0 +1,21 @@
|
||||
#!/usr/bin/env bash
# Install the AbletonMCP systemd unit files into /etc/systemd/system.
# Must be run as root. Enables the stack and queue-runner units; the
# optional GLM runner unit is intentionally left disabled.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
WSL_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
SYSTEMD_DIR="$WSL_DIR/systemd"

if [[ $EUID -ne 0 ]]; then
    echo "Run with sudo"
    exit 1
fi

# Fix: without nullglob an empty systemd/ directory left the glob
# unexpanded, so cp failed on a literal '*.service' path with a
# confusing error. Collect matches explicitly and fail with a clear
# message instead.
shopt -s nullglob
service_files=("$SYSTEMD_DIR"/*.service)
if [[ ${#service_files[@]} -eq 0 ]]; then
    echo "No .service files found in $SYSTEMD_DIR" >&2
    exit 1
fi

for service_file in "${service_files[@]}"; do
    cp "$service_file" /etc/systemd/system/"$(basename "$service_file")"
done

systemctl daemon-reload
systemctl enable abletonmcp-stack.service abletonmcp-queue-runner.service
echo "Installed systemd units"
echo "Enabled by default: abletonmcp-stack.service, abletonmcp-queue-runner.service"
echo "Optional unit left disabled: abletonmcp-glm-runner.service"
|
||||
@@ -0,0 +1,39 @@
|
||||
#!/usr/bin/env bash
# Tail the stack's logs.
# Usage: logs.sh [all|docker|queue]
#   docker - follow docker compose logs
#   queue  - follow the queue-runner log file
#   all    - follow both concurrently (default)
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
WSL_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
AUTOMATION_DIR="$(cd "$WSL_DIR/.." && pwd)"
DOCKER_ENV_FILE="$WSL_DIR/.env"
COMPOSE_FILE="$WSL_DIR/docker-compose.yml"
LOGS_DIR="$AUTOMATION_DIR/wsl_runtime/logs"

follow="${1:-all}"

# All compose invocations share the same env file and compose file.
compose_cmd() {
    docker compose --env-file "$DOCKER_ENV_FILE" -f "$COMPOSE_FILE" "$@"
}

case "$follow" in
    docker)
        compose_cmd logs -f
        ;;
    queue)
        # Fix: 'tail -f' on a missing file used to abort with tail's
        # raw error under set -e; report the expected path instead.
        if [[ ! -f "$LOGS_DIR/queue-runner.log" ]]; then
            echo "No queue-runner log at $LOGS_DIR/queue-runner.log" >&2
            exit 1
        fi
        tail -f "$LOGS_DIR/queue-runner.log"
        ;;
    all)
        compose_cmd logs -f &
        docker_pid=$!
        # Fix: reap the background tails when this script exits, so a
        # Ctrl-C (or SIGTERM) does not leave orphan tail processes.
        trap 'kill "$docker_pid" ${tail_pid:-} 2>/dev/null || true' EXIT
        if [[ -f "$LOGS_DIR/queue-runner.log" ]]; then
            tail -f "$LOGS_DIR/queue-runner.log" &
            tail_pid=$!
            wait "$docker_pid" "$tail_pid"
        else
            wait "$docker_pid"
        fi
        ;;
    *)
        echo "Usage: $0 [all|docker|queue]"
        exit 1
        ;;
esac
|
||||
@@ -0,0 +1,8 @@
|
||||
#!/usr/bin/env bash
# Restart the whole stack by delegating to the sibling stop/start scripts.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

"$SCRIPT_DIR/stop.sh"
# Short grace period so Docker fully releases ports before the restart.
sleep 2
"$SCRIPT_DIR/start.sh"
|
||||
@@ -0,0 +1,140 @@
|
||||
#!/usr/bin/env bash
# setup.sh - first-run preparation: runtime directories, project
# symlink, and generated env files for Docker and the task runners.
set -euo pipefail

# ANSI colours for the log helpers below.
# NOTE(review): RED is declared but no log_error helper exists in this
# script — kept for parity with the sibling scripts.
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly NC='\033[0m'

log_info() { echo -e "${GREEN}[INFO]${NC} $*"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $*"; }
log_step() { echo -e "${BLUE}[STEP]${NC} $*"; }

# Paths resolved relative to this script's own location.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
WSL_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
AUTOMATION_DIR="$(cd "$WSL_DIR/.." && pwd)"
PROJECT_ROOT="$(cd "$AUTOMATION_DIR/.." && pwd)"
RUNTIME_DIR="$AUTOMATION_DIR/wsl_runtime"
DOCKER_ENV_FILE="$WSL_DIR/.env"
RUNNER_ENV_FILE="$AUTOMATION_DIR/wsl.local.env"
# Stable shortcut to the project root in the user's home directory.
PROJECT_LINK="$HOME/ableton-mcp-ai"
|
||||
|
||||
# Emit a random hex string of N bytes (default 16, i.e. 2N hex chars).
# Prefers openssl; falls back to Python's secrets module.
# Fix: the fallback previously hard-coded token_hex(16) and ignored the
# requested length, so 'generate_secret 32' (used for the n8n encryption
# key) silently produced a 16-byte secret on hosts without openssl.
generate_secret() {
    local nbytes="${1:-16}"
    openssl rand -hex "$nbytes" 2>/dev/null || python3 - "$nbytes" <<'PY'
import secrets
import sys

print(secrets.token_hex(int(sys.argv[1])))
PY
}
|
||||
|
||||
# Create every runtime/working directory the stack expects (idempotent).
ensure_dirs() {
    log_step "Creating runtime directories"
    local required_dir
    for required_dir in \
        "$RUNTIME_DIR/logs" \
        "$RUNTIME_DIR/pids" \
        "$RUNTIME_DIR/data" \
        "$AUTOMATION_DIR/reports" \
        "$AUTOMATION_DIR/runs" \
        "$AUTOMATION_DIR/tasks" \
        "$AUTOMATION_DIR/workflows" \
        "$WSL_DIR/initdb"; do
        mkdir -p "$required_dir"
    done
}
|
||||
|
||||
# Maintain a stable symlink from $PROJECT_LINK to the project root.
# Fix: if the path already exists as a *real* directory (not a symlink),
# 'ln -sfn' would silently create the new link inside that directory;
# detect the collision and warn instead of corrupting it.
ensure_symlink() {
    if [[ -e "$PROJECT_LINK" && ! -L "$PROJECT_LINK" ]]; then
        log_warn "$PROJECT_LINK exists and is not a symlink; leaving it untouched"
        return
    fi
    if [[ ! -L "$PROJECT_LINK" ]]; then
        ln -sfn "$PROJECT_ROOT" "$PROJECT_LINK"
    fi
    log_info "Project link: $PROJECT_LINK"
}
|
||||
|
||||
# Generate the docker compose env file on first run.
# Values are single-quoted so start.sh can safely 'source' the file;
# docker compose reads the same file via --env-file.
# NOTE(review): secrets are generated once and persisted — re-running
# setup never rotates them (early return below). Confirm that is the
# intended lifecycle.
write_docker_env() {
    if [[ -f "$DOCKER_ENV_FILE" ]]; then
        log_info "Docker env already exists: $DOCKER_ENV_FILE"
        return
    fi

    log_step "Generating docker env"
    # Heredoc is unquoted on purpose: $PROJECT_ROOT and the
    # $(generate_secret N) substitutions expand at write time.
    cat > "$DOCKER_ENV_FILE" <<EOF
PROJECT_PATH='$PROJECT_ROOT'
TZ='America/Buenos_Aires'
POSTGRES_USER='postgres'
POSTGRES_PASSWORD='$(generate_secret 16)'
POSTGRES_BOOTSTRAP_DB='postgres'
POSTGRES_PORT='5432'
GITEA_DOMAIN='localhost'
GITEA_ROOT_URL='http://localhost:3000'
GITEA_HTTP_PORT='3000'
GITEA_SSH_DOMAIN='localhost'
GITEA_SSH_PORT='222'
GITEA_ADMIN_USER='giteaadmin'
GITEA_ADMIN_PASSWORD='$(generate_secret 16)'
GITEA_ADMIN_EMAIL='admin@localhost'
GITEA_DB_NAME='gitea'
GITEA_SECURITY_INSTALL_LOCK='true'
GITEA_OFFLINE_MODE='true'
REDIS_PASSWORD='$(generate_secret 16)'
REDIS_PORT='6379'
N8N_HOST='localhost'
N8N_PORT='5678'
N8N_PATH='/'
N8N_WEBHOOK_URL='http://localhost:5678/'
N8N_EDITOR_BASE_URL='http://localhost:5678'
N8N_DB_NAME='n8n'
N8N_ENCRYPTION_KEY='$(generate_secret 32)'
N8N_BASIC_AUTH_ACTIVE='true'
N8N_BASIC_AUTH_USER='admin'
N8N_BASIC_AUTH_PASSWORD='$(generate_secret 16)'
N8N_HOST_ALLOW_LIST='localhost,127.0.0.1'
N8N_EXECUTIONS_MODE='regular'
N8N_LOG_LEVEL='info'
N8N_DIAGNOSTICS_ENABLED='false'
N8N_VERSION_NOTIFICATIONS_ENABLED='false'
N8N_COOKIE_POLICY='lax'
COMPOSE_PROJECT_NAME='abletonmcp'
EOF
    # Secrets inside: restrict to the owner.
    chmod 600 "$DOCKER_ENV_FILE"
}
|
||||
|
||||
# Generate the runner env file (API endpoints, model names, Telegram
# settings) on first run. Token values are intentionally left blank
# for the operator to fill in; existing files are never overwritten.
ensure_runner_env() {
    if [[ -f "$RUNNER_ENV_FILE" ]]; then
        log_info "Runner env already exists: $RUNNER_ENV_FILE"
        return
    fi

    log_step "Generating runner env"
    # Unquoted heredoc: $AUTOMATION_DIR expands at write time so the
    # file contains absolute paths.
    cat > "$RUNNER_ENV_FILE" <<EOF
export ANTHROPIC_BASE_URL=''
export ANTHROPIC_AUTH_TOKEN=''
export GLM_MODEL='glm-5'
export GLM_API_KEY=''
export CODEX_MODEL='gpt-5.4'
export TELEGRAM_BOT_TOKEN=''
export TELEGRAM_CHAT_ID=''
export CODEX_HOME='$AUTOMATION_DIR/wsl_runtime/codex_home'
export GLM_AGENTS_FILE='$AUTOMATION_DIR/glm_agents.team.json'
export POLL_SECONDS='30'
export WATCH='1'
export CONTINUE_ON_ERROR='1'
EOF
    # Will hold API tokens once filled in: owner-only access.
    chmod 600 "$RUNNER_ENV_FILE"
}
|
||||
|
||||
# Entry point: prepare everything start.sh needs, then print the
# follow-up checklist for the operator.
main() {
    log_info "Preparing AbletonMCP_AI WSL stack"
    ensure_dirs
    ensure_symlink
    write_docker_env
    ensure_runner_env
    echo
    log_info "Files ready:"
    echo " - $DOCKER_ENV_FILE"
    echo " - $RUNNER_ENV_FILE"
    echo
    log_info "Next:"
    echo " 1. Review tokens in $RUNNER_ENV_FILE"
    echo " 2. Review service passwords in $DOCKER_ENV_FILE"
    echo " 3. Run ./install.sh if Docker is not installed"
    echo " 4. Run ./start.sh"
}

main "$@"
|
||||
@@ -0,0 +1,143 @@
|
||||
#!/usr/bin/env bash
# start.sh - bring up the Docker stack and the background queue runner.
set -euo pipefail

# ANSI colours for the log helpers below.
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly NC='\033[0m'

log_info() { echo -e "${GREEN}[INFO]${NC} $*"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $*"; }
log_error() { echo -e "${RED}[ERROR]${NC} $*"; }
log_step() { echo -e "${BLUE}[STEP]${NC} $*"; }

# Paths resolved relative to this script's own location.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
WSL_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
AUTOMATION_DIR="$(cd "$WSL_DIR/.." && pwd)"
PROJECT_ROOT="$(cd "$AUTOMATION_DIR/.." && pwd)"
RUNNER_ENV_FILE="$AUTOMATION_DIR/wsl.local.env"
DOCKER_ENV_FILE="$WSL_DIR/.env"
COMPOSE_FILE="$WSL_DIR/docker-compose.yml"
RUNTIME_DIR="$AUTOMATION_DIR/wsl_runtime"
LOGS_DIR="$RUNTIME_DIR/logs"
PID_DIR="$RUNTIME_DIR/pids"
# Set START_QUEUE_RUNNER=0 to bring up only the Docker services.
START_QUEUE_RUNNER="${START_QUEUE_RUNNER:-1}"

mkdir -p "$LOGS_DIR" "$PID_DIR"

# The runner env consists of 'export VAR=...' lines, so a plain
# source is enough to get its variables into the environment.
if [[ -f "$RUNNER_ENV_FILE" ]]; then
    # shellcheck disable=SC1090
    source "$RUNNER_ENV_FILE"
fi

# The docker env has bare KEY='value' lines; 'set -a' auto-exports
# them so the ${POSTGRES_*:-}/${GITEA_*:-}/${N8N_*:-} defaults used
# later in this script see the configured values.
if [[ -f "$DOCKER_ENV_FILE" ]]; then
    # shellcheck disable=SC1090
    set -a
    source "$DOCKER_ENV_FILE"
    set +a
fi

# Single place that pins the env file and compose file for every call.
compose_cmd() {
    docker compose --env-file "$DOCKER_ENV_FILE" -f "$COMPOSE_FILE" "$@"
}
|
||||
|
||||
# Abort early with a clear message if any hard requirement is missing.
check_prerequisites() {
    log_step "Checking prerequisites"
    if ! command -v docker >/dev/null; then
        log_error "Docker is not installed"
        exit 1
    fi
    if ! docker compose version >/dev/null; then
        log_error "Docker Compose plugin is not available"
        exit 1
    fi
    if ! docker info >/dev/null; then
        log_error "Docker daemon is not running"
        exit 1
    fi
    if [[ ! -f "$DOCKER_ENV_FILE" ]]; then
        log_error "Missing docker env: $DOCKER_ENV_FILE"
        exit 1
    fi
    if [[ ! -f "$COMPOSE_FILE" ]]; then
        log_error "Missing compose file: $COMPOSE_FILE"
        exit 1
    fi
}
|
||||
|
||||
# Poll pg_isready inside the postgres container until it answers, at
# 2-second intervals; give up (fatally) after 60 attempts (~2 minutes).
wait_for_postgres() {
    log_info "Waiting for PostgreSQL"
    local attempt=0
    while (( attempt < 60 )); do
        if compose_cmd exec -T postgres pg_isready -U "${POSTGRES_USER:-postgres}" -d "${POSTGRES_BOOTSTRAP_DB:-postgres}" >/dev/null 2>&1; then
            return 0
        fi
        sleep 2
        (( attempt += 1 ))
    done
    log_error "PostgreSQL did not become ready in time"
    exit 1
}
|
||||
|
||||
# Poll an HTTP health endpoint until it responds, at 2-second
# intervals for up to 60 attempts. Non-fatal: returns 1 with a
# warning so callers can decide whether to proceed.
# $1 - human-readable service name, $2 - health-check URL.
wait_for_service_http() {
    local service="$1"
    local url="$2"
    log_info "Waiting for $service"
    local attempt=0
    while (( attempt < 60 )); do
        if curl -fsS "$url" >/dev/null 2>&1; then
            return 0
        fi
        sleep 2
        (( attempt += 1 ))
    done
    log_warn "$service is not healthy yet: $url"
    return 1
}
|
||||
|
||||
# Create a PostgreSQL database if it does not exist yet (idempotent).
# $1 - database name. It comes from our own generated env file, so it
# is trusted here; note it is interpolated into the SQL text directly
# (single-quoted in the SELECT, double-quoted identifier in CREATE).
ensure_database() {
    local db_name="$1"
    if compose_cmd exec -T postgres psql -U "${POSTGRES_USER:-postgres}" -d "${POSTGRES_BOOTSTRAP_DB:-postgres}" -tAc "SELECT 1 FROM pg_database WHERE datname='${db_name}'" | grep -q 1; then
        return 0
    fi
    compose_cmd exec -T postgres psql -U "${POSTGRES_USER:-postgres}" -d "${POSTGRES_BOOTSTRAP_DB:-postgres}" -c "CREATE DATABASE \"${db_name}\""
}
|
||||
|
||||
# Ensure the Gitea admin account exists (first-run bootstrap).
# Non-fatal: if creation fails (e.g. the first-run wizard has not
# completed) we warn and let the operator finish in the web UI.
ensure_gitea_admin() {
    local user="${GITEA_ADMIN_USER:-giteaadmin}"
    local password="${GITEA_ADMIN_PASSWORD:-changeme}"
    local email="${GITEA_ADMIN_EMAIL:-admin@localhost}"
    # awk: skip the header row, exact username match in column 2;
    # exit status 0 means the user already exists.
    if compose_cmd exec -T gitea sh -c "HOME=/tmp /usr/local/bin/gitea admin user list 2>/dev/null | awk 'NR > 1 && \$2 == \"${user}\" { found=1 } END { exit found ? 0 : 1 }'"; then
        return 0
    fi
    # NOTE(review): the password travels on a container command line and
    # is briefly visible to 'ps' inside the container — probably fine for
    # a local dev stack; confirm before reusing this pattern elsewhere.
    compose_cmd exec -T gitea sh -c "HOME=/tmp /usr/local/bin/gitea admin user create --admin --username '${user}' --password '${password}' --email '${email}' --must-change-password=false" >/dev/null 2>&1 || log_warn "Could not auto-create Gitea admin user; complete first-run in UI if needed"
}
|
||||
|
||||
# Bring up the stack in dependency order: data stores first, then the
# databases they must contain, then the apps, then health checks and
# the Gitea admin bootstrap.
start_docker_stack() {
    log_step "Starting Docker services"
    compose_cmd up -d postgres redis
    wait_for_postgres
    ensure_database "${GITEA_DB_NAME:-gitea}"
    ensure_database "${N8N_DB_NAME:-n8n}"
    compose_cmd up -d gitea n8n
    # '|| true': a slow health check is reported but never aborts startup.
    wait_for_service_http "Gitea" "http://localhost:${GITEA_HTTP_PORT:-3000}/api/healthz" || true
    wait_for_service_http "n8n" "http://localhost:${N8N_PORT:-5678}/healthz" || true
    ensure_gitea_admin
}
|
||||
|
||||
# Start the background queue runner unless it is explicitly disabled,
# already managed by systemd, or already running under a pid file.
start_queue_runner() {
    if [[ "$START_QUEUE_RUNNER" != "1" ]]; then
        log_info "Queue runner startup skipped by START_QUEUE_RUNNER=$START_QUEUE_RUNNER"
        return
    fi

    # Prefer the systemd unit when active — avoid a duplicate process.
    if command -v systemctl >/dev/null 2>&1 && systemctl is-active abletonmcp-queue-runner.service >/dev/null 2>&1; then
        log_info "Queue runner already managed by systemd"
        return
    fi

    # kill -0 only probes whether the PID is alive; a stale pid file
    # therefore falls through and the runner is (re)started below.
    local pid_file="$PID_DIR/queue-runner.pid"
    if [[ -f "$pid_file" ]] && kill -0 "$(cat "$pid_file")" 2>/dev/null; then
        log_info "Queue runner already running"
        return
    fi

    log_step "Starting autonomous queue runner"
    # nohup + background: survives this shell; output goes to the log.
    nohup bash "$WSL_DIR/run_task_queue.sh" > "$LOGS_DIR/queue-runner.log" 2>&1 &
    echo $! > "$pid_file"
    log_info "Queue runner PID: $(cat "$pid_file")"
}
|
||||
|
||||
# Entry point: verify prerequisites, start containers, start the
# runner, then print the service URLs.
main() {
    check_prerequisites
    start_docker_stack
    start_queue_runner
    echo
    log_info "Stack started"
    echo " Gitea: http://localhost:${GITEA_HTTP_PORT:-3000}"
    echo " n8n: http://localhost:${N8N_PORT:-5678}"
}

main "$@"
|
||||
@@ -0,0 +1,58 @@
|
||||
#!/usr/bin/env bash
# status.sh - one-shot health overview: docker daemon, compose
# services, queue runner state, and available log files.
set -euo pipefail

# ANSI colours for the status helpers below.
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly RED='\033[0;31m'
readonly NC='\033[0m'

ok() { echo -e "${GREEN}OK${NC} $*"; }
warn() { echo -e "${YELLOW}WARN${NC} $*"; }
fail() { echo -e "${RED}FAIL${NC} $*"; }
step() { echo -e "${BLUE}$*${NC}"; }

# Paths resolved relative to this script's own location.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
WSL_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
AUTOMATION_DIR="$(cd "$WSL_DIR/.." && pwd)"
DOCKER_ENV_FILE="$WSL_DIR/.env"
COMPOSE_FILE="$WSL_DIR/docker-compose.yml"
PID_DIR="$AUTOMATION_DIR/wsl_runtime/pids"
LOGS_DIR="$AUTOMATION_DIR/wsl_runtime/logs"

# Single place that pins the env file and compose file for every call.
compose_cmd() {
    docker compose --env-file "$DOCKER_ENV_FILE" -f "$COMPOSE_FILE" "$@"
}

step "Docker"
if command -v docker >/dev/null 2>&1 && docker info >/dev/null 2>&1; then
    ok "docker daemon running"
else
    fail "docker daemon unavailable"
fi
echo

step "Compose services"
if command -v docker >/dev/null 2>&1 && [[ -f "$COMPOSE_FILE" ]]; then
    # '|| true': keep reporting even if 'compose ps' fails under set -e.
    compose_cmd ps || true
else
    warn "compose file or docker missing"
fi
echo

step "Queue runner"
# Check the nohup pid file first, then fall back to the systemd unit.
if [[ -f "$PID_DIR/queue-runner.pid" ]] && kill -0 "$(cat "$PID_DIR/queue-runner.pid")" 2>/dev/null; then
    ok "queue runner PID $(cat "$PID_DIR/queue-runner.pid")"
elif command -v systemctl >/dev/null 2>&1 && systemctl is-active abletonmcp-queue-runner.service >/dev/null 2>&1; then
    ok "queue runner managed by systemd"
else
    warn "queue runner not running"
fi
echo

step "Logs"
if [[ -d "$LOGS_DIR" ]]; then
    # One bullet per log file.
    ls -1 "$LOGS_DIR" | sed 's/^/ - /'
else
    warn "no logs directory"
fi
|
||||
@@ -0,0 +1,52 @@
|
||||
#!/usr/bin/env bash
# stop.sh - stop the queue runner first, then the Docker services.
# Extra arguments are forwarded to 'docker compose down'
# (e.g. ./stop.sh -v to also remove volumes).
set -euo pipefail

# ANSI colours for the log helpers below.
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly NC='\033[0m'

log_info() { echo -e "${GREEN}[INFO]${NC} $*"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $*"; }
log_step() { echo -e "${BLUE}[STEP]${NC} $*"; }

# Paths resolved relative to this script's own location.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
WSL_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
AUTOMATION_DIR="$(cd "$WSL_DIR/.." && pwd)"
DOCKER_ENV_FILE="$WSL_DIR/.env"
COMPOSE_FILE="$WSL_DIR/docker-compose.yml"
PID_DIR="$AUTOMATION_DIR/wsl_runtime/pids"

# Single place that pins the env file and compose file for every call.
compose_cmd() {
    docker compose --env-file "$DOCKER_ENV_FILE" -f "$COMPOSE_FILE" "$@"
}

# Terminate the process recorded in a pid file, escalating from
# SIGTERM to SIGKILL, then remove the pid file. No-op when the file
# is absent; the kills are best-effort ('|| true') so an already-dead
# process never aborts the script.
stop_runner() {
    local pid_file="$1"
    if [[ ! -f "$pid_file" ]]; then
        return
    fi
    local pid
    pid="$(cat "$pid_file")"
    if kill -0 "$pid" 2>/dev/null; then
        kill -TERM "$pid" 2>/dev/null || true
        # Grace period; the follow-up SIGKILL is harmless if the
        # process exited on TERM.
        sleep 2
        kill -KILL "$pid" 2>/dev/null || true
    fi
    rm -f "$pid_file"
}

# Entry point: runner first (it talks to the containers), then compose.
main() {
    log_step "Stopping queue runner"
    stop_runner "$PID_DIR/queue-runner.pid"
    echo
    log_step "Stopping Docker services"
    if command -v docker >/dev/null 2>&1; then
        # '|| true': the stack may already be down; stay successful.
        compose_cmd down "$@" || true
    else
        log_warn "Docker not installed"
    fi
    log_info "Stack stopped"
}

main "$@"
|
||||
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
# Best-effort Telegram notification helper.
# Usage: <script> "message"
# Exits 0 silently when no message is given or no credentials are
# configured, so callers may invoke it unconditionally.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
LOCAL_ENV_FILE="${LOCAL_ENV_FILE:-$PROJECT_ROOT/automation/wsl.local.env}"

# Pull TELEGRAM_* settings from the local env file when present.
if [[ -f "$LOCAL_ENV_FILE" ]]; then
    # shellcheck disable=SC1090
    source "$LOCAL_ENV_FILE"
fi

MESSAGE="${1:-}"
if [[ -z "$MESSAGE" ]]; then
    exit 0
fi

BOT_TOKEN="${TELEGRAM_BOT_TOKEN:-}"
CHAT_ID="${TELEGRAM_CHAT_ID:-}"

# Not configured: succeed quietly — notifications are optional.
if [[ -z "$BOT_TOKEN" || -z "$CHAT_ID" ]]; then
    exit 0
fi

# NOTE(review): under 'set -e' a failed curl (network down, bad token)
# makes this script exit non-zero — confirm callers treat that as
# best-effort rather than fatal.
curl -fsS -X POST "https://api.telegram.org/bot${BOT_TOKEN}/sendMessage" \
    --data-urlencode "chat_id=${CHAT_ID}" \
    --data-urlencode "text=${MESSAGE}" \
    --data "disable_web_page_preview=true" >/dev/null
|
||||
Reference in New Issue
Block a user