Replace FL Studio binary .flp output with REAPER text-based .rpp output using the rpp Python library (Perlence/rpp). - Add core/schema.py: DAW-agnostic data types (SongDefinition, TrackDef, ClipDef, MidiNote, PluginDef) - Add reaper_builder/: RPP file generation via rpp.Element + headless render via reaper.exe CLI - Add composer/converters.py: bridge rhythm.py/melodic.py note dicts to core.schema MidiNote objects - Rewrite scripts/compose.py: real generator pipeline with --render flag - Delete src/flp_builder/, src/scanner/, mcp/, flstudio-mcp/, old scripts - Add 40 passing tests (schema, builder, converters, compose, render)
96 lines
3.3 KiB
Python
"""Tests for src/composer/converters.py — rhythm_to_midi, melodic_to_midi."""
|
|
|
|
import sys
|
|
from pathlib import Path
|
|
|
|
sys.path.insert(0, str(Path(__file__).parents[1]))
|
|
|
|
import pytest
|
|
from src.composer.converters import rhythm_to_midi, melodic_to_midi
|
|
from src.core.schema import MidiNote
|
|
|
|
|
|
class TestRhythmToMidi:
    """Tests for rhythm_to_midi() — channel → GM pitch mapping."""

    def test_rhythm_to_midi_kick_channel(self):
        """Channel 11 (kick) maps to pitch 36 with correct start/duration/velocity."""
        notes_by_channel = {
            11: [
                {"pos": 0.0, "len": 0.25, "key": 36, "vel": 115},
                {"pos": 1.0, "len": 0.25, "key": 36, "vel": 100},
            ]
        }

        converted = rhythm_to_midi(notes_by_channel)

        assert len(converted) == 2
        first, second = converted
        # Pitch is resolved via CHANNEL_PITCH, not taken from the dict's "key".
        assert (first.pitch, first.start, first.duration, first.velocity) == (
            36, 0.0, 0.25, 115,
        )
        assert (second.pitch, second.start, second.duration, second.velocity) == (
            36, 1.0, 0.25, 100,
        )

    def test_rhythm_to_midi_hihat_channel(self):
        """Channel 15 (hihat) maps to pitch 42."""
        notes_by_channel = {15: [{"pos": 0.0, "len": 0.125, "key": 42, "vel": 90}]}

        converted = rhythm_to_midi(notes_by_channel)

        assert len(converted) == 1
        note = converted[0]
        assert (note.pitch, note.start, note.duration, note.velocity) == (
            42, 0.0, 0.125, 90,
        )

    def test_rhythm_to_midi_unknown_channel(self):
        """Unknown channel (not in CHANNEL_PITCH) defaults to pitch 60."""
        notes_by_channel = {99: [{"pos": 0.0, "len": 0.25, "key": 60, "vel": 100}]}

        converted = rhythm_to_midi(notes_by_channel)

        assert len(converted) == 1
        # Fallback pitch for channels missing from the mapping.
        assert converted[0].pitch == 60
        assert converted[0].start == 0.0

    def test_rhythm_to_midi_multi_channel(self):
        """3 different channels return a flat list with all notes combined."""
        notes_by_channel = {
            11: [{"pos": 0.0, "len": 0.25, "key": 36, "vel": 115}],
            15: [{"pos": 0.5, "len": 0.125, "key": 42, "vel": 90}],
            10: [{"pos": 1.0, "len": 0.25, "key": 39, "vel": 80}],
        }

        converted = rhythm_to_midi(notes_by_channel)

        assert len(converted) == 3
        assert {note.pitch for note in converted} == {36, 42, 39}
|
|
|
|
|
|
class TestMelodicToMidi:
    """Tests for melodic_to_midi() — key field used directly as pitch."""

    def test_melodic_to_midi_uses_key_as_pitch(self):
        """key=60 → pitch 60 (key field is used directly, not mapped)."""
        note_dicts = [
            {"pos": 0.0, "len": 0.5, "key": 60, "vel": 100},
            {"pos": 0.5, "len": 0.5, "key": 64, "vel": 90},
            {"pos": 1.0, "len": 0.5, "key": 67, "vel": 95},
        ]

        converted = melodic_to_midi(note_dicts)

        assert len(converted) == 3
        # "key" passes straight through as the MIDI pitch.
        assert [note.pitch for note in converted] == [60, 64, 67]
        head = converted[0]
        assert (head.start, head.duration, head.velocity) == (0.0, 0.5, 100)

    def test_melodic_to_midi_empty_list(self):
        """Empty list returns empty list."""
        assert melodic_to_midi([]) == []
|