feat: fix sample variety per section and reorganize sample library
- Fix compose.py to select different samples per section instead of one per role
- Add select_diverse() to SampleSelector for diverse sample selection
- Migrate 862 samples from scattered dirs to libreria/samples/{role}/
- Rename files with consistent convention: {role}_{key}_{bpm}_{character}_{hash}.wav
- Add migrate_library.py script with dry-run and verification
- Backup original index as sample_index_pre_migration.json
- 72 tests passing
This commit is contained in:
128508
data/sample_index_pre_migration.json
Normal file
128508
data/sample_index_pre_migration.json
Normal file
File diff suppressed because it is too large
Load Diff
@@ -235,13 +235,11 @@ def build_section_tracks(
|
|||||||
# Build one track per role
|
# Build one track per role
|
||||||
tracks: list[TrackDef] = []
|
tracks: list[TrackDef] = []
|
||||||
|
|
||||||
|
# Track used sample IDs per role for diversity
|
||||||
|
used_sample_ids: dict[str, list[str]] = {}
|
||||||
|
|
||||||
for role, role_cfg in roles.items():
|
for role, role_cfg in roles.items():
|
||||||
sample_role = ROLE_TO_SAMPLE_ROLE.get(role, role)
|
sample_role = ROLE_TO_SAMPLE_ROLE.get(role, role)
|
||||||
generator_name = role_cfg.get("notes_template", "")
|
|
||||||
|
|
||||||
# Select sample for this role
|
|
||||||
sample_match = selector.select_one(role=sample_role, key=key, bpm=bpm)
|
|
||||||
sample_path = sample_match.get("original_path") if sample_match else None
|
|
||||||
|
|
||||||
# Collect clips for each section
|
# Collect clips for each section
|
||||||
section_clips: list[ClipDef] = []
|
section_clips: list[ClipDef] = []
|
||||||
@@ -251,6 +249,20 @@ def build_section_tracks(
|
|||||||
vel_mult = section.energy
|
vel_mult = section.energy
|
||||||
vol_mult = section.energy
|
vol_mult = section.energy
|
||||||
|
|
||||||
|
# For audio roles, select a different sample per section
|
||||||
|
sample_path = None
|
||||||
|
if role in AUDIO_ROLES:
|
||||||
|
exclude = used_sample_ids.get(role, [])
|
||||||
|
diverse_results = selector.select_diverse(
|
||||||
|
role=sample_role, n=1, exclude=exclude, key=key, bpm=bpm
|
||||||
|
)
|
||||||
|
if diverse_results:
|
||||||
|
sample = diverse_results[0]
|
||||||
|
sample_path = sample.get("original_path")
|
||||||
|
sample_id = sample.get("file_hash", "")
|
||||||
|
if sample_id:
|
||||||
|
used_sample_ids.setdefault(role, []).append(sample_id)
|
||||||
|
|
||||||
if role in ROLE_RHYTHM_GENERATORS:
|
if role in ROLE_RHYTHM_GENERATORS:
|
||||||
gen_name = ROLE_RHYTHM_GENERATORS[role]
|
gen_name = ROLE_RHYTHM_GENERATORS[role]
|
||||||
note_dict = get_notes(gen_name, section.bars, velocity_mult=vel_mult)
|
note_dict = get_notes(gen_name, section.bars, velocity_mult=vel_mult)
|
||||||
@@ -281,12 +293,12 @@ def build_section_tracks(
|
|||||||
gen_fn = ROLE_MELODIC_GENERATORS[role]
|
gen_fn = ROLE_MELODIC_GENERATORS[role]
|
||||||
note_list = gen_fn(key=key, bars=section.bars, velocity_mult=vel_mult)
|
note_list = gen_fn(key=key, bars=section.bars, velocity_mult=vel_mult)
|
||||||
midi_notes = melodic_to_midi(note_list)
|
midi_notes = melodic_to_midi(note_list)
|
||||||
|
# Melodic roles use MIDI instruments — no audio_path needed
|
||||||
clip = ClipDef(
|
clip = ClipDef(
|
||||||
position=sec_offset * 4.0,
|
position=sec_offset * 4.0,
|
||||||
length=section.bars * 4.0,
|
length=section.bars * 4.0,
|
||||||
name=f"{section.name.capitalize()} {role.capitalize()}",
|
name=f"{section.name.capitalize()} {role.capitalize()}",
|
||||||
midi_notes=midi_notes,
|
midi_notes=midi_notes,
|
||||||
audio_path=sample_path,
|
|
||||||
)
|
)
|
||||||
section_clips.append(clip)
|
section_clips.append(clip)
|
||||||
|
|
||||||
|
|||||||
281
scripts/migrate_library.py
Normal file
281
scripts/migrate_library.py
Normal file
@@ -0,0 +1,281 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
"""Migrate sample library to new organized structure.
|
||||||
|
|
||||||
|
Copies all 862 samples from scattered subdirectories in `libreria/reggaeton/`
|
||||||
|
to flat role-based directories under `libreria/samples/{role}/` with consistent
|
||||||
|
naming.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
python scripts/migrate_library.py [--dry-run] [--verify]
|
||||||
|
|
||||||
|
CRITICAL RULES:
|
||||||
|
- COPY files, do NOT move them (keep originals as backup)
|
||||||
|
- Do NOT delete original files
|
||||||
|
- Keep the old index as backup before modifying
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import shutil
|
||||||
|
import sys
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
# Project root
|
||||||
|
_ROOT = Path(__file__).parent.parent
|
||||||
|
INDEX_PATH = _ROOT / "data" / "sample_index.json"
|
||||||
|
BACKUP_PATH = _ROOT / "data" / "sample_index_pre_migration.json"
|
||||||
|
SAMPLES_ROOT = _ROOT / "libreria" / "samples"
|
||||||
|
LOG_PATH = _ROOT / "scripts" / "migration_log.json"
|
||||||
|
|
||||||
|
# All known roles
|
||||||
|
ROLES = {
|
||||||
|
"kick", "snare", "hihat", "perc", "bass", "lead", "keys", "pad",
|
||||||
|
"drumloop", "fx", "vocal", "guitar", "brass", "synth", "arp",
|
||||||
|
"pluck", "oneshot", "fill",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def log(msg: str) -> None:
    """Emit *msg* to stdout immediately (unbuffered progress output)."""
    sys.stdout.write(msg + "\n")
    sys.stdout.flush()
|
||||||
|
|
||||||
|
|
||||||
|
def create_directories() -> None:
    """Ensure the samples root plus one subdirectory per known role exist."""
    SAMPLES_ROOT.mkdir(parents=True, exist_ok=True)
    for role_dir in (SAMPLES_ROOT / r for r in ROLES):
        role_dir.mkdir(exist_ok=True)
    log(f"[OK] Created directories under {SAMPLES_ROOT}")
|
||||||
|
|
||||||
|
|
||||||
|
def load_index() -> dict:
    """Read and parse the sample index JSON file."""
    raw = INDEX_PATH.read_text(encoding="utf-8")
    return json.loads(raw)
|
||||||
|
|
||||||
|
|
||||||
|
def save_index(data: dict) -> None:
    """Persist *data* to the index path via a temp-file swap.

    Writing to a sibling ``.json.tmp`` and then replacing avoids leaving a
    half-written index behind if the process dies mid-write.
    """
    scratch = INDEX_PATH.with_suffix(".json.tmp")
    payload = json.dumps(data, indent=2, ensure_ascii=False)
    with open(scratch, "w", encoding="utf-8") as handle:
        handle.write(payload)
    scratch.replace(INDEX_PATH)
|
||||||
|
|
||||||
|
|
||||||
|
def backup_index() -> None:
    """Snapshot the current index once; never clobber an existing backup."""
    if BACKUP_PATH.exists():
        log(f"[WARN] Backup already exists at {BACKUP_PATH}, skipping")
        return
    shutil.copy2(INDEX_PATH, BACKUP_PATH)
    log(f"[OK] Backed up index to {BACKUP_PATH}")
|
||||||
|
|
||||||
|
|
||||||
|
def migrate_samples(data: dict, dry_run: bool = False) -> tuple[list[dict], list[dict], dict]:
    """Copy every indexed sample into ``libreria/samples/{role}/`` and update the index.

    Files are COPIED (never moved) so the originals remain as a backup.
    Index entries are rewritten in-place: ``original_path`` is re-pointed at
    the new location and the old path is preserved under ``migrated_from``.

    Args:
        data: The loaded index data dict (will be modified in-place)
        dry_run: When True, report what would be copied without touching disk

    Returns:
        (migrated list, error list, role_counts)
    """
    samples = data["samples"]

    migrated: list[dict] = []
    errors: list[dict] = []
    # Guard against two index entries mapping to the same destination file:
    # without this check shutil.copy2 silently overwrites the earlier copy,
    # losing a sample while still reporting both as migrated.
    seen_destinations: set[str] = set()

    total = len(samples)
    log(f"Starting migration of {total} samples...")

    for idx, sample in enumerate(samples):
        if idx > 0 and idx % 100 == 0:
            log(f" Progress: {idx}/{total} ({100*idx/total:.1f}%)")

        role = sample.get("role", "unknown")
        if role not in ROLES:
            errors.append({
                "sample": sample.get("original_name", "unknown"),
                "error": f"Unknown role: {role}",
            })
            continue

        original_path = Path(sample["original_path"])
        new_name = sample.get("new_name")
        if not new_name:
            errors.append({
                "sample": sample.get("original_name", "unknown"),
                "error": "No new_name in index",
            })
            continue

        dest_path = SAMPLES_ROOT / role / new_name
        dest_key = str(dest_path)
        if dest_key in seen_destinations:
            # Reported in dry-run too, so a dry run predicts the collision.
            errors.append({
                "sample": sample.get("original_name", "unknown"),
                "original_path": str(original_path),
                "error": f"Duplicate destination: {dest_key}",
            })
            continue
        seen_destinations.add(dest_key)

        if dry_run:
            migrated.append({
                "original": str(original_path),
                "destination": dest_key,
                "role": role,
            })
        else:
            try:
                if not original_path.exists():
                    errors.append({
                        "sample": sample.get("original_name", "unknown"),
                        "original_path": str(original_path),
                        "error": "Source file not found",
                    })
                    continue

                # COPY (not move) to preserve originals
                shutil.copy2(original_path, dest_path)

                # Update index fields IN-PLACE (modifies data dict)
                sample["migrated_from"] = sample["original_path"]
                sample["original_path"] = str(dest_path)
                sample["original_name"] = new_name

                migrated.append({
                    "original": str(original_path),
                    "destination": dest_key,
                    "role": role,
                    "new_name": new_name,
                })
            except Exception as e:
                errors.append({
                    "sample": sample.get("original_name", "unknown"),
                    "original_path": str(original_path),
                    "error": str(e),
                })

    # Per-role tally of successful (or would-be, in dry-run) migrations.
    role_counts: dict[str, int] = {}
    for entry in migrated:
        entry_role = entry.get("role", "unknown")
        role_counts[entry_role] = role_counts.get(entry_role, 0) + 1

    return migrated, errors, role_counts
|
||||||
|
|
||||||
|
|
||||||
|
def write_log(role_counts: dict[str, int], migrated: list[dict], errors: list[dict]) -> None:
    """Dump a JSON summary of the migration run to LOG_PATH."""
    summary: dict = {}
    summary["timestamp"] = datetime.now(timezone.utc).isoformat()
    summary["total_samples"] = len(migrated) + len(errors)
    summary["migrated_count"] = len(migrated)
    summary["error_count"] = len(errors)
    summary["role_counts"] = role_counts
    summary["migrated_sample"] = migrated[:10]  # First 10 as sample
    summary["errors"] = errors

    LOG_PATH.parent.mkdir(parents=True, exist_ok=True)
    serialized = json.dumps(summary, indent=2, ensure_ascii=False)
    with open(LOG_PATH, "w", encoding="utf-8") as f:
        f.write(serialized)
    log(f"[OK] Migration log written to {LOG_PATH}")
|
||||||
|
|
||||||
|
|
||||||
|
def verify_migration(migrated: list[dict]) -> list[dict]:
    """Return the subset of *migrated* entries whose destination file is absent."""
    return [entry for entry in migrated if not Path(entry["destination"]).exists()]
|
||||||
|
|
||||||
|
|
||||||
|
def run_migration(dry_run: bool = False, verify: bool = True) -> int:
    """Execute full migration.

    Phases: (1) create dirs + backup index, (2) copy samples,
    (3) write updated index, (4) write log, (5) verify copies exist.
    A dry run stops after phase 2 and touches nothing on disk.

    Args:
        dry_run: When True, only report what would be copied.
        verify: When True, re-check that every migrated file exists.

    Returns:
        0 on success, 1 on errors
    """
    # Banner with the resolved paths so a console transcript is self-describing.
    log("=" * 60)
    log("SAMPLE LIBRARY MIGRATION")
    log("=" * 60)
    log(f"Mode: {'DRY RUN' if dry_run else 'LIVE'}")
    log(f"Index: {INDEX_PATH}")
    log(f"Backup: {BACKUP_PATH}")
    log(f"Target: {SAMPLES_ROOT}")
    log("")

    # Phase 1: Setup — directories first, then the index backup, so a failed
    # backup never leaves us with dirs missing on a retry.
    log("[PHASE 1] Creating directories...")
    create_directories()
    backup_index()

    # Load index once; migrate_samples mutates this dict in-place.
    data = load_index()

    # Phase 2: Migrate
    log("")
    log("[PHASE 2] Migrating samples...")
    migrated, errors, role_counts = migrate_samples(data, dry_run=dry_run)

    log("")
    log(f" Migrated: {len(migrated)} samples")
    if errors:
        log(f" Errors: {len(errors)} samples")
        # Only the first few errors on the console; the full list goes to the log file.
        for e in errors[:5]:
            log(f" - {e.get('sample', 'unknown')}: {e.get('error', 'unknown error')}")

    if dry_run:
        # Dry run: nothing was copied, so skip index/log/verify phases entirely.
        log("")
        log("[DRY RUN] No files were copied. Showing first 5 destinations:")
        for m in migrated[:5]:
            log(f" {m['original']} -> {m['destination']}")
        return 0 if not errors else 1

    # Phase 3: Write updated index (data already modified in-place by migrate_samples)
    log("")
    log("[PHASE 3] Writing updated index...")
    save_index(data)

    # Phase 4: Write log and verify
    log("")
    log("[PHASE 4] Writing migration log...")
    write_log(role_counts, migrated, errors)

    if verify:
        log("")
        log("[PHASE 5] Verifying migration...")
        missing = verify_migration(migrated)
        if missing:
            # Missing files after a LIVE run is a hard failure.
            log(f"[ERROR] {len(missing)} migrated files are missing!")
            for m in missing[:5]:
                log(f" - {m['destination']}")
            return 1
        else:
            log(f"[OK] All {len(migrated)} migrated files verified")

    log("")
    log("=" * 60)
    log("MIGRATION COMPLETE")
    log(f"Migrated: {len(migrated)} samples")
    log(f"Errors: {len(errors)} samples")
    log("=" * 60)

    # Print role breakdown
    log("")
    log("Sample count per role:")
    for role, count in sorted(role_counts.items()):
        log(f" {role}: {count}")

    # Non-zero exit if anything failed, so shell callers can detect it.
    return 0 if not errors else 1
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> int:
    """CLI entry point: parse flags, then run the migration."""
    import argparse

    cli = argparse.ArgumentParser(description="Migrate sample library to new structure")
    cli.add_argument("--dry-run", action="store_true", help="Show what would be done without copying")
    cli.add_argument("--verify", action="store_true", default=True, help="Verify migrated files exist")
    cli.add_argument("--no-verify", dest="verify", action="store_false", help="Skip verification")
    opts = cli.parse_args()
    return run_migration(dry_run=opts.dry_run, verify=opts.verify)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
sys.exit(main())
|
||||||
89
scripts/migration_log.json
Normal file
89
scripts/migration_log.json
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
{
|
||||||
|
"timestamp": "2026-05-03T17:39:20.050855+00:00",
|
||||||
|
"total_samples": 862,
|
||||||
|
"migrated_count": 862,
|
||||||
|
"error_count": 0,
|
||||||
|
"role_counts": {
|
||||||
|
"guitar": 10,
|
||||||
|
"lead": 58,
|
||||||
|
"pluck": 97,
|
||||||
|
"brass": 12,
|
||||||
|
"keys": 65,
|
||||||
|
"pad": 57,
|
||||||
|
"synth": 22,
|
||||||
|
"drumloop": 128,
|
||||||
|
"vocal": 134,
|
||||||
|
"bass": 59,
|
||||||
|
"arp": 12,
|
||||||
|
"fx": 57,
|
||||||
|
"oneshot": 6,
|
||||||
|
"perc": 22,
|
||||||
|
"snare": 54,
|
||||||
|
"hihat": 18,
|
||||||
|
"kick": 42,
|
||||||
|
"fill": 9
|
||||||
|
},
|
||||||
|
"migrated_sample": [
|
||||||
|
{
|
||||||
|
"original": "C:\\Users\\Administrator\\Documents\\fl_control\\libreria\\reggaeton\\SentimientoLatino2025\\01\\LATINOS - ONE SHOTS\\Midilatino_GUITAR_Atomos_C.wav",
|
||||||
|
"destination": "C:\\Users\\Administrator\\Documents\\fl_control\\libreria\\samples\\guitar\\guitar_C5_117_warm_74d1d8.wav",
|
||||||
|
"role": "guitar",
|
||||||
|
"new_name": "guitar_C5_117_warm_74d1d8.wav"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"original": "C:\\Users\\Administrator\\Documents\\fl_control\\libreria\\reggaeton\\SentimientoLatino2025\\01\\LATINOS - ONE SHOTS\\Midilatino_LEAD_Sola_C.wav",
|
||||||
|
"destination": "C:\\Users\\Administrator\\Documents\\fl_control\\libreria\\samples\\lead\\lead_C5_117_warm_71fa17.wav",
|
||||||
|
"role": "lead",
|
||||||
|
"new_name": "lead_C5_117_warm_71fa17.wav"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"original": "C:\\Users\\Administrator\\Documents\\fl_control\\libreria\\reggaeton\\SentimientoLatino2025\\01\\LATINOS - ONE SHOTS\\Midilatino_KEY_Largo_C.wav",
|
||||||
|
"destination": "C:\\Users\\Administrator\\Documents\\fl_control\\libreria\\samples\\lead\\lead_C4_129_neutral_f9f4a8.wav",
|
||||||
|
"role": "lead",
|
||||||
|
"new_name": "lead_C4_129_neutral_f9f4a8.wav"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"original": "C:\\Users\\Administrator\\Documents\\fl_control\\libreria\\reggaeton\\SentimientoLatino2025\\01\\LATINOS - ONE SHOTS\\Midilatino_BELL_Church_C.wav",
|
||||||
|
"destination": "C:\\Users\\Administrator\\Documents\\fl_control\\libreria\\samples\\pluck\\pluck_C#5_136_warm_0a3554.wav",
|
||||||
|
"role": "pluck",
|
||||||
|
"new_name": "pluck_C#5_136_warm_0a3554.wav"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"original": "C:\\Users\\Administrator\\Documents\\fl_control\\libreria\\reggaeton\\SentimientoLatino2025\\01\\LATINOS - ONE SHOTS\\Midilatino_LEAD_Amor_C.wav",
|
||||||
|
"destination": "C:\\Users\\Administrator\\Documents\\fl_control\\libreria\\samples\\lead\\lead_C5_114_warm_59ead9.wav",
|
||||||
|
"role": "lead",
|
||||||
|
"new_name": "lead_C5_114_warm_59ead9.wav"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"original": "C:\\Users\\Administrator\\Documents\\fl_control\\libreria\\reggaeton\\SentimientoLatino2025\\01\\LATINOS - ONE SHOTS\\Midilatino_BRASS_Electrica_C.wav",
|
||||||
|
"destination": "C:\\Users\\Administrator\\Documents\\fl_control\\libreria\\samples\\brass\\brass_D#3_132_soft_d7ec68.wav",
|
||||||
|
"role": "brass",
|
||||||
|
"new_name": "brass_D#3_132_soft_d7ec68.wav"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"original": "C:\\Users\\Administrator\\Documents\\fl_control\\libreria\\reggaeton\\SentimientoLatino2025\\01\\LATINOS - ONE SHOTS\\Midilatino_BRASS_Bird_C.wav",
|
||||||
|
"destination": "C:\\Users\\Administrator\\Documents\\fl_control\\libreria\\samples\\brass\\brass_C4_129_warm_525d7a.wav",
|
||||||
|
"role": "brass",
|
||||||
|
"new_name": "brass_C4_129_warm_525d7a.wav"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"original": "C:\\Users\\Administrator\\Documents\\fl_control\\libreria\\reggaeton\\SentimientoLatino2025\\01\\LATINOS - ONE SHOTS\\Midilatino_LEAD_Fino_C.wav",
|
||||||
|
"destination": "C:\\Users\\Administrator\\Documents\\fl_control\\libreria\\samples\\lead\\lead_C5_139_warm_98d113.wav",
|
||||||
|
"role": "lead",
|
||||||
|
"new_name": "lead_C5_139_warm_98d113.wav"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"original": "C:\\Users\\Administrator\\Documents\\fl_control\\libreria\\reggaeton\\SentimientoLatino2025\\01\\LATINOS - ONE SHOTS\\Midilatino_BRASS_Thunder_C.wav",
|
||||||
|
"destination": "C:\\Users\\Administrator\\Documents\\fl_control\\libreria\\samples\\brass\\brass_C#3_143_boomy_9c139e.wav",
|
||||||
|
"role": "brass",
|
||||||
|
"new_name": "brass_C#3_143_boomy_9c139e.wav"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"original": "C:\\Users\\Administrator\\Documents\\fl_control\\libreria\\reggaeton\\SentimientoLatino2025\\01\\LATINOS - ONE SHOTS\\Midilatino_KEY_Profundo_C.wav",
|
||||||
|
"destination": "C:\\Users\\Administrator\\Documents\\fl_control\\libreria\\samples\\keys\\keys_C4_126_deep_947173.wav",
|
||||||
|
"role": "keys",
|
||||||
|
"new_name": "keys_C4_126_deep_947173.wav"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"errors": []
|
||||||
|
}
|
||||||
@@ -328,3 +328,51 @@ class SampleSelector:
|
|||||||
if not results:
|
if not results:
|
||||||
return None
|
return None
|
||||||
return random.choice(results).sample
|
return random.choice(results).sample
|
||||||
|
|
||||||
|
def select_diverse(
    self,
    role: str,
    n: int = 1,
    exclude: Optional[list[str]] = None,
    **kwargs,
) -> list[dict]:
    """Return n different samples for role, excluding known IDs.

    Uses randomized scoring to ensure diversity across calls.
    Returns fewer than n if not enough candidates available after exclusion.

    Args:
        role: Required. Production role (kick, bass, lead, etc.)
        n: Number of different samples to return
        exclude: List of sample IDs (file_hash) to exclude from results
        **kwargs: Passed to select() (key, bpm, character, etc.)

    Returns:
        List of sample dicts (length <= n, never includes excluded IDs)
    """
    import random

    excluded_ids = set(exclude or [])  # set for O(1) membership tests
    scored: list[tuple[float, dict]] = []

    candidates = self.select(role=role, limit=100, **kwargs)  # Get enough candidates

    for match in candidates:
        sample = match.sample
        sample_id = sample.get("file_hash", "")

        if sample_id in excluded_ids:
            continue

        # Add small random noise to score for diversity, so repeated calls
        # with the same params can return different results.
        scored.append((match.score + random.uniform(-0.05, 0.05), sample))

    # BUGFIX: score ALL candidates before picking. The previous version broke
    # out of the loop once n samples were collected, which made the jitter
    # cosmetic — the same first-n candidates were always returned, defeating
    # the documented randomized diversity.
    scored.sort(key=lambda pair: pair[0], reverse=True)
    return [sample for _, sample in scored[:n]]
|
||||||
|
|||||||
Reference in New Issue
Block a user