🎵 Initial commit: MusiaIA - AI Music Generator
✨ Features: - ALS file generator (creates Ableton Live projects) - ALS parser (reads and analyzes projects) - AI clients (GLM4.6 + Minimax M2) - Multiple music genres (House, Techno, Hip-Hop) - Complete documentation 🤖 Ready to generate music with AI!
This commit adds a new file, docs/api_chatbot.md (547 lines).
# API & Chatbot - Documentación
|
||||
|
||||
## 🤖 Integración con IA (GLM4.6 & Minimax M2)
|
||||
|
||||
### Proveedores de IA
|
||||
|
||||
```python
|
||||
# ai_providers.py
|
||||
class GLM46Provider:
    """HTTP client for the GLM4.6 chat-completions API."""

    def __init__(self, api_key: str):
        self.api_key = api_key
        self.base_url = "https://open.bigmodel.cn/api/paas/v4"

    def complete(self, prompt: str, **kwargs) -> str:
        """Send *prompt* as a single user message and return the reply text.

        Extra keyword arguments are merged into the request payload
        (e.g. ``temperature``, ``max_tokens``).

        Raises:
            requests.HTTPError: if the API answers with a non-2xx status.
            requests.Timeout: if no response arrives within 60 seconds.
        """
        response = requests.post(
            f"{self.base_url}/chat/completions",
            headers={"Authorization": f"Bearer {self.api_key}"},
            json={
                "model": "glm-4-plus",
                "messages": [{"role": "user", "content": prompt}],
                **kwargs
            },
            timeout=60,  # without a timeout a hung connection blocks forever
        )
        # Fail loudly on HTTP errors instead of crashing later with a
        # cryptic KeyError while indexing an error-page body.
        response.raise_for_status()
        return response.json()['choices'][0]['message']['content']
|
||||
|
||||
class MinimaxM2Provider:
    """HTTP client for the Minimax M2 API (not implemented yet)."""

    def __init__(self, api_key: str):
        self.api_key = api_key
        self.base_url = "https://api.minimax.chat/v1"

    def complete(self, prompt: str, **kwargs) -> str:
        """Return the model's reply for *prompt*.

        The original stub silently returned ``None`` despite the ``-> str``
        annotation; raising makes the missing implementation explicit to
        callers instead of surfacing later as a confusing TypeError.
        """
        # TODO: implement according to the Minimax API documentation.
        raise NotImplementedError("MinimaxM2Provider.complete is not implemented yet")
|
||||
|
||||
class AIOrchestrator:
    """Routes each request to the most suitable AI provider."""

    def __init__(self):
        # Keys double as the model identifiers returned by _select_model().
        self.providers = {
            'glm46': GLM46Provider(os.getenv('GLM46_API_KEY')),
            'minimax': MinimaxM2Provider(os.getenv('MINIMAX_API_KEY'))
        }

    async def chat(self, message: str, context: list) -> str:
        """Answer *message*, forwarding *context* to the chosen provider."""
        model = self._select_model(message)
        provider = self.providers[model]
        # Bug fix: provider.complete() is a plain synchronous method, so
        # awaiting its str return value raised TypeError at runtime.
        return provider.complete(message, context=context)

    def _select_model(self, message: str) -> str:
        """Pick the best model for the query.

        Heuristic: GLM4.6 for structured generation requests ('generar' /
        'crear'), Minimax M2 for general conversation.
        """
        if 'generar' in message.lower() or 'crear' in message.lower():
            return 'glm46'
        return 'minimax'
|
||||
```
|
||||
|
||||
## 💬 Sistema de Chat
|
||||
|
||||
### WebSocket Handler (Real-time)
|
||||
|
||||
```python
|
||||
# chat_websocket.py
|
||||
from fastapi import WebSocket, WebSocketDisconnect
|
||||
import json
|
||||
|
||||
class ChatManager:
    """Tracks open WebSocket connections and pushes messages to them."""

    def __init__(self):
        # Fix: the original annotated this as typing.List, which is never
        # imported in this module and raises NameError when evaluated;
        # the builtin generic (PEP 585) needs no import.
        self.active_connections: list[WebSocket] = []

    async def connect(self, websocket: WebSocket, user_id: str):
        """Accept the handshake and start tracking *websocket*.

        NOTE(review): *user_id* is currently unused — presumably meant for
        per-user routing; confirm before removing it from the signature.
        """
        await websocket.accept()
        self.active_connections.append(websocket)

    def disconnect(self, websocket: WebSocket):
        """Stop tracking a closed connection."""
        self.active_connections.remove(websocket)

    async def send_message(self, message: str, websocket: WebSocket):
        """Send one chat message to a single client as JSON text."""
        # NOTE(review): datetime is not imported in this module — confirm
        # `from datetime import datetime` exists at file level.
        await websocket.send_text(json.dumps({
            "type": "message",
            "content": message,
            "timestamp": datetime.now().isoformat()
        }))

    async def broadcast_progress(self, progress: dict):
        """Push a progress update to every connected client."""
        for connection in self.active_connections:
            await connection.send_text(json.dumps({
                "type": "progress",
                "data": progress
            }))
|
||||
|
||||
# One shared manager for the whole module: the original created a new
# ChatManager per connection, so broadcast_progress() could only ever
# reach the single socket that created it.
chat_manager = ChatManager()


@router.websocket("/chat/{user_id}")
async def chat_endpoint(websocket: WebSocket, user_id: str):
    """Per-connection chat loop: receive JSON messages, reply via the AI."""
    await chat_manager.connect(websocket, user_id)

    try:
        while True:
            # Messages arrive as JSON text: {"content": "..."}
            data = await websocket.receive_text()
            message_data = json.loads(data)

            # Route the message through the AI pipeline.
            processor = ChatProcessor(user_id)
            response = await processor.process_message(message_data['content'])

            await chat_manager.send_message(response, websocket)

    except WebSocketDisconnect:
        # Client went away: stop tracking the socket.
        chat_manager.disconnect(websocket)
|
||||
```
|
||||
|
||||
### Procesador de Chat
|
||||
|
||||
```python
|
||||
# chat_processor.py
|
||||
class ChatProcessor:
    """Processes chat messages and coordinates project generation."""

    def __init__(self, user_id: str):
        self.user_id = user_id
        self.ai_orchestrator = AIOrchestrator()
        self.project_generator = ProjectGenerator()

    async def process_message(self, message: str) -> str:
        """Classify *message* and dispatch it to the matching handler."""
        # 1. Determine the user's intent.
        intent = await self._analyze_intent(message)

        # 2. Dispatch on intent type.
        if intent['type'] == 'generate_project':
            return await self._handle_generation(message, intent)
        elif intent['type'] == 'chat':
            return await self._handle_chat(message)
        elif intent['type'] == 'modify_project':
            return await self._handle_modification(message, intent)
        # Robustness fix: the original fell through and returned None for
        # any unrecognised intent type; treat it as plain conversation.
        return await self._handle_chat(message)

    async def _analyze_intent(self, message: str) -> dict:
        """Ask the AI to classify the message; returns the parsed JSON.

        Raises json.JSONDecodeError if the model's reply is not valid JSON.
        """
        prompt = f"""
        Analiza este mensaje y determina la intención:
        "{message}"

        Clasifica como:
        - generate_project: quiere crear un nuevo proyecto
        - modify_project: quiere modificar un proyecto existente
        - chat: conversación general

        Responde en JSON: {{"type": "valor", "params": {{}}}}
        """

        # Bug fix: the carefully built *prompt* was dead code — the raw
        # message was sent instead, so the model never saw the
        # classification instructions above.
        response = await self.ai_orchestrator.chat(prompt, [])
        return json.loads(response)

    async def _handle_generation(self, message: str, intent: dict):
        """Handle a project-generation request end to end."""
        # NOTE(review): _send_progress is not defined on this class —
        # presumably it should forward to the WebSocket ChatManager; confirm.
        await self._send_progress("🎵 Analizando tu solicitud...")

        # Generate the project from the extracted parameters.
        als_path = await self.project_generator.create_from_chat(
            user_id=self.user_id,
            requirements=intent['params']
        )

        # Report success with the download location.
        return f"""
✅ ¡Proyecto generado con éxito!

🎹 Proyecto: {os.path.basename(als_path)}
📁 Ubicación: /projects/{self.user_id}/{als_path}

💡 Puedes abrir este archivo directamente en Ableton Live.
"""
|
||||
```
|
||||
|
||||
## 🎼 Motor de Generación Musical
|
||||
|
||||
```python
|
||||
# project_generator.py
|
||||
class ProjectGenerator:
    """Builds Ableton Live (.als) projects from chat-derived requirements."""

    def __init__(self):
        self.musical_ai = MusicalIntelligence()
        self.sample_db = SampleDatabase()
        self.als_generator = ALSGenerator()

    async def create_from_chat(self, user_id: str, requirements: dict) -> str:
        """Run the full generation pipeline; returns the .als file path."""
        # Step 1: musical analysis of the request.
        await self._send_progress("🎼 Analizando estructura musical...")
        analysis = await self.musical_ai.analyze_requirements(requirements)

        # Step 2: pick matching samples.
        await self._send_progress("🥁 Seleccionando samples...")
        samples_by_track = await self._select_samples_for_project(analysis)

        # Step 3: lay out the tracks.
        await self._send_progress("🎨 Diseñando layout...")
        track_layout = self._generate_track_layout(analysis, samples_by_track)

        # Step 4: write the .als file.
        await self._send_progress("⚙️ Generando archivo ALS...")
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        project_config = {
            'name': f"IA Project {timestamp}",
            'bpm': analysis['bpm'],
            'key': analysis['key'],
            'tracks': track_layout,
            'metadata': {
                'generated_by': 'MusiaIA',
                'style': analysis['style'],
                'mood': analysis['mood'],
            },
        }
        als_path = self.als_generator.create_project(project_config)

        # Step 5: persist to the user's history before returning.
        await self._save_to_history(user_id, requirements, als_path)

        return als_path

    async def _select_samples_for_project(self, analysis: dict) -> dict:
        """Pick up to four matching samples per required track type."""
        chosen = {}
        tempo = analysis['bpm']

        for kind in ('drums', 'bass', 'leads', 'pads', 'fx'):
            if kind not in analysis.get('required_tracks', []):
                continue
            matches = self.sample_db.search({
                'type': kind,
                'style': analysis['style'],
                'bpm_range': [tempo - 5, tempo + 5],
            })
            chosen[kind] = matches[:4]  # keep the four best matches

        return chosen
|
||||
```
|
||||
|
||||
## 📡 API REST Endpoints
|
||||
|
||||
```python
|
||||
# api_endpoints.py
|
||||
import os

from fastapi import FastAPI, File, HTTPException, UploadFile
from fastapi.responses import FileResponse
|
||||
|
||||
router = FastAPI()
|
||||
|
||||
@router.post("/chat/message")
async def send_message(request: ChatRequest):
    """Forward a user message to the chatbot and return its reply."""
    chat_processor = ChatProcessor(request.user_id)
    reply = await chat_processor.process_message(request.message)
    return {"response": reply}
|
||||
|
||||
@router.post("/projects/generate")
async def generate_project(request: GenerationRequest):
    """Generate a new ALS project and return its download location."""
    project_generator = ProjectGenerator()
    generated_path = await project_generator.create_from_chat(
        user_id=request.user_id,
        requirements=request.requirements,
    )

    file_name = os.path.basename(generated_path)
    return {
        "status": "success",
        "project_path": generated_path,
        "download_url": f"/projects/{request.user_id}/{file_name}",
    }
|
||||
|
||||
@router.get("/projects/{user_id}/{project_name}")
async def download_project(user_id: str, project_name: str):
    """Download a generated project file.

    Security fix: the original interpolated both user-controlled path
    segments straight into a filesystem path, allowing traversal
    (e.g. a ``..``-bearing value escaping /data/projects). Both segments
    are now reduced to their basename and rejected if that changes them.
    """
    safe_user = os.path.basename(user_id)
    safe_name = os.path.basename(project_name)
    if safe_user != user_id or safe_name != project_name:
        raise HTTPException(status_code=400, detail="Invalid path")

    project_path = f"/data/projects/{safe_user}/{safe_name}"
    return FileResponse(project_path, filename=safe_name)
|
||||
|
||||
@router.get("/projects/{user_id}")
async def list_projects(user_id: str):
    """Return every project belonging to *user_id*."""
    user_projects = db.get_user_projects(user_id)
    return {"projects": user_projects}
|
||||
|
||||
@router.get("/samples")
async def list_samples(filters: SampleFilters | None = None):
    """List available samples, optionally narrowed by *filters*.

    Fix: the original used implicit-Optional (``filters: SampleFilters =
    None``), which modern type checkers reject (PEP 484); the union form
    states the contract explicitly without changing behavior.
    """
    # NOTE(review): .dict() is the Pydantic v1 API; on v2 this should be
    # .model_dump() — confirm the project's Pydantic version.
    samples = sample_db.search(filters.dict() if filters else {})
    return {"samples": samples}
|
||||
|
||||
@router.post("/samples/upload")
async def upload_sample(file: UploadFile = File(...)):
    """Store an uploaded sample and return its new identifier."""
    uploaded_id = sample_manager.upload(file)
    return {"sample_id": uploaded_id, "status": "uploaded"}
|
||||
|
||||
@router.get("/chat/history/{user_id}")
async def get_chat_history(user_id: str, limit: int = 50):
    """Return up to *limit* chat messages for *user_id*."""
    messages = db.get_chat_history(user_id, limit=limit)
    return {"history": messages}
|
||||
```
|
||||
|
||||
## 💾 Base de Datos (SQLAlchemy Models)
|
||||
|
||||
```python
|
||||
# models.py
|
||||
from sqlalchemy import Column, Integer, String, DateTime, ForeignKey, JSON
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy.orm import relationship
|
||||
|
||||
Base = declarative_base()
|
||||
|
||||
class User(Base):
    """Registered account; owns projects and chat history."""

    __tablename__ = 'users'

    id = Column(Integer, primary_key=True)
    username = Column(String(50), unique=True)
    email = Column(String(100), unique=True)
    # Preferred AI backend for this user: 'glm46' or 'minimax'.
    api_provider = Column(String(20))

    # One-to-many back-references (see Project.user / ChatMessage.user).
    projects = relationship("Project", back_populates="user")
    chat_history = relationship("ChatMessage", back_populates="user")
|
||||
|
||||
class Project(Base):
    """A generated Ableton Live project and its musical metadata."""

    __tablename__ = 'projects'

    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('users.id'))
    name = Column(String(100))
    als_path = Column(String(255))  # filesystem path of the generated .als
    style = Column(String(50))
    bpm = Column(Integer)
    key = Column(String(10))
    config = Column(JSON)  # full project configuration as generated

    user = relationship("User", back_populates="projects")
    samples = relationship("ProjectSample", back_populates="project")
|
||||
|
||||
class ChatMessage(Base):
    """One user message / bot response pair in the chat log."""

    __tablename__ = 'chat_messages'

    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('users.id'))
    message = Column(String(1000))
    response = Column(String(1000))
    # NOTE(review): datetime.utcnow is deprecated since Python 3.12;
    # consider a timezone-aware default (lambda: datetime.now(timezone.utc)).
    timestamp = Column(DateTime, default=datetime.utcnow)

    user = relationship("User", back_populates="chat_history")
|
||||
|
||||
class Sample(Base):
    """An audio sample available for project generation."""

    __tablename__ = 'samples'

    id = Column(Integer, primary_key=True)
    name = Column(String(100))
    type = Column(String(50))  # kick, snare, bass, etc
    file_path = Column(String(255))
    bpm = Column(Integer)
    key = Column(String(10))
    tags = Column(JSON)  # free-form tag metadata
|
||||
|
||||
class ProjectSample(Base):
    """Association row linking one sample to a track of a project."""

    __tablename__ = 'project_samples'

    id = Column(Integer, primary_key=True)
    project_id = Column(Integer, ForeignKey('projects.id'))
    sample_id = Column(Integer, ForeignKey('samples.id'))
    track_name = Column(String(50))  # name of the track the sample sits on

    project = relationship("Project", back_populates="samples")
    sample = relationship("Sample")
|
||||
```
|
||||
|
||||
## 🔐 Autenticación
|
||||
|
||||
```python
|
||||
# auth.py
|
||||
from fastapi import Depends, HTTPException, status
|
||||
from fastapi.security import OAuth2PasswordBearer
|
||||
import jwt
|
||||
|
||||
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")
|
||||
|
||||
async def get_current_user(token: str = Depends(oauth2_scheme)):
    """FastAPI dependency: resolve the Bearer token to a user record.

    Raises HTTP 401 when the token has no ``sub`` claim or fails
    signature/validation in jwt.decode.
    """
    try:
        payload = jwt.decode(token, SECRET_KEY, algorithms=["HS256"])
        # NOTE(review): JWT 'sub' is conventionally a string; login() stores
        # user.id here — confirm the type round-trips and that db.get_user
        # accepts it.
        user_id: int = payload.get("sub")
        if user_id is None:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Invalid authentication credentials"
            )
        user = db.get_user(user_id)
        return user
    except jwt.PyJWTError:
        # Covers bad signature, expiry, and malformed tokens.
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid token"
        )
|
||||
|
||||
@router.post("/auth/login")
async def login(form_data: OAuth2PasswordRequestForm = Depends()):
    """Exchange username/password for a JWT bearer token.

    Returns HTTP 401 when authentication fails.
    """
    user = db.authenticate_user(form_data.username, form_data.password)
    if not user:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Incorrect username or password"
        )

    # NOTE(review): PyJWT expects the 'sub' claim to be a string; user.id is
    # presumably an int — confirm encode/decode round-trips it correctly.
    access_token = create_access_token(data={"sub": user.id})
    return {"access_token": access_token, "token_type": "bearer"}
|
||||
```
|
||||
|
||||
## 📊 Request/Response Models
|
||||
|
||||
```python
|
||||
# schemas.py
|
||||
from pydantic import BaseModel
|
||||
from typing import List, Optional, Dict, Any
|
||||
|
||||
class ChatRequest(BaseModel):
    """Request body of POST /chat/message."""

    user_id: str
    message: str
|
||||
|
||||
class GenerationRequest(BaseModel):
    """Request body of POST /projects/generate."""

    user_id: str
    # Free-form generation parameters (e.g. style, bpm, key).
    requirements: Dict[str, Any]
|
||||
|
||||
class ProjectResponse(BaseModel):
    """Response shape of POST /projects/generate."""

    status: str
    project_path: str
    download_url: str
|
||||
|
||||
class ChatResponse(BaseModel):
    """A chatbot reply together with its timestamp string."""

    response: str
    timestamp: str
|
||||
|
||||
class SampleFilters(BaseModel):
    """Optional search criteria for GET /samples; unset fields are ignored."""

    type: Optional[str] = None
    bpm_min: Optional[int] = None
    bpm_max: Optional[int] = None
    key: Optional[str] = None
    style: Optional[str] = None
|
||||
|
||||
class ProjectSummary(BaseModel):
    """Summary view of a stored project."""

    id: int
    name: str
    style: str
    bpm: int
    key: str
    created_at: str
    als_path: str
|
||||
```
|
||||
|
||||
## 🚀 Inicio del Servidor
|
||||
|
||||
```python
|
||||
# main.py
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
import uvicorn
|
||||
|
||||
app = FastAPI(title="MusiaIA - AI Music Generator", version="1.0.0")

# CORS: allow only the local frontend dev server.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["http://localhost:3000"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Mount the API routes under /api/v1.
# NOTE(review): `router` is built as a FastAPI() instance in
# api_endpoints.py; include_router expects an APIRouter — confirm.
app.include_router(router, prefix="/api/v1")

# NOTE(review): this call only builds a decorator and discards it, so no
# WebSocket route is registered here; the route is already declared via
# @router.websocket in chat_websocket.py — confirm intent.
app.websocket_route("/ws/chat/{user_id}")

if __name__ == "__main__":
    # Dev server: auto-reload on code changes.
    uvicorn.run(
        "main:app",
        host="0.0.0.0",
        port=8000,
        reload=True,
        log_level="info"
    )
|
||||
```
|
||||
|
||||
## 🔄 Flujo de Ejemplo
|
||||
|
||||
```python
|
||||
# Ejemplo de flujo completo
|
||||
async def example_usage():
    """End-to-end walkthrough: chat message in, download URL out."""
    # Incoming user request.
    user_message = "Genera un track de house a 124 BPM en La menor"

    # The chat API receives it.
    chat_request = ChatRequest(user_id="user123", message=user_message)
    response = await send_message(chat_request)

    # The AI extracts the musical requirements.
    analysis = await musical_ai.analyze_requirements(user_message)
    # e.g. {'style': 'house', 'bpm': 124, 'key': 'Am', ...}

    # A project is generated from them.
    als_path = await project_generator.create_from_chat(
        user_id="user123",
        requirements=analysis,
    )

    # Finally, hand back a download URL for the generated file.
    file_name = os.path.basename(als_path)
    download_url = f"/projects/user123/{file_name}"

    return {
        "response": "¡Proyecto generado!",
        "download_url": download_url,
    }
|
||||
```
|
||||
|
||||
## 📝 Logging y Monitoreo
|
||||
|
||||
```python
|
||||
# logging_config.py
|
||||
import logging
|
||||
from pythonjsonlogger import jsonlogger
|
||||
|
||||
# Emit every log record as one JSON object so log aggregators can parse
# fields without regexes.
logHandler = logging.StreamHandler()
formatter = jsonlogger.JsonFormatter()
logHandler.setFormatter(formatter)

# Configure the root logger so all module loggers inherit the handler.
logger = logging.getLogger()
logger.addHandler(logHandler)
logger.setLevel(logging.INFO)

# Usage example — keys in `extra` become fields of the JSON record.
# NOTE(review): user_id / generation_time are illustrative names only;
# they are not defined in this module.
logger.info("User generated project", extra={
    "user_id": user_id,
    "project_type": "house",
    "bpm": 124,
    "generation_time": generation_time
})
|
||||
```
|
||||
Reference in New Issue
Block a user