Files
cbc2027/docs/TESTING.md

10 KiB

Guia de Testing - CBCFacil

Esta guia describe como ejecutar y escribir tests para CBCFacil.

Estructura de Tests

tests/
├── conftest.py                    # Fixtures compartidos
├── test_config.py                 # Tests de configuracion
├── test_storage.py                # Tests de almacenamiento
├── test_webdav.py                 # Tests de WebDAV
├── test_processors.py             # Tests de procesadores
├── test_ai_providers.py           # Tests de AI providers
├── test_vram_manager.py           # Tests de VRAM manager
└── test_main_integration.py       # Tests de integracion

Instalacion de Dependencias de Test

# Activar entorno virtual
source .venv/bin/activate

# Instalar dependencias de desarrollo
pip install -r requirements-dev.txt

# Verificar instalacion
pytest --version

Ejecutar Tests

Todos los Tests

# Ejecutar todos los tests
pytest tests/

# Con output detallado
pytest tests/ -v

Tests Especificos

# Tests de configuracion
pytest tests/test_config.py -v

# Tests de almacenamiento
pytest tests/test_storage.py -v

# Tests de WebDAV
pytest tests/test_webdav.py -v

# Tests de procesadores
pytest tests/test_processors.py -v

# Tests de AI providers
pytest tests/test_ai_providers.py -v

# Tests de VRAM manager
pytest tests/test_vram_manager.py -v

# Tests de integracion
pytest tests/test_main_integration.py -v

Tests con Coverage

# Coverage basico
pytest tests/ --cov=cbcfacil

# Coverage con reporte HTML
pytest tests/ --cov=cbcfacil --cov-report=html

# Coverage con reporte term-missing
pytest tests/ --cov=cbcfacil --cov-report=term-missing

# Coverage con fuentes anotadas (genera copias anotadas de cada modulo)
pytest tests/ --cov=cbcfacil --cov-report=term-missing --cov-report=annotate

Tests en Modo Watch

# Recargar automaticamente al detectar cambios
pytest-watch tests/

Tests Paralelos

# Ejecutar tests en paralelo
pytest tests/ -n auto

# Con numero fijo de workers
pytest tests/ -n 4

Escribir Nuevos Tests

Estructura Basica

# tests/test_ejemplo.py
import pytest
from pathlib import Path

class TestEjemplo:
    """Clase de tests para un modulo"""

    def setup_method(self):
        """Setup antes de cada test"""
        pass

    def teardown_method(self):
        """Cleanup despues de cada test"""
        pass

    def test_funcion_basica(self):
        """Test de una funcion basica"""
        # Arrange
        input_value = "test"

        # Act
        result = mi_funcion(input_value)

        # Assert
        assert result is not None
        assert result == "expected"

Usar Fixtures

# tests/conftest.py
import pytest
from pathlib import Path

@pytest.fixture
def temp_directory(tmp_path):
    """Fixture para directorio temporal"""
    dir_path = tmp_path / "test_files"
    dir_path.mkdir()
    return dir_path

@pytest.fixture
def mock_settings():
    """Fixture con settings de prueba"""
    class MockSettings:
        NEXTCLOUD_URL = "https://test.example.com"
        NEXTCLOUD_USER = "test_user"
        NEXTCLOUD_PASSWORD = "test_pass"
    return MockSettings()

# En tu test
def test_con_fixture(temp_directory, mock_settings):
    """Test usando fixtures"""
    assert temp_directory.exists()
    assert mock_settings.NEXTCLOUD_URL == "https://test.example.com"

Tests de Configuracion

# tests/test_config.py
import pytest
from pathlib import Path
from config import settings

class TestSettings:
    """Tests para configuracion"""

    def test_has_webdav_config_true(self):
        """Test con WebDAV configurado"""
        # Verificar que las properties funcionan
        assert hasattr(settings, 'has_webdav_config')
        assert hasattr(settings, 'has_ai_config')

    def test_processed_files_path(self):
        """Test del path de archivos procesados"""
        path = settings.processed_files_path
        assert isinstance(path, Path)
        assert path.suffix == ".txt"

Tests de WebDAV

# tests/test_webdav.py
import pytest
from unittest.mock import Mock, patch

class TestWebDAVService:
    """Tests para WebDAV Service"""

    @pytest.fixture
    def webdav_service(self):
        """Crear instancia del servicio"""
        from services.webdav_service import webdav_service
        return webdav_service

    def test_list_remote_path(self, webdav_service):
        """Test de listado de archivos remotos"""
        # Mock del cliente WebDAV
        with patch('services.webdav_service.WebDAVClient') as mock_client:
            mock_instance = Mock()
            mock_instance.list.return_value = ['file1.pdf', 'file2.mp3']
            mock_client.return_value = mock_instance

            # Inicializar servicio
            webdav_service.initialize()

            # Test
            files = webdav_service.list("TestFolder")
            assert len(files) == 2
            assert "file1.pdf" in files

Tests de Procesadores

# tests/test_processors.py
import pytest
from unittest.mock import Mock, patch

class TestAudioProcessor:
    """Tests para Audio Processor"""

    @pytest.fixture
    def processor(self):
        """Crear procesador"""
        from processors.audio_processor import AudioProcessor
        return AudioProcessor()

    def test_process_audio_file(self, processor, tmp_path):
        """Test de procesamiento de audio"""
        # Crear archivo de prueba
        audio_file = tmp_path / "test.mp3"
        audio_file.write_bytes(b"fake audio content")

        # Mock de Whisper
        with patch('processors.audio_processor.whisper') as mock_whisper:
            mock_whisper.load_model.return_value.transcribe.return_value = {
                "text": "Texto transcrito de prueba"
            }

            # Ejecutar
            result = processor.process(str(audio_file))

            # Verificar
            assert result is not None

Tests de AI Providers

# tests/test_ai_providers.py
import pytest
from unittest.mock import Mock, patch

class TestClaudeProvider:
    """Tests para Claude Provider"""

    def test_summarize_text(self):
        """Test de resumen con Claude"""
        from services.ai.claude_provider import ClaudeProvider

        provider = ClaudeProvider()
        test_text = "Texto largo para resumir..."

        # Mock de la llamada API
        with patch.object(provider, '_call_api') as mock_call:
            mock_call.return_value = "Texto resumido"

            result = provider.summarize(test_text)

            assert result == "Texto resumido"
            mock_call.assert_called_once()

Tests de Integracion

# tests/test_main_integration.py
import pytest
from unittest.mock import patch

class TestMainIntegration:
    """Tests de integracion del main"""

    def test_main_loop_no_files(self):
        """Test del loop principal sin archivos nuevos"""
        with patch('main.webdav_service') as mock_webdav:
            with patch('main.processed_registry') as mock_registry:
                mock_webdav.list.return_value = []
                mock_registry.is_processed.return_value = True

                # El loop no debe procesar nada
                # Verificar que no se llama a procesadores

Configuracion de pytest

# pyproject.toml (la sintaxis [tool.pytest.ini_options] es exclusiva de pyproject.toml;
# en pytest.ini se usa la seccion [pytest] con formato INI)
[tool.pytest.ini_options]
testpaths = ["tests"]
python_files = ["test_*.py"]
python_classes = ["Test*"]
python_functions = ["test_*"]
addopts = [
    "-v",
    "--tb=short",
    "--strict-markers",
]
filterwarnings = [
    "ignore::DeprecationWarning",
]

Mejores Practicas

1. Nombrado de Tests

# BIEN
def test_webdav_service_list_returns_files():
    ...

def test_processed_registry_is_processed_true_for_processed_file():
    ...

# MAL
def test_list():
    ...

def test_check():
    ...

2. Estructura AAA

def test_ejemplo_aaa():
    # Arrange
    input_data = {"key": "value"}
    expected = "result"

    # Act
    actual = function_under_test(input_data)

    # Assert
    assert actual == expected

3. Tests Aislados

# Cada test debe ser independiente
def test_independent():
    # No depender de estado de otros tests
    # Usar fixtures para setup/cleanup
    pass

4. Evitar Testear la Implementacion

# BIEN - Test del comportamiento, no la implementacion
def test_registry_returns_true_for_processed_file():
    registry = ProcessedRegistry()
    registry.save("file.txt")
    assert registry.is_processed("file.txt") is True

# MAL - Test de implementacion
def test_registry_uses_set_internally():
    # No testar detalles de implementacion
    registry = ProcessedRegistry()
    assert hasattr(registry, '_processed_files')

5. Mocks Apropiados

# Usar mocks para dependencias externas
from unittest.mock import Mock, patch, MagicMock

def test_with_mocked_api():
    with patch('requests.get') as mock_get:
        mock_response = Mock()
        mock_response.json.return_value = {"key": "value"}
        mock_get.return_value = mock_response

        result = my_api_function()

        assert result == {"key": "value"}

Coverage Objetivo

Componente Coverage Minimo
config/ 90%
core/ 90%
services/ 70%
processors/ 60%
storage/ 90%
api/ 80%

Integracion con CI/CD

# .github/workflows/tests.yml
name: Tests

on: [push, pull_request]

jobs:
  test:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          pip install -r requirements-dev.txt

      - name: Run tests
        run: |
          pytest tests/ --cov=cbcfacil --cov-report=xml

      - name: Upload coverage
        uses: codecov/codecov-action@v3

Troubleshooting

Tests Fallan por Imports

# Verificar que el venv esta activado
source .venv/bin/activate

# Reinstalar el paquete en modo desarrollo
pip install -e .

Tests Muy Lentos

# Ejecutar en paralelo
pytest tests/ -n auto

# O ejecutar solo tests rapidos
pytest tests/ -m "not slow"

Memory Errors

# Reducir workers
pytest tests/ -n 2

# O ejecutar secuencial
pytest tests/ -n 0

Recursos Adicionales