Skip to content

Testing Flows

Honey Badgeria ships a complete testing framework designed for graph-based workflows. You can build test graphs, mock handlers, run flows with assertions, and test every execution mode with standard pytest tooling.

GraphFactory — Building Test Graphs

GraphFactory provides convenience methods for creating graphs in tests:

From Vertex Dicts

from honey_badgeria.testing import GraphFactory

graph = GraphFactory.create(
    vertices=[
        {"name": "fetch", "handler": fetch_fn},
        {"name": "process", "handler": process_fn, "inputs": {"data": "fetch.data"}},
    ],
    edges=[("fetch", "process")],
)

Linear Pipeline

Create a simple A → B → C pipeline:

graph = GraphFactory.linear(["extract", "transform", "load"])

Diamond Pattern

Create an A → (B, C) → D pattern:

graph = GraphFactory.diamond(
    top="fetch",
    left="analyze_text",
    right="analyze_images",
    bottom="combine",
    handlers={
        "fetch": fetch_fn,
        "analyze_text": text_fn,
        "analyze_images": image_fn,
        "combine": combine_fn,
    },
)

VertexMock — Mocking Handlers

VertexMock replaces real handlers with controllable mocks that track calls:

Basic Usage

from honey_badgeria.testing import VertexMock

mock = VertexMock(return_value={"user_id": 42, "name": "Alice"})

# Call it
result = mock(user_id=1)

# Inspect
assert result == {"user_id": 42, "name": "Alice"}
assert mock.call_count == 1
assert mock.called is True
assert mock.last_call == {"user_id": 1}

Sequences

Return different values on consecutive calls:

mock = VertexMock.with_sequence(
    {"step": 1},
    {"step": 2},
    {"step": 3},
)

assert mock()["step"] == 1
assert mock()["step"] == 2
assert mock()["step"] == 3

Side Effects

Simulate errors:

mock = VertexMock(side_effect=ValueError("invalid input"))

with pytest.raises(ValueError, match="invalid input"):
    mock()

Dynamic responses:

mock = VertexMock(side_effect=lambda x: {"doubled": x * 2})

assert mock(x=5)["doubled"] == 10

Async Mocks

from honey_badgeria.testing import AsyncVertexMock

mock = AsyncVertexMock(return_value={"result": 42})
result = await mock(input="test")
assert mock.call_count == 1

Reset

mock = VertexMock(return_value={"ok": True})
mock()
mock()
assert mock.call_count == 2

mock.reset()
assert mock.call_count == 0
assert mock.called is False

FlowTester — Running Flows with Assertions

FlowTester wraps graph execution with assertion helpers:

from honey_badgeria.testing import FlowTester, VertexMock

tester = FlowTester(
    graph,
    handlers={
        "fetch": VertexMock(return_value={"data": [1, 2, 3]}),
        "process": VertexMock(return_value={"done": True}),
    },
)

result = tester.run(initial_data={"seed": 42})

# Assert outputs exist
result.assert_has_key("process.done")

# Assert output values
result.assert_output("process.done", True)

# Assert a vertex was executed
result.assert_vertex_ran("fetch", tester)

StageRunner — Testing Individual Stages

StageRunner lets you test individual execution stages in isolation:

from honey_badgeria.testing import StageRunner

runner = StageRunner(graph)

# Run only the first stage (stage index 0)
stage_result = runner.run_stage(0, handlers={...}, initial_data={...})

Testing Execution Modes

Cache Testing

from honey_badgeria.testing import GraphFactory, VertexMock, FlowTester

mock = VertexMock(return_value={"result": 42})
graph = GraphFactory.linear(["compute"])

tester = FlowTester(graph, handlers={"compute": mock})

# First run — cache miss
tester.run(initial_data={}, cache_enabled=True)
assert mock.call_count == 1

# Second run — cache hit (mock not called again)
tester.run(initial_data={}, cache_enabled=True)
assert mock.call_count == 1  # Still 1 — cached

Parallel Testing

tester = FlowTester(graph, handlers={...})
result = tester.run(
    initial_data={...},
    parallel_enabled=True,
    max_workers=4,
)

Async Testing

from honey_badgeria.testing import AsyncVertexMock

tester = FlowTester(graph, handlers={
    "fetch": AsyncVertexMock(return_value={"data": "ok"}),
})

result = await tester.run_async(
    initial_data={...},
    async_enabled=True,
)

Testing Atomic Groups

from honey_badgeria.back.atomicity import InMemoryBackend

backend = InMemoryBackend()

tester = FlowTester(
    graph,
    handlers={...},
    transaction_backend=backend,
)

result = tester.run(initial_data={...})

# Verify atomicity behavior
assert backend.was_committed()
assert not backend.was_rolled_back()

Test rollback on failure:

tester = FlowTester(
    graph,
    handlers={
        "validate": VertexMock(return_value={"ok": True}),
        "charge": VertexMock(side_effect=RuntimeError("payment failed")),
    },
    transaction_backend=backend,
)

with pytest.raises(RuntimeError):
    tester.run(initial_data={...})

assert backend.was_rolled_back()
assert not backend.was_committed()

Pytest Fixtures

Honey Badgeria provides fixture factory functions for pytest:

from honey_badgeria.testing.fixtures import (
    make_graph_fixture,
    make_flow_tester,
    make_vertex_mock,
)

Example conftest.py

import pytest
from honey_badgeria.testing import GraphFactory, VertexMock, FlowTester

@pytest.fixture
def simple_graph():
    return GraphFactory.linear(["extract", "transform", "load"])

@pytest.fixture
def mock_handlers():
    return {
        "extract": VertexMock(return_value={"raw": "data"}),
        "transform": VertexMock(return_value={"clean": "data"}),
        "load": VertexMock(return_value={"success": True}),
    }

@pytest.fixture
def tester(simple_graph, mock_handlers):
    return FlowTester(simple_graph, handlers=mock_handlers)

Example Test

def test_etl_pipeline(tester):
    result = tester.run(initial_data={"source": "test"})
    result.assert_has_key("load.success")
    result.assert_output("load.success", True)

Test Organization

Recommended project structure for tests:

tests/
├── conftest.py              # Shared fixtures
├── test_flows/
│   ├── test_create_user.py  # Flow-level integration tests
│   └── test_payment.py
├── test_vertices/
│   ├── test_normalize.py    # Individual handler unit tests
│   └── test_validate.py
└── test_integration/
    └── test_api.py          # FastAPI integration tests

Running Tests

# Run all tests
pytest

# Run with verbose output
pytest -v

# Run specific test file
pytest tests/test_flows/test_create_user.py

# Run tests matching a pattern
pytest -k "payment"

What's Next?

FastAPI Integration