diff --git a/.claude/settings.local.json b/.claude/settings.local.json index e0a7dc3..19c23a9 100644 --- a/.claude/settings.local.json +++ b/.claude/settings.local.json @@ -35,7 +35,11 @@ "Bash(/root/.dotnet/dotnet sln:*)", "Bash(./docflow-dev.sh:*)", "Bash(rm:*)", - "Bash(grep:*)" + "Bash(grep:*)", + "Bash(mkdir -p /tmp/diff-demo)", + "Bash(cp samples/integration-demos/petstore.json /tmp/diff-demo/old.json)", + "Bash(cp samples/integration-demos/petstore.json /tmp/diff-demo/new.json)", + "Bash(sed -i 's/\"placeOrder\"/\"placeOrderV2\"/' /tmp/diff-demo/new.json)" ] } } diff --git a/CLAUDE.md b/CLAUDE.md index 99fcb56..0a6dec6 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -14,6 +14,8 @@ DocFlow is an intelligent documentation and modeling toolkit. Current implementa | Whiteboard Scanner | **Complete** | Claude Vision API integration | | CLI | **Complete** | System.CommandLine + Spectre.Console | | Integration Module | **Complete** | OpenAPI parsing, CDM mapping, SLA validation, code generation | +| API Docs Generation | **Complete (Phases 1–5)** | OpenAPI → Markdown or HTML with class/ER/sequence/C4-context/endpoint-flow diagrams, synthesised JSON examples, dedicated security page, cross-linked entities, `--watch` regeneration, and pluggable `IApiSpecParser`s (`integrate docs`) | +| API Spec Diff | **Complete (Phase 5)** | `integrate diff` produces a breaking / non-breaking Markdown changelog across two specs | | IMS Learning | Designed | Pattern learning system (not implemented) | | Document Pipeline | Planned | PDF/Word conversion | @@ -73,6 +75,12 @@ docflow integrate sla <url> --expected <duration> [--samples 10] [--interval 5s] # Generate integration code (DTOs, AutoMapper, HTTP client, validators) docflow integrate generate <spec> --cdm <models-dir> -o <out-dir> [-n namespace] + +# Generate a design-documentation bundle (Markdown or HTML + Mermaid diagrams) +docflow integrate docs <spec> -o <out-dir> [--format markdown|html] [--diagrams class,er,sequence,context,flow,all,none] [--group-by tag|path] [--title "My API"] 
[--with-examples] [--watch] [-v] + +# Produce a breaking / non-breaking Markdown changelog between two specs +docflow integrate diff <old-spec> <new-spec> -o <changelog.md> ``` ## Architecture Overview @@ -236,6 +244,21 @@ API keys can be configured via: | `src/DocFlow.Integration/Mapping/CdmMapper.cs` | CDM mapping with confidence | | `src/DocFlow.Integration/Validation/SlaValidator.cs` | SLA data freshness validation | | `src/DocFlow.Integration/CodeGen/IntegrationCodeGenerator.cs` | Integration code generation | +| `src/DocFlow.Core/CanonicalModel/ApiSurface.cs` | Canonical API-surface records (operations, params, responses, security) | +| `src/DocFlow.Documentation/Abstractions/IDocumentationGenerator.cs` | Documentation generator contract | +| `src/DocFlow.Documentation/Markdown/MarkdownDocumentationGenerator.cs` | Phase 1 Markdown doc generator | +| `src/DocFlow.Documentation/Markdown/Sections/` | Per-section builders (Overview/DomainModel/Endpoint/Index) | +| `src/DocFlow.Diagrams/Mermaid/MermaidErDiagramGenerator.cs` | ER diagram from SemanticModel (Composition/Aggregation/Association) | +| `src/DocFlow.Diagrams/Mermaid/MermaidSequenceDiagramGenerator.cs` | Per-operation sequence diagram from ApiOperation | +| `src/DocFlow.Diagrams/Mermaid/MermaidC4ContextGenerator.cs` | C4-style system-context diagram (flowchart-LR fallback) | +| `src/DocFlow.Diagrams/Mermaid/MermaidEndpointFlowchartGenerator.cs` | Per-operation request-lifecycle flowchart | +| `src/DocFlow.Documentation/Examples/ExampleSynthesizer.cs` | Schema-driven JSON example synthesis (spec examples preferred) | +| `src/DocFlow.Documentation/Html/StaticSiteRenderer.cs` | Markdown bundle → self-contained static HTML site (Markdig) | +| `src/DocFlow.Documentation/Html/Assets/theme.css` | Embedded HTML theme (dark/light) | +| `src/DocFlow.Documentation/Diff/SpecDiffer.cs` | Computes a breaking / non-breaking SpecDiff between two SemanticModels | +| `src/DocFlow.Documentation/Diff/ChangelogGenerator.cs` | Renders a SpecDiff as a grouped 
Markdown changelog | +| `src/DocFlow.Core/Abstractions/IApiSpecParser.cs` | Pluggable spec-parser strategy interface | +| `src/DocFlow.Core/Abstractions/SpecParserRegistry.cs` | Picks the first registered parser whose CanParse matches | | `src/DocFlow.CLI/Program.cs` | CLI entry point | ## Sample Files diff --git a/DocFlow.sln b/DocFlow.sln index d0f46fb..36c2809 100644 --- a/DocFlow.sln +++ b/DocFlow.sln @@ -33,64 +33,242 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{10000000-000 EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "tests", "tests", "{20000000-0000-0000-0000-000000000002}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DocFlow.Core.Tests", "tests\DocFlow.Core.Tests\DocFlow.Core.Tests.csproj", "{DE7C8E71-A9CA-40DE-B794-1D54FC1BB65D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DocFlow.Documentation", "src\DocFlow.Documentation\DocFlow.Documentation.csproj", "{C0722CFC-7249-4F7F-9082-24448E7128DB}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DocFlow.Documentation.Tests", "tests\DocFlow.Documentation.Tests\DocFlow.Documentation.Tests.csproj", "{0B9761C1-4DD6-41E5-9E46-0A9ACEF6D2A5}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DocFlow.Integration.Tests", "tests\DocFlow.Integration.Tests\DocFlow.Integration.Tests.csproj", "{BE04557C-735E-4601-AC2A-DE96DE83CE0C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DocFlow.CLI.Tests", "tests\DocFlow.CLI.Tests\DocFlow.CLI.Tests.csproj", "{720D2F0D-FE32-411A-8EAA-86F7C081EDB1}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 EndGlobalSection GlobalSection(ProjectConfigurationPlatforms) = postSolution {A1B2C3D4-1234-5678-9ABC-DEF012345678}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 
{A1B2C3D4-1234-5678-9ABC-DEF012345678}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A1B2C3D4-1234-5678-9ABC-DEF012345678}.Debug|x64.ActiveCfg = Debug|Any CPU + {A1B2C3D4-1234-5678-9ABC-DEF012345678}.Debug|x64.Build.0 = Debug|Any CPU + {A1B2C3D4-1234-5678-9ABC-DEF012345678}.Debug|x86.ActiveCfg = Debug|Any CPU + {A1B2C3D4-1234-5678-9ABC-DEF012345678}.Debug|x86.Build.0 = Debug|Any CPU {A1B2C3D4-1234-5678-9ABC-DEF012345678}.Release|Any CPU.ActiveCfg = Release|Any CPU {A1B2C3D4-1234-5678-9ABC-DEF012345678}.Release|Any CPU.Build.0 = Release|Any CPU + {A1B2C3D4-1234-5678-9ABC-DEF012345678}.Release|x64.ActiveCfg = Release|Any CPU + {A1B2C3D4-1234-5678-9ABC-DEF012345678}.Release|x64.Build.0 = Release|Any CPU + {A1B2C3D4-1234-5678-9ABC-DEF012345678}.Release|x86.ActiveCfg = Release|Any CPU + {A1B2C3D4-1234-5678-9ABC-DEF012345678}.Release|x86.Build.0 = Release|Any CPU {B2C3D4E5-2345-6789-ABCD-EF0123456789}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {B2C3D4E5-2345-6789-ABCD-EF0123456789}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B2C3D4E5-2345-6789-ABCD-EF0123456789}.Debug|x64.ActiveCfg = Debug|Any CPU + {B2C3D4E5-2345-6789-ABCD-EF0123456789}.Debug|x64.Build.0 = Debug|Any CPU + {B2C3D4E5-2345-6789-ABCD-EF0123456789}.Debug|x86.ActiveCfg = Debug|Any CPU + {B2C3D4E5-2345-6789-ABCD-EF0123456789}.Debug|x86.Build.0 = Debug|Any CPU {B2C3D4E5-2345-6789-ABCD-EF0123456789}.Release|Any CPU.ActiveCfg = Release|Any CPU {B2C3D4E5-2345-6789-ABCD-EF0123456789}.Release|Any CPU.Build.0 = Release|Any CPU + {B2C3D4E5-2345-6789-ABCD-EF0123456789}.Release|x64.ActiveCfg = Release|Any CPU + {B2C3D4E5-2345-6789-ABCD-EF0123456789}.Release|x64.Build.0 = Release|Any CPU + {B2C3D4E5-2345-6789-ABCD-EF0123456789}.Release|x86.ActiveCfg = Release|Any CPU + {B2C3D4E5-2345-6789-ABCD-EF0123456789}.Release|x86.Build.0 = Release|Any CPU {C3D4E5F6-3456-789A-BCDE-F01234567890}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {C3D4E5F6-3456-789A-BCDE-F01234567890}.Debug|Any CPU.Build.0 = Debug|Any CPU + 
{C3D4E5F6-3456-789A-BCDE-F01234567890}.Debug|x64.ActiveCfg = Debug|Any CPU + {C3D4E5F6-3456-789A-BCDE-F01234567890}.Debug|x64.Build.0 = Debug|Any CPU + {C3D4E5F6-3456-789A-BCDE-F01234567890}.Debug|x86.ActiveCfg = Debug|Any CPU + {C3D4E5F6-3456-789A-BCDE-F01234567890}.Debug|x86.Build.0 = Debug|Any CPU {C3D4E5F6-3456-789A-BCDE-F01234567890}.Release|Any CPU.ActiveCfg = Release|Any CPU {C3D4E5F6-3456-789A-BCDE-F01234567890}.Release|Any CPU.Build.0 = Release|Any CPU + {C3D4E5F6-3456-789A-BCDE-F01234567890}.Release|x64.ActiveCfg = Release|Any CPU + {C3D4E5F6-3456-789A-BCDE-F01234567890}.Release|x64.Build.0 = Release|Any CPU + {C3D4E5F6-3456-789A-BCDE-F01234567890}.Release|x86.ActiveCfg = Release|Any CPU + {C3D4E5F6-3456-789A-BCDE-F01234567890}.Release|x86.Build.0 = Release|Any CPU {D4E5F6A7-4567-89AB-CDEF-012345678901}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {D4E5F6A7-4567-89AB-CDEF-012345678901}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D4E5F6A7-4567-89AB-CDEF-012345678901}.Debug|x64.ActiveCfg = Debug|Any CPU + {D4E5F6A7-4567-89AB-CDEF-012345678901}.Debug|x64.Build.0 = Debug|Any CPU + {D4E5F6A7-4567-89AB-CDEF-012345678901}.Debug|x86.ActiveCfg = Debug|Any CPU + {D4E5F6A7-4567-89AB-CDEF-012345678901}.Debug|x86.Build.0 = Debug|Any CPU {D4E5F6A7-4567-89AB-CDEF-012345678901}.Release|Any CPU.ActiveCfg = Release|Any CPU {D4E5F6A7-4567-89AB-CDEF-012345678901}.Release|Any CPU.Build.0 = Release|Any CPU + {D4E5F6A7-4567-89AB-CDEF-012345678901}.Release|x64.ActiveCfg = Release|Any CPU + {D4E5F6A7-4567-89AB-CDEF-012345678901}.Release|x64.Build.0 = Release|Any CPU + {D4E5F6A7-4567-89AB-CDEF-012345678901}.Release|x86.ActiveCfg = Release|Any CPU + {D4E5F6A7-4567-89AB-CDEF-012345678901}.Release|x86.Build.0 = Release|Any CPU {E5F6A7B8-5678-9ABC-DEF0-123456789012}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {E5F6A7B8-5678-9ABC-DEF0-123456789012}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E5F6A7B8-5678-9ABC-DEF0-123456789012}.Debug|x64.ActiveCfg = Debug|Any CPU + 
{E5F6A7B8-5678-9ABC-DEF0-123456789012}.Debug|x64.Build.0 = Debug|Any CPU + {E5F6A7B8-5678-9ABC-DEF0-123456789012}.Debug|x86.ActiveCfg = Debug|Any CPU + {E5F6A7B8-5678-9ABC-DEF0-123456789012}.Debug|x86.Build.0 = Debug|Any CPU {E5F6A7B8-5678-9ABC-DEF0-123456789012}.Release|Any CPU.ActiveCfg = Release|Any CPU {E5F6A7B8-5678-9ABC-DEF0-123456789012}.Release|Any CPU.Build.0 = Release|Any CPU + {E5F6A7B8-5678-9ABC-DEF0-123456789012}.Release|x64.ActiveCfg = Release|Any CPU + {E5F6A7B8-5678-9ABC-DEF0-123456789012}.Release|x64.Build.0 = Release|Any CPU + {E5F6A7B8-5678-9ABC-DEF0-123456789012}.Release|x86.ActiveCfg = Release|Any CPU + {E5F6A7B8-5678-9ABC-DEF0-123456789012}.Release|x86.Build.0 = Release|Any CPU {F6A7B8C9-6789-ABCD-EF01-234567890123}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {F6A7B8C9-6789-ABCD-EF01-234567890123}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F6A7B8C9-6789-ABCD-EF01-234567890123}.Debug|x64.ActiveCfg = Debug|Any CPU + {F6A7B8C9-6789-ABCD-EF01-234567890123}.Debug|x64.Build.0 = Debug|Any CPU + {F6A7B8C9-6789-ABCD-EF01-234567890123}.Debug|x86.ActiveCfg = Debug|Any CPU + {F6A7B8C9-6789-ABCD-EF01-234567890123}.Debug|x86.Build.0 = Debug|Any CPU {F6A7B8C9-6789-ABCD-EF01-234567890123}.Release|Any CPU.ActiveCfg = Release|Any CPU {F6A7B8C9-6789-ABCD-EF01-234567890123}.Release|Any CPU.Build.0 = Release|Any CPU + {F6A7B8C9-6789-ABCD-EF01-234567890123}.Release|x64.ActiveCfg = Release|Any CPU + {F6A7B8C9-6789-ABCD-EF01-234567890123}.Release|x64.Build.0 = Release|Any CPU + {F6A7B8C9-6789-ABCD-EF01-234567890123}.Release|x86.ActiveCfg = Release|Any CPU + {F6A7B8C9-6789-ABCD-EF01-234567890123}.Release|x86.Build.0 = Release|Any CPU {A7B8C9D0-789A-BCDE-F012-345678901234}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {A7B8C9D0-789A-BCDE-F012-345678901234}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A7B8C9D0-789A-BCDE-F012-345678901234}.Debug|x64.ActiveCfg = Debug|Any CPU + {A7B8C9D0-789A-BCDE-F012-345678901234}.Debug|x64.Build.0 = Debug|Any CPU + 
{A7B8C9D0-789A-BCDE-F012-345678901234}.Debug|x86.ActiveCfg = Debug|Any CPU + {A7B8C9D0-789A-BCDE-F012-345678901234}.Debug|x86.Build.0 = Debug|Any CPU {A7B8C9D0-789A-BCDE-F012-345678901234}.Release|Any CPU.ActiveCfg = Release|Any CPU {A7B8C9D0-789A-BCDE-F012-345678901234}.Release|Any CPU.Build.0 = Release|Any CPU + {A7B8C9D0-789A-BCDE-F012-345678901234}.Release|x64.ActiveCfg = Release|Any CPU + {A7B8C9D0-789A-BCDE-F012-345678901234}.Release|x64.Build.0 = Release|Any CPU + {A7B8C9D0-789A-BCDE-F012-345678901234}.Release|x86.ActiveCfg = Release|Any CPU + {A7B8C9D0-789A-BCDE-F012-345678901234}.Release|x86.Build.0 = Release|Any CPU {B8C9D0E1-89AB-CDEF-0123-456789012345}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {B8C9D0E1-89AB-CDEF-0123-456789012345}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B8C9D0E1-89AB-CDEF-0123-456789012345}.Debug|x64.ActiveCfg = Debug|Any CPU + {B8C9D0E1-89AB-CDEF-0123-456789012345}.Debug|x64.Build.0 = Debug|Any CPU + {B8C9D0E1-89AB-CDEF-0123-456789012345}.Debug|x86.ActiveCfg = Debug|Any CPU + {B8C9D0E1-89AB-CDEF-0123-456789012345}.Debug|x86.Build.0 = Debug|Any CPU {B8C9D0E1-89AB-CDEF-0123-456789012345}.Release|Any CPU.ActiveCfg = Release|Any CPU {B8C9D0E1-89AB-CDEF-0123-456789012345}.Release|Any CPU.Build.0 = Release|Any CPU + {B8C9D0E1-89AB-CDEF-0123-456789012345}.Release|x64.ActiveCfg = Release|Any CPU + {B8C9D0E1-89AB-CDEF-0123-456789012345}.Release|x64.Build.0 = Release|Any CPU + {B8C9D0E1-89AB-CDEF-0123-456789012345}.Release|x86.ActiveCfg = Release|Any CPU + {B8C9D0E1-89AB-CDEF-0123-456789012345}.Release|x86.Build.0 = Release|Any CPU {C9D0E1F2-9ABC-DEF0-1234-567890123456}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {C9D0E1F2-9ABC-DEF0-1234-567890123456}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C9D0E1F2-9ABC-DEF0-1234-567890123456}.Debug|x64.ActiveCfg = Debug|Any CPU + {C9D0E1F2-9ABC-DEF0-1234-567890123456}.Debug|x64.Build.0 = Debug|Any CPU + {C9D0E1F2-9ABC-DEF0-1234-567890123456}.Debug|x86.ActiveCfg = Debug|Any CPU + 
{C9D0E1F2-9ABC-DEF0-1234-567890123456}.Debug|x86.Build.0 = Debug|Any CPU {C9D0E1F2-9ABC-DEF0-1234-567890123456}.Release|Any CPU.ActiveCfg = Release|Any CPU {C9D0E1F2-9ABC-DEF0-1234-567890123456}.Release|Any CPU.Build.0 = Release|Any CPU + {C9D0E1F2-9ABC-DEF0-1234-567890123456}.Release|x64.ActiveCfg = Release|Any CPU + {C9D0E1F2-9ABC-DEF0-1234-567890123456}.Release|x64.Build.0 = Release|Any CPU + {C9D0E1F2-9ABC-DEF0-1234-567890123456}.Release|x86.ActiveCfg = Release|Any CPU + {C9D0E1F2-9ABC-DEF0-1234-567890123456}.Release|x86.Build.0 = Release|Any CPU {D0E1F2A3-ABCD-EF01-2345-678901234567}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {D0E1F2A3-ABCD-EF01-2345-678901234567}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D0E1F2A3-ABCD-EF01-2345-678901234567}.Debug|x64.ActiveCfg = Debug|Any CPU + {D0E1F2A3-ABCD-EF01-2345-678901234567}.Debug|x64.Build.0 = Debug|Any CPU + {D0E1F2A3-ABCD-EF01-2345-678901234567}.Debug|x86.ActiveCfg = Debug|Any CPU + {D0E1F2A3-ABCD-EF01-2345-678901234567}.Debug|x86.Build.0 = Debug|Any CPU {D0E1F2A3-ABCD-EF01-2345-678901234567}.Release|Any CPU.ActiveCfg = Release|Any CPU {D0E1F2A3-ABCD-EF01-2345-678901234567}.Release|Any CPU.Build.0 = Release|Any CPU + {D0E1F2A3-ABCD-EF01-2345-678901234567}.Release|x64.ActiveCfg = Release|Any CPU + {D0E1F2A3-ABCD-EF01-2345-678901234567}.Release|x64.Build.0 = Release|Any CPU + {D0E1F2A3-ABCD-EF01-2345-678901234567}.Release|x86.ActiveCfg = Release|Any CPU + {D0E1F2A3-ABCD-EF01-2345-678901234567}.Release|x86.Build.0 = Release|Any CPU {F8A9B0C1-2345-6789-ABCD-456789012345}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {F8A9B0C1-2345-6789-ABCD-456789012345}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F8A9B0C1-2345-6789-ABCD-456789012345}.Debug|x64.ActiveCfg = Debug|Any CPU + {F8A9B0C1-2345-6789-ABCD-456789012345}.Debug|x64.Build.0 = Debug|Any CPU + {F8A9B0C1-2345-6789-ABCD-456789012345}.Debug|x86.ActiveCfg = Debug|Any CPU + {F8A9B0C1-2345-6789-ABCD-456789012345}.Debug|x86.Build.0 = Debug|Any CPU 
{F8A9B0C1-2345-6789-ABCD-456789012345}.Release|Any CPU.ActiveCfg = Release|Any CPU {F8A9B0C1-2345-6789-ABCD-456789012345}.Release|Any CPU.Build.0 = Release|Any CPU + {F8A9B0C1-2345-6789-ABCD-456789012345}.Release|x64.ActiveCfg = Release|Any CPU + {F8A9B0C1-2345-6789-ABCD-456789012345}.Release|x64.Build.0 = Release|Any CPU + {F8A9B0C1-2345-6789-ABCD-456789012345}.Release|x86.ActiveCfg = Release|Any CPU + {F8A9B0C1-2345-6789-ABCD-456789012345}.Release|x86.Build.0 = Release|Any CPU {C5D6E7F8-F012-3456-789A-123456789012}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {C5D6E7F8-F012-3456-789A-123456789012}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C5D6E7F8-F012-3456-789A-123456789012}.Debug|x64.ActiveCfg = Debug|Any CPU + {C5D6E7F8-F012-3456-789A-123456789012}.Debug|x64.Build.0 = Debug|Any CPU + {C5D6E7F8-F012-3456-789A-123456789012}.Debug|x86.ActiveCfg = Debug|Any CPU + {C5D6E7F8-F012-3456-789A-123456789012}.Debug|x86.Build.0 = Debug|Any CPU {C5D6E7F8-F012-3456-789A-123456789012}.Release|Any CPU.ActiveCfg = Release|Any CPU {C5D6E7F8-F012-3456-789A-123456789012}.Release|Any CPU.Build.0 = Release|Any CPU + {C5D6E7F8-F012-3456-789A-123456789012}.Release|x64.ActiveCfg = Release|Any CPU + {C5D6E7F8-F012-3456-789A-123456789012}.Release|x64.Build.0 = Release|Any CPU + {C5D6E7F8-F012-3456-789A-123456789012}.Release|x86.ActiveCfg = Release|Any CPU + {C5D6E7F8-F012-3456-789A-123456789012}.Release|x86.Build.0 = Release|Any CPU {D6E7F8A9-0123-4567-89AB-234567890123}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {D6E7F8A9-0123-4567-89AB-234567890123}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D6E7F8A9-0123-4567-89AB-234567890123}.Debug|x64.ActiveCfg = Debug|Any CPU + {D6E7F8A9-0123-4567-89AB-234567890123}.Debug|x64.Build.0 = Debug|Any CPU + {D6E7F8A9-0123-4567-89AB-234567890123}.Debug|x86.ActiveCfg = Debug|Any CPU + {D6E7F8A9-0123-4567-89AB-234567890123}.Debug|x86.Build.0 = Debug|Any CPU {D6E7F8A9-0123-4567-89AB-234567890123}.Release|Any CPU.ActiveCfg = Release|Any CPU 
{D6E7F8A9-0123-4567-89AB-234567890123}.Release|Any CPU.Build.0 = Release|Any CPU + {D6E7F8A9-0123-4567-89AB-234567890123}.Release|x64.ActiveCfg = Release|Any CPU + {D6E7F8A9-0123-4567-89AB-234567890123}.Release|x64.Build.0 = Release|Any CPU + {D6E7F8A9-0123-4567-89AB-234567890123}.Release|x86.ActiveCfg = Release|Any CPU + {D6E7F8A9-0123-4567-89AB-234567890123}.Release|x86.Build.0 = Release|Any CPU + {DE7C8E71-A9CA-40DE-B794-1D54FC1BB65D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {DE7C8E71-A9CA-40DE-B794-1D54FC1BB65D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {DE7C8E71-A9CA-40DE-B794-1D54FC1BB65D}.Debug|x64.ActiveCfg = Debug|Any CPU + {DE7C8E71-A9CA-40DE-B794-1D54FC1BB65D}.Debug|x64.Build.0 = Debug|Any CPU + {DE7C8E71-A9CA-40DE-B794-1D54FC1BB65D}.Debug|x86.ActiveCfg = Debug|Any CPU + {DE7C8E71-A9CA-40DE-B794-1D54FC1BB65D}.Debug|x86.Build.0 = Debug|Any CPU + {DE7C8E71-A9CA-40DE-B794-1D54FC1BB65D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {DE7C8E71-A9CA-40DE-B794-1D54FC1BB65D}.Release|Any CPU.Build.0 = Release|Any CPU + {DE7C8E71-A9CA-40DE-B794-1D54FC1BB65D}.Release|x64.ActiveCfg = Release|Any CPU + {DE7C8E71-A9CA-40DE-B794-1D54FC1BB65D}.Release|x64.Build.0 = Release|Any CPU + {DE7C8E71-A9CA-40DE-B794-1D54FC1BB65D}.Release|x86.ActiveCfg = Release|Any CPU + {DE7C8E71-A9CA-40DE-B794-1D54FC1BB65D}.Release|x86.Build.0 = Release|Any CPU + {C0722CFC-7249-4F7F-9082-24448E7128DB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C0722CFC-7249-4F7F-9082-24448E7128DB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C0722CFC-7249-4F7F-9082-24448E7128DB}.Debug|x64.ActiveCfg = Debug|Any CPU + {C0722CFC-7249-4F7F-9082-24448E7128DB}.Debug|x64.Build.0 = Debug|Any CPU + {C0722CFC-7249-4F7F-9082-24448E7128DB}.Debug|x86.ActiveCfg = Debug|Any CPU + {C0722CFC-7249-4F7F-9082-24448E7128DB}.Debug|x86.Build.0 = Debug|Any CPU + {C0722CFC-7249-4F7F-9082-24448E7128DB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C0722CFC-7249-4F7F-9082-24448E7128DB}.Release|Any CPU.Build.0 = Release|Any CPU + 
{C0722CFC-7249-4F7F-9082-24448E7128DB}.Release|x64.ActiveCfg = Release|Any CPU + {C0722CFC-7249-4F7F-9082-24448E7128DB}.Release|x64.Build.0 = Release|Any CPU + {C0722CFC-7249-4F7F-9082-24448E7128DB}.Release|x86.ActiveCfg = Release|Any CPU + {C0722CFC-7249-4F7F-9082-24448E7128DB}.Release|x86.Build.0 = Release|Any CPU + {0B9761C1-4DD6-41E5-9E46-0A9ACEF6D2A5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0B9761C1-4DD6-41E5-9E46-0A9ACEF6D2A5}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0B9761C1-4DD6-41E5-9E46-0A9ACEF6D2A5}.Debug|x64.ActiveCfg = Debug|Any CPU + {0B9761C1-4DD6-41E5-9E46-0A9ACEF6D2A5}.Debug|x64.Build.0 = Debug|Any CPU + {0B9761C1-4DD6-41E5-9E46-0A9ACEF6D2A5}.Debug|x86.ActiveCfg = Debug|Any CPU + {0B9761C1-4DD6-41E5-9E46-0A9ACEF6D2A5}.Debug|x86.Build.0 = Debug|Any CPU + {0B9761C1-4DD6-41E5-9E46-0A9ACEF6D2A5}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0B9761C1-4DD6-41E5-9E46-0A9ACEF6D2A5}.Release|Any CPU.Build.0 = Release|Any CPU + {0B9761C1-4DD6-41E5-9E46-0A9ACEF6D2A5}.Release|x64.ActiveCfg = Release|Any CPU + {0B9761C1-4DD6-41E5-9E46-0A9ACEF6D2A5}.Release|x64.Build.0 = Release|Any CPU + {0B9761C1-4DD6-41E5-9E46-0A9ACEF6D2A5}.Release|x86.ActiveCfg = Release|Any CPU + {0B9761C1-4DD6-41E5-9E46-0A9ACEF6D2A5}.Release|x86.Build.0 = Release|Any CPU + {BE04557C-735E-4601-AC2A-DE96DE83CE0C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BE04557C-735E-4601-AC2A-DE96DE83CE0C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {BE04557C-735E-4601-AC2A-DE96DE83CE0C}.Debug|x64.ActiveCfg = Debug|Any CPU + {BE04557C-735E-4601-AC2A-DE96DE83CE0C}.Debug|x64.Build.0 = Debug|Any CPU + {BE04557C-735E-4601-AC2A-DE96DE83CE0C}.Debug|x86.ActiveCfg = Debug|Any CPU + {BE04557C-735E-4601-AC2A-DE96DE83CE0C}.Debug|x86.Build.0 = Debug|Any CPU + {BE04557C-735E-4601-AC2A-DE96DE83CE0C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BE04557C-735E-4601-AC2A-DE96DE83CE0C}.Release|Any CPU.Build.0 = Release|Any CPU + {BE04557C-735E-4601-AC2A-DE96DE83CE0C}.Release|x64.ActiveCfg = Release|Any CPU + 
{BE04557C-735E-4601-AC2A-DE96DE83CE0C}.Release|x64.Build.0 = Release|Any CPU + {BE04557C-735E-4601-AC2A-DE96DE83CE0C}.Release|x86.ActiveCfg = Release|Any CPU + {BE04557C-735E-4601-AC2A-DE96DE83CE0C}.Release|x86.Build.0 = Release|Any CPU + {720D2F0D-FE32-411A-8EAA-86F7C081EDB1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {720D2F0D-FE32-411A-8EAA-86F7C081EDB1}.Debug|Any CPU.Build.0 = Debug|Any CPU + {720D2F0D-FE32-411A-8EAA-86F7C081EDB1}.Debug|x64.ActiveCfg = Debug|Any CPU + {720D2F0D-FE32-411A-8EAA-86F7C081EDB1}.Debug|x64.Build.0 = Debug|Any CPU + {720D2F0D-FE32-411A-8EAA-86F7C081EDB1}.Debug|x86.ActiveCfg = Debug|Any CPU + {720D2F0D-FE32-411A-8EAA-86F7C081EDB1}.Debug|x86.Build.0 = Debug|Any CPU + {720D2F0D-FE32-411A-8EAA-86F7C081EDB1}.Release|Any CPU.ActiveCfg = Release|Any CPU + {720D2F0D-FE32-411A-8EAA-86F7C081EDB1}.Release|Any CPU.Build.0 = Release|Any CPU + {720D2F0D-FE32-411A-8EAA-86F7C081EDB1}.Release|x64.ActiveCfg = Release|Any CPU + {720D2F0D-FE32-411A-8EAA-86F7C081EDB1}.Release|x64.Build.0 = Release|Any CPU + {720D2F0D-FE32-411A-8EAA-86F7C081EDB1}.Release|x86.ActiveCfg = Release|Any CPU + {720D2F0D-FE32-411A-8EAA-86F7C081EDB1}.Release|x86.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -109,5 +287,10 @@ Global {F8A9B0C1-2345-6789-ABCD-456789012345} = {10000000-0000-0000-0000-000000000001} {C5D6E7F8-F012-3456-789A-123456789012} = {20000000-0000-0000-0000-000000000002} {D6E7F8A9-0123-4567-89AB-234567890123} = {20000000-0000-0000-0000-000000000002} + {DE7C8E71-A9CA-40DE-B794-1D54FC1BB65D} = {20000000-0000-0000-0000-000000000002} + {C0722CFC-7249-4F7F-9082-24448E7128DB} = {10000000-0000-0000-0000-000000000001} + {0B9761C1-4DD6-41E5-9E46-0A9ACEF6D2A5} = {20000000-0000-0000-0000-000000000002} + {BE04557C-735E-4601-AC2A-DE96DE83CE0C} = {20000000-0000-0000-0000-000000000002} + {720D2F0D-FE32-411A-8EAA-86F7C081EDB1} = {20000000-0000-0000-0000-000000000002} EndGlobalSection EndGlobal diff --git 
a/README.md b/README.md index 5e3bb2c..ea983c1 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ Transform whiteboard sketches into working code. Generate diagrams from source f [![.NET](https://img.shields.io/badge/.NET-8.0-512BD4)](https://dotnet.microsoft.com/) [![License](https://img.shields.io/badge/License-MIT-green.svg)](LICENSE) [![Build](https://img.shields.io/badge/build-passing-brightgreen)]() -[![Tests](https://img.shields.io/badge/tests-72%20passing-brightgreen)]() +[![Tests](https://img.shields.io/badge/tests-152%20passing-brightgreen)]() --- @@ -117,8 +117,43 @@ docflow integrate sla https://api.example.com/data --expected 30s --samples 10 # Generate integration code (DTOs, AutoMapper, HTTP client, validators) docflow integrate generate petstore.json --cdm Models/ -o Generated/ -n MyApp.Integration + +# Generate a navigable design-docs bundle (Markdown + Mermaid + HTML) +docflow integrate docs petstore.json -o docs/api -v + +# Watch the spec and regenerate on change (debounced) +docflow integrate docs petstore.json -o docs/api --watch + +# Emit a breaking / non-breaking Markdown changelog between two specs +docflow integrate diff old.json new.json -o CHANGELOG.md +``` + +#### Docs Bundle Contents + +Running `docflow integrate docs` produces: + +``` +docs/api/ +├── index.md # TOC +├── overview.md # API title, servers, auth summary +├── architecture.md # C4-style system-context diagram + deployments table +├── domain-model.md # Mermaid class + ER diagrams + entity table (with anchors) +├── security.md # Scheme table, OAuth2 flow diagrams, per-operation requirements +├── endpoints/<tag>.md # One page per tag (or path segment); sequence + flowchart embedded +├── sequences/<operationId>.md # Standalone per-operation sequence diagrams +├── diagrams/context.mmd # Standalone Mermaid context diagram +└── assets/openapi.json # Verbatim copy of the source spec ``` +The default `--diagrams` set is `all` (class, ER, sequence, context, flow). 
Entity references on +endpoint pages link into `domain-model.md` via stable `#entity-` anchors. Passing +`--with-examples` adds synthesized JSON request/response examples (spec-provided examples take +precedence when present). + +Passing `--format html` converts the bundle into a self-contained static site: one `.html` per +`.md`, an embedded dark/light CSS theme (`assets/theme.css`), a sidebar nav, and Mermaid.js via +CDN. See [Documentation module design](docs/design/documentation-module.md). + #### Generate Options | Option | Description | diff --git a/docs/ARCHITECTURE.md b/docs/ARCHITECTURE.md index 1f11aa3..aa81e1d 100644 --- a/docs/ARCHITECTURE.md +++ b/docs/ARCHITECTURE.md @@ -55,9 +55,15 @@ public sealed class SemanticModel public List Relationships { get; init; } public List Namespaces { get; init; } public ModelProvenance? Provenance { get; set; } + public ApiSurface? Api { get; set; } // API-level surface (operations, params, servers, security) } ``` +The optional `Api` property is populated by `OpenApiParser` and carries the API surface — +operations, parameters, request/response bodies, servers, tags, and security schemes — so +downstream generators (documentation, diagrams, clients) can reason about the API without +re-parsing the source spec. See `src/DocFlow.Core/CanonicalModel/ApiSurface.cs`. 
+ ### SemanticEntity Represents any type-like construct (class, interface, enum, etc.): @@ -154,9 +160,15 @@ public interface IModelGenerator | Component | Parser | Generator | |-----------|--------|-----------| | C# | `CSharpModelParser` | `CSharpModelGenerator` | -| Mermaid | `MermaidClassDiagramParser` | `MermaidClassDiagramGenerator` | +| Mermaid (class) | `MermaidClassDiagramParser` | `MermaidClassDiagramGenerator` | +| Mermaid (ER) | - | `MermaidErDiagramGenerator` | +| Mermaid (sequence) | - | `MermaidSequenceDiagramGenerator` | +| Mermaid (C4 context) | - | `MermaidC4ContextGenerator` | +| Mermaid (endpoint flow) | - | `MermaidEndpointFlowchartGenerator` | | Whiteboard | `WhiteboardScanner` | - | -| OpenAPI | `OpenApiParser` | - | +| OpenAPI | `OpenApiParser` (populates `SemanticModel.Api` incl. examples) | - | +| Design Docs (Markdown) | - | `MarkdownDocumentationGenerator` | +| Design Docs (HTML) | - | `StaticSiteRenderer` (Markdig) | --- @@ -336,6 +348,201 @@ The Integration module ships with pre-seeded patterns across four categories: --- +## Documentation Module Architecture + +`DocFlow.Documentation` composes the OpenAPI parser with the existing Mermaid class diagram +generator to produce a navigable design-docs bundle. Phase 1 ships the Markdown MVP. 
+ +``` +Source spec (OpenAPI 3.x) + | + v ++--------------------+ +| OpenApiParser | populates SemanticModel.Entities + Api ++---------+----------+ + | + v ++-------------------------------+ +| MarkdownDocumentationGen | +| | +| +--------------------------+| +| | OverviewSectionBuilder || +| +--------------------------+| +| +--------------------------+| +| | DomainModelSectionBuilder|| invokes MermaidClassDiagramGenerator +| | || + MermaidErDiagramGenerator (Er flag) +| | || + injects anchors +| +--------------------------+| +| +--------------------------+| +| | ArchitectureSectionBuilder| Context flag: +| | || + MermaidC4ContextGenerator +| | || emits architecture.md + diagrams/context.mmd +| +--------------------------+| +| +--------------------------+| +| | SecuritySectionBuilder || emits security.md when schemes or +| | || operation requirements exist +| +--------------------------+| +| +--------------------------+| +| | EndpointSectionBuilder || one page per tag (or path segment); +| | || embeds MermaidSequenceDiagramGenerator (Sequence flag) +| | || and MermaidEndpointFlowchartGenerator (Flow flag); +| | || optional WithExamples → ExampleSynthesizer JSON; +| | || entity refs linked into domain-model.md anchors; +| | || also emits standalone sequences/.md +| +--------------------------+| +| +--------------------------+| +| | IndexSectionBuilder || TOC built after siblings +| +--------------------------+| ++---------+---------------------+ + | + v +IReadOnlyList (pure in-memory; CLI writes to disk) + | + | (optional, when --format html) + v ++-------------------------------+ +| StaticSiteRenderer (Markdig)| parallel .html for each .md; +| | mermaid fences via Markdig diagrams extension; +| | .md → .html link rewrite (fragments preserved); +| | per-page sidebar nav with .active highlight; +| | embedded assets/theme.css + Mermaid.js CDN ++-------------------------------+ + | + v +/{index,overview,domain-model,architecture,security}.md(+.html), + 
endpoints/<tag>.md(+.html), + sequences/<operationId>.md(+.html), + diagrams/context.mmd, + assets/{openapi.{json|yaml},theme.css} +``` + +### Diagram kind mapping + +The `DocumentationOptions.Diagrams` flags enum selects which diagrams the generator emits. +The CLI default is `all`. + +| Flag | Generator | Where it lands | +|------|-----------|----------------| +| `Class` | `MermaidClassDiagramGenerator` | `domain-model.md` | +| `Er` | `MermaidErDiagramGenerator` | `domain-model.md` (after the class fence) | +| `Sequence` | `MermaidSequenceDiagramGenerator` | `endpoints/<tag>.md` per operation + standalone `sequences/<operationId>.md` | +| `Context` | `MermaidC4ContextGenerator` | `architecture.md` + standalone `diagrams/context.mmd` | +| `Flow` | `MermaidEndpointFlowchartGenerator` | `endpoints/<tag>.md` per operation | + +**ER cardinality mapping:** `Composition` → `||--o{`, `Aggregation` → `}o--o{`, +`Association` → `}o--||`. Other `RelationshipType` values (e.g. `Inheritance`, `Dependency`) are +not rendered as ER relationships — the involved entities still appear as standalone blocks. + +**Sequence participants:** always `Client` and `API`; `Auth` is added when the operation's +`SecurityRequirements` is non-empty. The request message includes HTTP method, path, and the +first request-body media type (preferring named entity references); the response message picks +the first 2xx response (falling back to the first listed). + +**Context diagram:** a Mermaid `flowchart LR` (C4's dedicated primitive is still experimental) +with `Client`, an API container labelled with the spec title, one node per `ApiServer`, and one +external-system node per OAuth2 / OpenID-Connect security scheme. + +**Endpoint flowchart:** `Request → Validate Params → [Authorize] → Handler → Response`. The +`Authorize` node is omitted when no security requirements are declared. Non-2xx responses are +rendered as dashed branches from `Handler`; 2xx responses use solid edges. 
+ +### Content depth (Phase 3) + +- `--with-examples` activates `ExampleSynthesizer`, which prefers spec-provided + `ApiMediaType.Example` payloads (captured by the OpenAPI parser via `OpenApiJsonWriter`) and + otherwise synthesises JSON from the schema: enum[0] for constrained strings, ISO-8601 + placeholders for `date-time`, zero for numerics, single-element arrays, and an ellipsis + (`"..."`) to terminate circular entity references. +- `SecuritySectionBuilder` emits `security.md` whenever the spec declares `securitySchemes` or + any operation references a scheme. It produces a scheme-details table, a Mermaid + `sequenceDiagram` per OAuth2 flow (authorizationCode, clientCredentials, implicit, password), + and a per-operation requirements cross-reference. +- Endpoint pages link entity mentions to stable anchors inside `domain-model.md` in the form + `[\`Pet\`](../domain-model.md#entity-pet)`; the anchors are inlined into the entity table + cells with `` and survive both GitHub-flavoured Markdown and + Markdig HTML rendering. + +### HTML rendering (Phase 4) + +`StaticSiteRenderer` uses `Markdig` with `UseAdvancedExtensions()` so fenced code blocks tagged +`mermaid` emit as `
` — Mermaid.js picks these up automatically. A +small compiled regex rewrites intra-bundle `.md` hrefs to `.html` (preserving the `#fragment` +tail). The sidebar nav is built from the file tree; the current page is tagged `class="active"` +on its ``. `assets/theme.css` is shipped as an embedded resource of +`DocFlow.Documentation` (dark/light via `prefers-color-scheme`). Mermaid.js loads from +`cdn.jsdelivr.net/npm/mermaid@10`; an offline-asset follow-up is tracked in +`docs/todo.md`. + +### Pluggable spec parsing (Phase 5) + +`IApiSpecParser` in `DocFlow.Core/Abstractions/` abstracts "parse an API spec stream into a +`SemanticModel`"; each implementation exposes a `Name`, a `CanParse(path, content)` predicate, +and a `ParseAsync(Stream, ct)` method that throws `FormatException` on parse failure. +`SpecParserRegistry` picks the first registered parser whose `CanParse` returns true and throws +`InvalidOperationException` with a "registered parsers: …" diagnostic otherwise. `OpenApiParser` +implements the interface via explicit implementation so its legacy `ISchemaParser` entry point +stays unchanged; the CLI's `integrate docs` and `integrate diff` commands both go through the +registry rather than hardcoding OpenAPI. + +### Watch mode and changelogs (Phase 5) + +`integrate docs --watch` runs an initial build, then wires a `FileSystemWatcher` to the spec +file with a 300 ms debounce (guarded by `SemaphoreSlim` so bursts of `Changed` events collapse +into a single regeneration). Watch exits cleanly on cancellation. + +`integrate diff -o changelog.md` loads both specs via the registry, computes a +`SpecDiff` with `SpecDiffer`, and renders a Markdown changelog via `ChangelogGenerator`. 
Diff +heuristics map each difference to a `ChangeSeverity` (Breaking or NonBreaking) across six +`ChangeCategory` buckets (Operation, Parameter, RequestBody, Response, Schema, Security): + +| Change | Severity | +|--------|----------| +| Added operation / added optional parameter / added optional property | Non-breaking | +| Removed operation / removed parameter / removed property / removed response status | Breaking | +| Added required parameter or property | Breaking | +| Tightened required flag (false → true) | Breaking | +| Relaxed required flag (true → false) | Non-breaking | +| Changed parameter / field / request-body / response entity type | Breaking | +| HTTP method or path changed on a kept operationId | Breaking | + +The rendered changelog leads with a Breaking / Non-breaking count summary, then groups by +severity → category. + +### Design rules + +- **Pure generator**: `IDocumentationGenerator.GenerateAsync` returns an in-memory file list. + The CLI layer owns persistence. +- **Deterministic output**: every iteration is `OrderBy`-preceded; `MarkdownWriter` forces LF + line endings and trims trailing whitespace. The domain-model builder clones the input + `SemanticModel` with entities re-inserted in alphabetical order so the upstream class diagram + generator produces stable text. +- **Purity check**: `DocFlow.Documentation` depends on Core + Diagrams only. It never references + `DocFlow.Integration`; the CLI orchestrates parsing and passes the `SemanticModel` plus the + raw source bytes via `DocumentationOptions.SourceSpec`. +- **Source spec preservation**: the original spec is passed through unmodified as + `assets/openapi.` so readers can verify the docs against the authoritative source + without leaving the bundle. 
+
+### CLI
+
+```
+docflow integrate docs <spec> \
+  -o <out-dir> \
+  [--format markdown|html] \
+  [--diagrams class,er,sequence,context,flow] \
+  [--with-examples] \
+  [--group-by tag|path] \
+  [--title "My API"] \
+  [--watch] \
+  [-v]
+```
+
+Exit codes: `0` success, `1` validation error (missing spec, unknown flag value, parser
+failure), `2` I/O error (`IOException`/`UnauthorizedAccessException`/`NotSupportedException`).
+`--format html` renders the same bundle as a parallel static HTML site (Phase 4); `--watch`
+regenerates the bundle whenever the spec file changes (Phase 5).
+
+---
+
 ## Intelligent Mapping Service (IMS)
 
 The IMS (designed, future implementation) learns transformation patterns from examples:
@@ -385,6 +592,9 @@ DocFlow.CLI
 |   +-- DocFlow.Core
 |   +-- DocFlow.IMS
 |   +-- DocFlow.CodeGen
++-- DocFlow.Documentation       # Design-docs bundle generation (Phases 1-5 complete)
+|   +-- DocFlow.Core
+|   +-- DocFlow.Diagrams
 +-- DocFlow.Documents           # Document pipeline (planned)
 |   +-- DocFlow.Core
 +-- DocFlow.Web                 # Web UI (planned)
@@ -440,8 +650,22 @@ All AI-assisted and heuristic-based mappings include confidence scores and reaso
 | Mermaid Generator | `src/DocFlow.Diagrams/Mermaid/MermaidClassDiagramGenerator.cs` |
 | Whiteboard Scanner | `src/DocFlow.Vision/WhiteboardScanner.cs` |
 | Claude Provider | `src/DocFlow.AI/Providers/ClaudeProvider.cs` |
-| OpenAPI Parser | `src/DocFlow.Integration/Schemas/OpenApiParser.cs` |
+| OpenAPI Parser | `src/DocFlow.Integration/Schemas/OpenApi/OpenApiParser.cs` |
 | CDM Mapper | `src/DocFlow.Integration/Mapping/CdmMapper.cs` |
 | SLA Validator | `src/DocFlow.Integration/Validation/SlaValidator.cs` |
 | Code Generator | `src/DocFlow.Integration/CodeGen/IntegrationCodeGenerator.cs` |
+| ApiSurface Records | `src/DocFlow.Core/CanonicalModel/ApiSurface.cs` |
+| Spec Parser Abstraction | `src/DocFlow.Core/Abstractions/IApiSpecParser.cs` |
+| Spec Parser Registry | `src/DocFlow.Core/Abstractions/SpecParserRegistry.cs` |
+| Documentation Generator | `src/DocFlow.Documentation/Markdown/MarkdownDocumentationGenerator.cs` |
+| 
Documentation Section Builders | `src/DocFlow.Documentation/Markdown/Sections/` | +| Example Synthesizer | `src/DocFlow.Documentation/Examples/ExampleSynthesizer.cs` | +| Spec Differ | `src/DocFlow.Documentation/Diff/SpecDiffer.cs` | +| Changelog Generator | `src/DocFlow.Documentation/Diff/ChangelogGenerator.cs` | +| Static HTML Renderer | `src/DocFlow.Documentation/Html/StaticSiteRenderer.cs` | +| HTML Theme (embedded) | `src/DocFlow.Documentation/Html/Assets/theme.css` | +| ER Diagram Generator | `src/DocFlow.Diagrams/Mermaid/MermaidErDiagramGenerator.cs` | +| Sequence Diagram Generator | `src/DocFlow.Diagrams/Mermaid/MermaidSequenceDiagramGenerator.cs` | +| C4 Context Generator | `src/DocFlow.Diagrams/Mermaid/MermaidC4ContextGenerator.cs` | +| Endpoint Flowchart Generator | `src/DocFlow.Diagrams/Mermaid/MermaidEndpointFlowchartGenerator.cs` | | CLI Entry Point | `src/DocFlow.CLI/Program.cs` | diff --git a/docs/design/documentation-module.md b/docs/design/documentation-module.md new file mode 100644 index 0000000..6809a02 --- /dev/null +++ b/docs/design/documentation-module.md @@ -0,0 +1,145 @@ +# DocFlow.Documentation Module - Design Document + +## Status: Phase 1 Complete (v0.1.0-preview) + +`DocFlow.Documentation` turns any DocFlow `SemanticModel` into a navigable design-documentation +bundle. Phase 1 ships the Markdown MVP driven by the OpenAPI parser; later phases add richer +diagrams, synthesized examples, static HTML rendering, and spec-to-spec diffing. 
+ +| Phase | Scope | Status | +|-------|-------|--------| +| 1 | Markdown bundle: overview, domain model (class diagram), endpoint pages, TOC, source-spec asset | **Complete** | +| 2 | Additional diagram kinds: ER, per-operation sequence, C4 context, endpoint flowchart | Planned | +| 3 | Example payload synthesis, enriched security section, cross-page linking | Planned | +| 4 | `--format html` static site renderer | Planned | +| 5 | `IApiSpecParser` abstraction + registry; `--watch` + `integrate diff` | Planned | + +--- + +## Overview + +``` +Source spec (OpenAPI 3.x JSON/YAML) + -> OpenApiParser -> SemanticModel { Entities, Relationships, ApiSurface } + -> MarkdownDocumentationGenerator + -> IReadOnlyList + -> (CLI) written to / +``` + +Nothing bypasses the canonical model. The generator is pure: it returns files in memory; +persistence is the CLI's responsibility. + +### Output bundle (Phase 1) + +``` +/ +├── index.md # TOC linking every other page +├── overview.md # Title, version, description, servers, auth summary +├── domain-model.md # Mermaid class diagram + entity table +├── endpoints/ +│ └── .md # One page per tag (or first path segment with --group-by path) +└── assets/ + └── openapi.json # Byte-equivalent copy of the source spec (.yaml/.yml preserved) +``` + +--- + +## CLI + +``` +docflow integrate docs \ + -o \ + [--format markdown|html] \ + [--diagrams class,er,sequence,context,flow] \ + [--with-examples] \ + [--group-by tag|path] \ + [--title "My API"] \ + [-v] +``` + +Exit codes: `0` success, `1` validation error, `2` I/O error. + +Phase 1: only `--format markdown` is implemented. `--format html` returns exit 1 with a pointer +to the Phase 4 tracking issue. `--with-examples` is accepted but no-op until Phase 3. + +--- + +## Architecture + +### Canonical extension (Phase 1 Issue #1) + +`SemanticModel` gained an optional `Api: ApiSurface` property carrying operations, parameters, +request/response bodies, servers, tags, and security schemes. 
Records: + +- `ApiSurface` — root container (title, version, description, servers, operations, tags, security schemes) +- `ApiOperation` — `{ OperationId, Method, Path, Summary, Description, Tags, Parameters, RequestBody, Responses, SecurityRequirements, Deprecated }` +- `ApiParameter` / `ApiRequestBody` / `ApiResponse` / `ApiMediaType` / `ApiSchema` +- `ApiServer`, `ApiTag`, `ApiSecurityScheme`, `ApiSecurityFlow`, `ApiSecurityRequirement` + +Enums: `ApiHttpMethod`, `ApiParameterLocation` (Query, Header, Path, Cookie), `ApiSecuritySchemeType`. + +### Populating the surface (Phase 1 Issue #2) + +`DocFlow.Integration.Schemas.OpenApi.OpenApiParser.BuildApiSurface` walks +`paths → operations → parameters/requestBody/responses`, resolves `$ref` to known schema names, +and fills in servers, tags, security schemes (including OAuth2 flows). Operations without an +`operationId` get a deterministic synthesized id of the form `{method}_{path}` lowercased with +non-alphanumerics collapsed to single underscores. + +### Section builders (Phase 1 Issue #4) + +`MarkdownDocumentationGenerator` orchestrates four stateless builders: + +| Builder | Output | +|---------|--------| +| `OverviewSectionBuilder` | `overview.md` | +| `DomainModelSectionBuilder` | `domain-model.md` (embeds `MermaidClassDiagramGenerator` output in a `mermaid` fence) | +| `EndpointSectionBuilder` | `endpoints/.md` per tag (`--group-by tag`) or per first path segment (`--group-by path`) | +| `IndexSectionBuilder` | `index.md`, built last so it can link every sibling | + +The generator sorts entities into a deterministic order before invoking the upstream class +diagram generator so Mermaid output is stable across runs. All iteration is explicitly ordered; +`MarkdownWriter` normalises line endings to LF and trims trailing whitespace so output lints cleanly +and snapshot tests are stable. 
+ +### CLI wiring (Phase 1 Issue #5) + +`Program.ExecuteDocsCommand` parses the spec, runs the generator, then writes every +`GeneratedFile` to disk (creating subdirectories as needed). It maps `IOException`, +`UnauthorizedAccessException`, and `NotSupportedException` to exit code 2; parser/validation +failures to exit code 1. + +### Source spec preservation (Phase 1 Issue #6) + +The CLI reads the original spec and passes it to the generator via +`DocumentationOptions.SourceSpec`. The generator emits it unmodified as `assets/openapi.json` +(or `assets/openapi.yaml` / `.yml` when the source was YAML). Content preservation lets readers +verify the docs against the authoritative source without leaving the bundle. + +--- + +## Testing + +| Project | Tests | +|---------|-------| +| `DocFlow.Core.Tests` | ApiSurface records, enum coverage, backwards-compatibility | +| `DocFlow.Integration.Tests` | OpenAPI → ApiSurface, YAML/JSON equivalence, OAuth flows, deterministic operationId | +| `DocFlow.Documentation.Tests` | Generator file set, Mermaid embedding, determinism, snapshot via Verify.Xunit | +| `DocFlow.CLI.Tests` | End-to-end CLI invocation, exit codes, verbose output | + +Determinism rules (§4.3 of the feature plan): every iteration is `OrderBy`-preceded; line +endings are LF; generated output contains no timestamps, user names, or absolute paths. 
+ +--- + +## Key Files + +| File | Purpose | +|------|---------| +| `src/DocFlow.Core/CanonicalModel/ApiSurface.cs` | Canonical API-surface records | +| `src/DocFlow.Integration/Schemas/OpenApi/OpenApiParser.cs` | Populates `SemanticModel.Api` | +| `src/DocFlow.Documentation/Abstractions/IDocumentationGenerator.cs` | Generator contract | +| `src/DocFlow.Documentation/Markdown/MarkdownDocumentationGenerator.cs` | Phase 1 generator | +| `src/DocFlow.Documentation/Markdown/Sections/*SectionBuilder.cs` | Overview / DomainModel / Endpoint / Index builders | +| `src/DocFlow.Documentation/Options/DocumentationOptions.cs` | Generator options | +| `src/DocFlow.CLI/Program.cs` (`BuildDocsCommand`) | `integrate docs` subcommand | diff --git a/docs/todo.md b/docs/todo.md new file mode 100644 index 0000000..9096d25 --- /dev/null +++ b/docs/todo.md @@ -0,0 +1,136 @@ +# DocFlow TODO + +Tracks the feature plan for **API Spec → Design Documentation & Diagrams** (see +[docs/design/documentation-module.md](design/documentation-module.md) for the full design doc). + +## Status + +| Phase | Issues | Status | +|-------|--------|--------| +| 1 — Foundations (MVP Markdown) | #1–#6 | **Done** | +| 2 — Rich Diagrams | #7, #8 | **Done** | +| 3 — Content Depth | #9 | **Done** | +| 4 — HTML Rendering | #10 | **Done** | +| 5 — Pluggability & Polish | #11, #12 | **Done** | + +**Test suite:** 152 passing (1 skipped) across 6 test projects. 80 tests added since +pre-feature baseline; all 72 pre-existing tests still pass. + +**Feature-level Definition of Done:** all 12 issues merged. Running +`docflow integrate docs samples/integration-demos/petstore.json -o ./out` produces the +navigable Markdown bundle; `--format html` produces the parallel static HTML site; `--watch` +regenerates on spec change; `docflow integrate diff old.json new.json -o changelog.md` +produces a breaking / non-breaking classified Markdown changelog. 
+ +## Phase 1 — Foundations (Done) + +- [x] **#1 Extend `SemanticModel` with `ApiSurface`** — new records in + `src/DocFlow.Core/CanonicalModel/ApiSurface.cs`; optional `Api` property on `SemanticModel`; + 6 new tests in `DocFlow.Core.Tests`. +- [x] **#2 Populate `ApiSurface` in `OpenApiParser`** — walks paths/operations/params/ + requestBody/responses; resolves `$ref` to entity names; populates servers, tags, security + schemes with OAuth2 flows; synthesizes deterministic `{method}_{path}` operationIds. 6 new + tests in `DocFlow.Integration.Tests` including YAML/JSON equivalence. +- [x] **#3 Scaffold `DocFlow.Documentation` project** — net8.0, references Core + Diagrams only + (no Integration dependency). `IDocumentationGenerator`, `GeneratedFile`, + `DocumentationOptions`, `DocumentationFormat`, `DiagramKinds` (flags), `GroupBy`. +- [x] **#4 Implement `MarkdownDocumentationGenerator` (MVP)** — four section builders + (Overview, DomainModel, Endpoint, Index); deterministic ordering; `MarkdownWriter` enforces + LF line endings and trims trailing whitespace. 11 tests in `DocFlow.Documentation.Tests` + including a `Verify.Xunit` snapshot for `endpoints/pet.md`. +- [x] **#5 Add `docflow integrate docs` CLI subcommand** — all flags wired + (`--format`, `--diagrams`, `--with-examples`, `--group-by`, `--title`, `-v`); exit codes + 0/1/2 for success/validation/IO; 5 new tests in `DocFlow.CLI.Tests`. +- [x] **#6 Copy source spec to assets; ship design doc** — `DocumentationOptions.SourceSpec` + plumbs the raw spec through the generator; `docs/design/documentation-module.md` committed; + `README.md` + `CLAUDE.md` updated. 2 new CLI tests covering JSON byte-identical copy and + YAML preservation. 
+ +## Phase 2 — Rich Diagrams (Done) + +- [x] **#7 ER and Sequence Mermaid generators** — `MermaidErDiagramGenerator` emits + `erDiagram` with cardinality mapped from `RelationshipType` (Composition→`||--o{`, + Aggregation→`}o--o{`, Association→`}o--||`); non-structural relationships are dropped and + orphan entities render as standalone blocks. `MermaidSequenceDiagramGenerator` takes an + `ApiOperation` and emits Client/API actors plus optional Auth when security requirements are + present; request/response messages include method, path, request-body entity, and the first + 2xx response. `DomainModelSectionBuilder` appends the ER fence after the class fence when + `DiagramKinds.Er` is set; `EndpointSectionBuilder` embeds a sequence fence per operation and + emits standalone `sequences/.md` pages when `DiagramKinds.Sequence` is set. CLI + default `--diagrams` expanded to `class,er,sequence`. 10 new tests in `DocFlow.Diagrams.Tests` + (5 ER + 5 Sequence) and 1 new integration test in `DocFlow.Documentation.Tests`. +- [x] **#8 C4 Context and Endpoint Flowchart generators** — `MermaidC4ContextGenerator` uses a + `flowchart LR` fallback (Mermaid's dedicated C4 primitive is still experimental) with + Client, API container, per-server deployment nodes, and OAuth/OpenID IdP nodes; rendered + into a new `architecture.md` alongside a standalone `diagrams/context.mmd`. + `MermaidEndpointFlowchartGenerator` produces `Request → Validate → [Authorize] → Handler → + Response` with dashed branches from `Handler` to non-2xx responses and solid edges to 2xx. + `ArchitectureSectionBuilder` emits when `DiagramKinds.Context` is set; `EndpointSectionBuilder` + embeds the flowchart when `DiagramKinds.Flow` is set. CLI default `--diagrams` bumped to + `all`. 8 new tests in `DocFlow.Diagrams.Tests` (4 Context + 4 Flowchart) and 2 new integration + tests in `DocFlow.Documentation.Tests`. 
+ +## Phase 3 — Content Depth (Done) + +- [x] **#9 Example synthesis and enriched security section** — `ExampleSynthesizer` (in + `DocFlow.Documentation/Examples/`) produces JSON from `ApiMediaType` + the entity catalogue: + prefers `ApiMediaType.Example` (captured by the OpenAPI parser via `OpenApiJsonWriter`), + otherwise synthesises from the schema — enum[0] / ISO-8601 date-time / UUID placeholders / + single-element arrays / required-respecting objects / `"..."` on cycles. `--with-examples` + adds `### Example Request/Response` blocks to endpoint pages. New `SecuritySectionBuilder` + emits `security.md` when the spec declares `securitySchemes` or any operation references one: + a scheme-details table, a Mermaid `sequenceDiagram` per OAuth2 flow (authorizationCode, + clientCredentials, implicit, password), and a per-operation requirements cross-reference. + Entity references on endpoint pages render as links into stable + `domain-model.md#entity-` anchors that `DomainModelSectionBuilder` injects inside the + entity-table cells. `ApiMediaType.Example` narrowed from `object?` to `string?`. 9 new tests + (5 Examples + 3 Security + 1 CrossLinks). + +## Phase 4 — HTML Rendering (Done) + +- [x] **#10 Static HTML site renderer** — `DocFlow.Documentation` now depends on + `Markdig` 0.34.0; `StaticSiteRenderer` converts the Markdown bundle into parallel `.html` + files while preserving `.mmd` / `.json` assets. Markdig's advanced-diagrams extension emits + mermaid fences as `
` so Mermaid.js auto-initialises on load; a compiled + regex rewrites intra-bundle `.md` hrefs to `.html` and preserves `#fragment` tails. The + sidebar nav is built from the file tree with `.active` highlighting for the current page. + `Html/Assets/theme.css` ships as an embedded resource (dark/light via `prefers-color-scheme`) + and is emitted alongside HTML as `assets/theme.css`. Mermaid.js loads from + `cdn.jsdelivr.net/npm/mermaid@10`. CLI `--format html` now runs the renderer (replacing the + Phase 1 error). 5 new tests in `DocFlow.Documentation.Tests/Html/` (1 skipped — offline-asset + packaging follow-up); `Cli_Docs_HtmlFlag_Phase1_ReturnsError` replaced by + `Cli_Docs_HtmlFlag_WritesHtmlBundle`. + +## Phase 5 — Pluggability & Polish (Done) + +- [x] **#11 `IApiSpecParser` abstraction + registry** — `IApiSpecParser` in + `DocFlow.Core/Abstractions/` (`Name`, `CanParse(path, content)`, `ParseAsync(Stream, ct) → + SemanticModel`). `SpecParserRegistry` picks the first parser whose `CanParse` returns true + and otherwise throws `InvalidOperationException` with a "registered parsers: …" message. + `OpenApiParser` implements the interface via explicit implementation, so the legacy + `ISchemaParser` entry point is untouched and `ParseSchemaAsync(ParserInput)` continues to + work. CLI `integrate docs` now goes through the registry rather than hardcoding OpenAPI. + 7 new tests in `DocFlow.Integration.Tests/Schemas/` covering JSON/YAML selection, + content-sniff fallback, missing-parser diagnostic, a `StubGraphQlParser` proving one-file + extensibility, and a regression test confirming the legacy entry point still produces an + equivalent `SemanticModel`. +- [x] **#12 Watch mode and spec-diff changelog** — `integrate docs --watch` wires a + `FileSystemWatcher` with a 300 ms debounce (`SemaphoreSlim`-guarded so bursts of `Changed` + events collapse into a single regen) and prints Spectre.Console status lines on each + refresh. 
New `integrate diff -o ` subcommand: loads both specs via the + registry, runs `SpecDiffer` (in `DocFlow.Documentation/Diff/`), and emits + `ChangelogGenerator` Markdown with a Breaking / Non-breaking summary table and sections + grouped by severity → category (Operation / Parameter / RequestBody / Response / Schema / + Security). Diff heuristics: removed / required-tighter / type change = breaking; added + optional / relaxed required / added response status / added new schema = non-breaking. + 7 new `SpecDifferTests` in `DocFlow.Documentation.Tests/Diff/` (all six scenarios from the + issue plus a required-flag flip-direction check) and 1 end-to-end watch test in + `DocFlow.CLI.Tests/Integrate/WatchModeTests.cs` that polls for mtime updates within a 10 s + budget. + +## Cross-Cutting Requirements (Ongoing) + +- Zero new compiler warnings. +- xUnit only; deterministic tests; no network or time-based assertions. +- `\n` line endings in all generated output; no timestamps/user names/absolute paths. +- Every issue that ships user-visible CLI behavior updates `README.md` and `CLAUDE.md`. 
diff --git a/samples/integration-demos/petstore.json b/samples/integration-demos/petstore.json index 7f48972..1ff492d 100644 --- a/samples/integration-demos/petstore.json +++ b/samples/integration-demos/petstore.json @@ -16,6 +16,7 @@ "paths": { "/pets": { "get": { + "tags": ["pet"], "summary": "List all pets", "operationId": "listPets", "responses": { @@ -35,6 +36,7 @@ } }, "post": { + "tags": ["pet"], "summary": "Create a pet", "operationId": "createPet", "requestBody": { @@ -62,6 +64,7 @@ }, "/pets/{petId}": { "get": { + "tags": ["pet"], "summary": "Get a pet by ID", "operationId": "getPetById", "parameters": [ @@ -94,6 +97,7 @@ }, "/orders": { "post": { + "tags": ["store"], "summary": "Place an order", "operationId": "placeOrder", "requestBody": { diff --git a/src/DocFlow.CLI/DocFlow.CLI.csproj b/src/DocFlow.CLI/DocFlow.CLI.csproj index fa42f37..774ecc3 100644 --- a/src/DocFlow.CLI/DocFlow.CLI.csproj +++ b/src/DocFlow.CLI/DocFlow.CLI.csproj @@ -28,6 +28,7 @@ + @@ -38,4 +39,8 @@ + + + + diff --git a/src/DocFlow.CLI/Program.cs b/src/DocFlow.CLI/Program.cs index 5087b57..66e3986 100644 --- a/src/DocFlow.CLI/Program.cs +++ b/src/DocFlow.CLI/Program.cs @@ -11,6 +11,10 @@ using DocFlow.Core.CanonicalModel; using DocFlow.Diagrams.Mermaid; using System.Text.RegularExpressions; +using DocFlow.Documentation.Diff; +using DocFlow.Documentation.Html; +using DocFlow.Documentation.Markdown; +using DocFlow.Documentation.Options; using DocFlow.Integration.CodeGen; using DocFlow.Integration.Mapping; using DocFlow.Integration.Models; @@ -868,10 +872,102 @@ private static Command BuildIntegrateCommand() command.AddCommand(BuildAnalyzeCommand()); command.AddCommand(BuildSlaCommand()); command.AddCommand(BuildGenerateCommand()); + command.AddCommand(BuildDocsCommand()); + command.AddCommand(BuildDiffCommand()); return command; } + private static Command BuildDiffCommand() + { + var oldSpecArg = new Argument( + name: "old-spec", + description: "Previous API specification file"); + 
var newSpecArg = new Argument( + name: "new-spec", + description: "Current API specification file"); + var outputOption = new Option( + aliases: ["-o", "--output"], + description: "Output changelog file path") + { + IsRequired = true + }; + + var command = new Command("diff", "Diff two API specifications and emit a breaking/non-breaking Markdown changelog") + { + oldSpecArg, + newSpecArg, + outputOption + }; + + command.SetHandler(async (context) => + { + var oldSpec = context.ParseResult.GetValueForArgument(oldSpecArg); + var newSpec = context.ParseResult.GetValueForArgument(newSpecArg); + var output = context.ParseResult.GetValueForOption(outputOption)!; + + context.ExitCode = await ExecuteDiffCommand(oldSpec, newSpec, output); + }); + + return command; + } + + internal static async Task ExecuteDiffCommand(FileInfo oldSpec, FileInfo newSpec, FileInfo output) + { + if (!oldSpec.Exists) + { + AnsiConsole.MarkupLine($"[red]Error:[/] Old spec not found: [yellow]{Markup.Escape(oldSpec.FullName)}[/]"); + return 1; + } + if (!newSpec.Exists) + { + AnsiConsole.MarkupLine($"[red]Error:[/] New spec not found: [yellow]{Markup.Escape(newSpec.FullName)}[/]"); + return 1; + } + + var registry = new SpecParserRegistry([new OpenApiParser()]); + + SemanticModel oldModel, newModel; + try + { + oldModel = await ParseSpecAsync(registry, oldSpec); + newModel = await ParseSpecAsync(registry, newSpec); + } + catch (Exception ex) when (ex is InvalidOperationException or FormatException) + { + AnsiConsole.MarkupLine($"[red]Error parsing spec:[/] {Markup.Escape(ex.Message)}"); + return 1; + } + + var diff = new SpecDiffer().Diff(oldModel, newModel); + var changelog = new ChangelogGenerator().Render(diff); + + try + { + var dir = Path.GetDirectoryName(output.FullName); + if (!string.IsNullOrEmpty(dir) && !Directory.Exists(dir)) + { + Directory.CreateDirectory(dir); + } + await File.WriteAllTextAsync(output.FullName, changelog); + } + catch (Exception ex) when (ex is IOException or 
UnauthorizedAccessException or NotSupportedException) + { + AnsiConsole.MarkupLine($"[red]I/O error:[/] {Markup.Escape(ex.Message)}"); + return 2; + } + + AnsiConsole.MarkupLine($"[green]Wrote[/] {Markup.Escape(output.FullName)} ([red]{diff.BreakingCount} breaking[/], [yellow]{diff.NonBreakingCount} non-breaking[/])"); + return 0; + } + + private static async Task ParseSpecAsync(SpecParserRegistry registry, FileInfo spec) + { + var parser = registry.Select(spec.FullName, content: null); + using var stream = File.OpenRead(spec.FullName); + return await parser.ParseAsync(stream); + } + private static Command BuildParseCommand() { var specArg = new Argument( @@ -2135,6 +2231,333 @@ private static void DisplayGeneratorResults( AnsiConsole.MarkupLine($"[dim]Completed in {elapsedMs}ms[/]"); } + private static Command BuildDocsCommand() + { + var specArg = new Argument( + name: "spec", + description: "OpenAPI specification file (JSON or YAML)"); + + var outputOption = new Option( + aliases: ["-o", "--output"], + description: "Output directory for the documentation bundle") + { + IsRequired = true + }; + + var formatOption = new Option( + aliases: ["--format"], + getDefaultValue: () => "markdown", + description: "Output format: markdown or html"); + + var diagramsOption = new Option( + aliases: ["--diagrams"], + getDefaultValue: () => "all", + description: "Comma-separated diagram kinds: class,er,sequence,context,flow,all,none"); + + var withExamplesOption = new Option( + aliases: ["--with-examples"], + description: "Include synthesized example payloads (Phase 3 — currently a no-op)"); + + var groupByOption = new Option( + aliases: ["--group-by"], + getDefaultValue: () => "tag", + description: "Group endpoint pages by tag or path"); + + var titleOption = new Option( + aliases: ["--title"], + description: "Override the API title in the generated docs"); + + var watchOption = new Option( + aliases: ["--watch"], + description: "Regenerate the bundle whenever the spec file 
changes"); + + var command = new Command("docs", "Generate a design-documentation bundle from an API specification") + { + specArg, + outputOption, + formatOption, + diagramsOption, + withExamplesOption, + groupByOption, + titleOption, + watchOption, + VerboseOption + }; + + command.SetHandler(async (context) => + { + var spec = context.ParseResult.GetValueForArgument(specArg); + var output = context.ParseResult.GetValueForOption(outputOption)!; + var format = context.ParseResult.GetValueForOption(formatOption) ?? "markdown"; + var diagrams = context.ParseResult.GetValueForOption(diagramsOption) ?? "class"; + var withExamples = context.ParseResult.GetValueForOption(withExamplesOption); + var groupBy = context.ParseResult.GetValueForOption(groupByOption) ?? "tag"; + var title = context.ParseResult.GetValueForOption(titleOption); + var watch = context.ParseResult.GetValueForOption(watchOption); + var verbose = context.ParseResult.GetValueForOption(VerboseOption); + + if (watch) + { + context.ExitCode = await RunWatchAsync( + spec, output, format, diagrams, withExamples, groupBy, title, verbose, + context.GetCancellationToken()); + } + else + { + context.ExitCode = await ExecuteDocsCommand( + spec, output, format, diagrams, withExamples, groupBy, title, verbose); + } + }); + + return command; + } + + internal static async Task RunWatchAsync( + FileInfo spec, + DirectoryInfo output, + string format, + string diagrams, + bool withExamples, + string groupBy, + string? 
title, + bool verbose, + CancellationToken cancellationToken) + { + if (!spec.Exists) + { + AnsiConsole.MarkupLine($"[red]Error:[/] Spec file not found: [yellow]{Markup.Escape(spec.FullName)}[/]"); + return 1; + } + + var initial = await ExecuteDocsCommand(spec, output, format, diagrams, withExamples, groupBy, title, verbose); + if (initial != 0) return initial; + + AnsiConsole.MarkupLine($"[dim]Watching[/] [cyan]{Markup.Escape(spec.FullName)}[/] [dim]— press Ctrl+C to stop.[/]"); + + var debounce = TimeSpan.FromMilliseconds(300); + var lastTriggered = DateTime.MinValue; + var gate = new SemaphoreSlim(1, 1); + + using var watcher = new FileSystemWatcher(spec.DirectoryName!, spec.Name) + { + NotifyFilter = NotifyFilters.LastWrite | NotifyFilters.Size | NotifyFilters.CreationTime + }; + + watcher.Changed += OnChange; + watcher.Created += OnChange; + watcher.Renamed += OnChange; + watcher.EnableRaisingEvents = true; + + try + { + await Task.Delay(Timeout.InfiniteTimeSpan, cancellationToken); + } + catch (OperationCanceledException) + { + // expected on Ctrl+C / test cancellation + } + + return 0; + + async void OnChange(object sender, FileSystemEventArgs e) + { + var now = DateTime.UtcNow; + lock (gate) + { + if (now - lastTriggered < debounce) return; + lastTriggered = now; + } + + try + { + await Task.Delay(debounce, cancellationToken); + if (!await gate.WaitAsync(0, cancellationToken)) return; + try + { + AnsiConsole.MarkupLine("[dim]spec changed; regenerating...[/]"); + await ExecuteDocsCommand(spec, output, format, diagrams, withExamples, groupBy, title, verbose); + AnsiConsole.MarkupLine("[green]Bundle updated.[/]"); + } + finally + { + gate.Release(); + } + } + catch (OperationCanceledException) { } + catch (Exception ex) + { + AnsiConsole.MarkupLine($"[red]Regeneration failed:[/] {Markup.Escape(ex.Message)}"); + } + } + } + + internal static async Task ExecuteDocsCommand( + FileInfo spec, + DirectoryInfo output, + string format, + string diagrams, + bool 
withExamples, + string groupBy, + string? title, + bool verbose) + { + if (!spec.Exists) + { + AnsiConsole.MarkupLine($"[red]Error:[/] Spec file not found: [yellow]{Markup.Escape(spec.FullName)}[/]"); + return 1; + } + + if (!string.Equals(format, "markdown", StringComparison.OrdinalIgnoreCase) + && !string.Equals(format, "html", StringComparison.OrdinalIgnoreCase)) + { + AnsiConsole.MarkupLine($"[red]Error:[/] Unknown --format '[yellow]{Markup.Escape(format)}[/]'. Expected 'markdown' or 'html'."); + return 1; + } + + var htmlRequested = string.Equals(format, "html", StringComparison.OrdinalIgnoreCase); + + if (!TryParseGroupBy(groupBy, out var groupByEnum)) + { + AnsiConsole.MarkupLine($"[red]Error:[/] Unknown --group-by '[yellow]{Markup.Escape(groupBy)}[/]'. Expected 'tag' or 'path'."); + return 1; + } + + if (!TryParseDiagrams(diagrams, out var diagramKinds, out var diagramError)) + { + AnsiConsole.MarkupLine($"[red]Error:[/] {Markup.Escape(diagramError!)}"); + return 1; + } + + var specExtension = Path.GetExtension(spec.FullName).ToLowerInvariant(); + var specAssetName = specExtension switch + { + ".yaml" => "assets/openapi.yaml", + ".yml" => "assets/openapi.yml", + _ => "assets/openapi.json" + }; + var specMediaType = specExtension switch + { + ".yaml" or ".yml" => "application/yaml", + _ => "application/json" + }; + var specContent = await File.ReadAllTextAsync(spec.FullName); + var sourceSpec = new DocFlow.Documentation.Models.GeneratedFile(specAssetName, specContent, specMediaType); + + var options = new DocumentationOptions + { + Format = htmlRequested ? 
DocumentationFormat.Html : DocumentationFormat.Markdown, + Diagrams = diagramKinds, + WithExamples = withExamples, + GroupBy = groupByEnum, + Title = title, + SourceSpec = sourceSpec + }; + + var registry = new SpecParserRegistry([new OpenApiParser()]); + IApiSpecParser selected; + try + { + selected = registry.Select(spec.FullName, content: null); + } + catch (InvalidOperationException ex) + { + AnsiConsole.MarkupLine($"[red]Error:[/] {Markup.Escape(ex.Message)}"); + return 1; + } + + SemanticModel model; + try + { + using var stream = File.OpenRead(spec.FullName); + model = await selected.ParseAsync(stream); + } + catch (FormatException ex) + { + AnsiConsole.MarkupLine($"[red]Error parsing spec:[/] {Markup.Escape(ex.Message)}"); + return 1; + } + + var generator = new MarkdownDocumentationGenerator(); + var files = await generator.GenerateAsync(model, options); + + if (htmlRequested) + { + files = new StaticSiteRenderer().Render(files); + } + + try + { + if (!output.Exists) + { + Directory.CreateDirectory(output.FullName); + } + + foreach (var file in files) + { + var fullPath = Path.Combine(output.FullName, file.RelativePath); + var dir = Path.GetDirectoryName(fullPath); + if (!string.IsNullOrEmpty(dir) && !Directory.Exists(dir)) + { + Directory.CreateDirectory(dir); + } + await File.WriteAllTextAsync(fullPath, file.Content); + + if (verbose) + { + AnsiConsole.MarkupLine($" [green]wrote[/] {Markup.Escape(file.RelativePath)}"); + } + } + } + catch (Exception ex) when (ex is IOException or UnauthorizedAccessException or NotSupportedException) + { + AnsiConsole.MarkupLine($"[red]I/O error:[/] {Markup.Escape(ex.Message)}"); + return 2; + } + + AnsiConsole.MarkupLine($"[green]Generated[/] {files.Count} file(s) in [cyan]{Markup.Escape(output.FullName)}[/]"); + return 0; + } + + private static bool TryParseGroupBy(string value, out GroupBy groupBy) + { + switch (value?.Trim().ToLowerInvariant()) + { + case "tag": + groupBy = GroupBy.Tag; + return true; + case "path": + 
groupBy = GroupBy.Path; + return true; + default: + groupBy = GroupBy.Tag; + return false; + } + } + + private static bool TryParseDiagrams(string value, out DiagramKinds kinds, out string? error) + { + kinds = DiagramKinds.None; + error = null; + + foreach (var token in value.Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)) + { + switch (token.ToLowerInvariant()) + { + case "class": kinds |= DiagramKinds.Class; break; + case "er": kinds |= DiagramKinds.Er; break; + case "sequence": kinds |= DiagramKinds.Sequence; break; + case "context": kinds |= DiagramKinds.Context; break; + case "flow": kinds |= DiagramKinds.Flow; break; + case "all": kinds |= DiagramKinds.All; break; + case "none": break; + default: + error = $"Unknown diagram kind '{token}'. Expected class, er, sequence, context, flow, all, or none."; + return false; + } + } + return true; + } + #endregion #region Helpers diff --git a/src/DocFlow.Core/Abstractions/IApiSpecParser.cs b/src/DocFlow.Core/Abstractions/IApiSpecParser.cs new file mode 100644 index 0000000..9e04e74 --- /dev/null +++ b/src/DocFlow.Core/Abstractions/IApiSpecParser.cs @@ -0,0 +1,28 @@ +using DocFlow.Core.CanonicalModel; + +namespace DocFlow.Core.Abstractions; + +/// +/// Strategy for turning an API specification (OpenAPI, AsyncAPI, GraphQL SDL, Postman, …) +/// into a canonical . Implementations are stateless and may +/// inspect both the path and the content to decide whether they apply. +/// +public interface IApiSpecParser +{ + /// A short, stable identifier (e.g. "OpenAPI") used in error messages. + string Name { get; } + + /// + /// Returns true when this parser believes it can parse the given input. + /// may be the original filename (the extension is a strong signal); + /// is an optional first look at the file contents. + /// Either argument may be null / empty. + /// + bool CanParse(string? path, string? content); + + /// + /// Parse the stream into a . The stream is read to completion. 
+ /// Throws when the input cannot be parsed. + /// + Task ParseAsync(Stream input, CancellationToken cancellationToken = default); +} diff --git a/src/DocFlow.Core/Abstractions/SpecParserRegistry.cs b/src/DocFlow.Core/Abstractions/SpecParserRegistry.cs new file mode 100644 index 0000000..6ac5eec --- /dev/null +++ b/src/DocFlow.Core/Abstractions/SpecParserRegistry.cs @@ -0,0 +1,36 @@ +namespace DocFlow.Core.Abstractions; + +/// +/// Selects the first registered that reports it can parse the +/// given input. Registration order matters: callers should register more specific parsers +/// ahead of generic ones. +/// +public sealed class SpecParserRegistry +{ + private readonly IReadOnlyList _parsers; + + public SpecParserRegistry(IEnumerable parsers) + { + _parsers = parsers?.ToList() ?? throw new ArgumentNullException(nameof(parsers)); + } + + /// Parsers in their registration order. + public IReadOnlyList Parsers => _parsers; + + /// + /// Finds the first parser that reports for the input. + /// Throws when no parser matches — the message + /// includes the names of the registered parsers so callers can diagnose missing support. + /// + public IApiSpecParser Select(string? path, string? content) + { + foreach (var parser in _parsers) + { + if (parser.CanParse(path, content)) return parser; + } + + var names = _parsers.Count == 0 ? "(none registered)" : string.Join(", ", _parsers.Select(p => p.Name)); + throw new InvalidOperationException( + $"No registered API spec parser can parse '{path ?? ""}'. Registered parsers: {names}."); + } +} diff --git a/src/DocFlow.Core/CanonicalModel/ApiSurface.cs b/src/DocFlow.Core/CanonicalModel/ApiSurface.cs new file mode 100644 index 0000000..3e78181 --- /dev/null +++ b/src/DocFlow.Core/CanonicalModel/ApiSurface.cs @@ -0,0 +1,260 @@ +namespace DocFlow.Core.CanonicalModel; + +/// +/// API-level information layered onto the canonical . 
+/// Captures operations, parameters, request/response bodies, servers, security schemes and tags +/// so that downstream generators (documentation, diagrams, clients) can reason about the API surface +/// without re-parsing the source spec. +/// +public sealed record ApiSurface +{ + /// API title (e.g. "Petstore"). + public required string Title { get; init; } + + /// API version string from the source spec. + public required string Version { get; init; } + + /// Human-readable description of the API. + public string? Description { get; init; } + + /// Deployment targets declared by the spec. + public IReadOnlyList Servers { get; init; } = []; + + /// All operations keyed by . + public IReadOnlyList Operations { get; init; } = []; + + /// Tag definitions (the operations themselves carry tag names). + public IReadOnlyList Tags { get; init; } = []; + + /// Security schemes keyed by scheme name. + public IReadOnlyDictionary SecuritySchemes { get; init; } + = new Dictionary(); + + /// + /// Default security requirements applied when an operation does not declare its own. + /// Each requirement in the list is OR'd; the schemes inside a single requirement are AND'd. + /// + public IReadOnlyList SecurityRequirements { get; init; } = []; +} + +/// A single API operation (method + path pair). +public sealed record ApiOperation +{ + /// + /// Stable identifier for the operation. When the source spec omits operationId, + /// parsers synthesize a deterministic id of the form {method}_{path}. + /// + public required string OperationId { get; init; } + + /// HTTP method for the operation. + public required ApiHttpMethod Method { get; init; } + + /// URL path template (e.g. /pets/{petId}). + public required string Path { get; init; } + + /// One-line summary. + public string? Summary { get; init; } + + /// Longer description. + public string? Description { get; init; } + + /// Tag names associated with the operation. 
+ public IReadOnlyList Tags { get; init; } = []; + + /// Parameters (path, query, header, cookie). + public IReadOnlyList Parameters { get; init; } = []; + + /// Request body, if the operation accepts one. + public ApiRequestBody? RequestBody { get; init; } + + /// Responses keyed by status code (or "default"). + public IReadOnlyDictionary Responses { get; init; } + = new Dictionary(); + + /// + /// Security requirements specific to this operation. When empty, the surface's + /// default apply. + /// + public IReadOnlyList SecurityRequirements { get; init; } = []; + + /// True if the spec marks this operation as deprecated. + public bool Deprecated { get; init; } +} + +/// HTTP methods recognised by the canonical model. +public enum ApiHttpMethod +{ + Get, + Put, + Post, + Delete, + Options, + Head, + Patch, + Trace +} + +/// Where a parameter is bound in the HTTP request. +public enum ApiParameterLocation +{ + Query, + Header, + Path, + Cookie +} + +/// A single input parameter for an operation. +public sealed record ApiParameter +{ + public required string Name { get; init; } + public required ApiParameterLocation Location { get; init; } + public string? Description { get; init; } + public bool Required { get; init; } + public bool Deprecated { get; init; } + + /// The media shape of the parameter value. + public ApiMediaType? Schema { get; init; } +} + +/// Body payload accepted by an operation. +public sealed record ApiRequestBody +{ + public string? Description { get; init; } + public bool Required { get; init; } + + /// Content keyed by media type (e.g. application/json). + public IReadOnlyDictionary Content { get; init; } + = new Dictionary(); +} + +/// A response variant keyed by status code on its containing operation. +public sealed record ApiResponse +{ + public required string Description { get; init; } + + /// Content keyed by media type. 
+ public IReadOnlyDictionary Content { get; init; } + = new Dictionary(); + + /// Response headers keyed by header name. + public IReadOnlyDictionary Headers { get; init; } + = new Dictionary(); +} + +/// +/// The payload shape for a given media type. Either references +/// a by name, or describes a primitive/inline shape. +/// At most one of the two is populated; both null means "untyped". +/// +public sealed record ApiMediaType +{ + /// Name of a in the enclosing model. + public string? EntityName { get; init; } + + /// Inline schema when the payload does not map to a named entity. + public ApiSchema? Schema { get; init; } + + /// Literal example payload as a JSON string, if the spec supplied one. + public string? Example { get; init; } +} + +/// +/// A reduced schema descriptor for primitive or inline shapes. Named object schemas are +/// projected into instances and referenced by name instead. +/// +public sealed record ApiSchema +{ + /// JSON schema type: string, integer, number, boolean, array, object. + public required string Type { get; init; } + + /// Optional format hint (e.g. int32, date-time, uuid). + public string? Format { get; init; } + + /// Element schema when is array. + public ApiSchema? Items { get; init; } + + /// Enumeration of allowed values, stringified. + public IReadOnlyList Enum { get; init; } = []; + + /// Whether this schema is nullable. + public bool Nullable { get; init; } + + /// + /// Name of a this schema resolves to. Populated for inline + /// schemas that $ref a named component (e.g. an array whose items are a Pet), + /// where the outer cannot carry the link. + /// + public string? EntityName { get; init; } +} + +/// A deployment target for the API. +public sealed record ApiServer +{ + public required string Url { get; init; } + public string? Description { get; init; } +} + +/// A tag definition. Operations reference tags by name via . +public sealed record ApiTag +{ + public required string Name { get; init; } + public string? 
Description { get; init; } +} + +/// Kind of security scheme. +public enum ApiSecuritySchemeType +{ + ApiKey, + Http, + OAuth2, + OpenIdConnect, + MutualTls +} + +/// A security scheme definition. Oauth2 schemes populate . +public sealed record ApiSecurityScheme +{ + public required string Name { get; init; } + public required ApiSecuritySchemeType Type { get; init; } + public string? Description { get; init; } + + /// For : where the key is passed. + public ApiParameterLocation? In { get; init; } + + /// For : the name of the header/query/cookie parameter. + public string? ParameterName { get; init; } + + /// For : the HTTP auth scheme (e.g. bearer, basic). + public string? Scheme { get; init; } + + /// For HTTP bearer schemes: hints at the bearer token format (e.g. JWT). + public string? BearerFormat { get; init; } + + /// For : the discovery URL. + public string? OpenIdConnectUrl { get; init; } + + /// For : flows keyed by flow type name. + public IReadOnlyDictionary Flows { get; init; } + = new Dictionary(); +} + +/// A single OAuth2 flow (authorization_code, client_credentials, implicit, password). +public sealed record ApiSecurityFlow +{ + public string? AuthorizationUrl { get; init; } + public string? TokenUrl { get; init; } + public string? RefreshUrl { get; init; } + + /// Scopes advertised by this flow keyed by scope name, values are descriptions. + public IReadOnlyDictionary Scopes { get; init; } = new Dictionary(); +} + +/// +/// A security requirement. The map names referenced schemes to the scopes +/// required on them. An operation accepts any single requirement in its list (OR), but every +/// scheme within the requirement must be satisfied (AND). 
+/// +public sealed record ApiSecurityRequirement +{ + public required IReadOnlyDictionary> Schemes { get; init; } +} diff --git a/src/DocFlow.Core/CanonicalModel/SemanticModel.cs b/src/DocFlow.Core/CanonicalModel/SemanticModel.cs index b15d9bd..8ddcd30 100644 --- a/src/DocFlow.Core/CanonicalModel/SemanticModel.cs +++ b/src/DocFlow.Core/CanonicalModel/SemanticModel.cs @@ -52,6 +52,12 @@ public sealed class SemanticModel /// Information about how this model was created /// public ModelProvenance? Provenance { get; set; } + + /// + /// API-level surface (operations, parameters, responses, servers, security) layered on top + /// of the entity model. Null when the source format does not describe an API (e.g. pure C# parse). + /// + public ApiSurface? Api { get; set; } /// /// Validation issues found in this model diff --git a/src/DocFlow.Diagrams/Mermaid/MermaidC4ContextGenerator.cs b/src/DocFlow.Diagrams/Mermaid/MermaidC4ContextGenerator.cs new file mode 100644 index 0000000..d5da10e --- /dev/null +++ b/src/DocFlow.Diagrams/Mermaid/MermaidC4ContextGenerator.cs @@ -0,0 +1,77 @@ +using System.Text; +using DocFlow.Core.CanonicalModel; + +namespace DocFlow.Diagrams.Mermaid; + +/// +/// Produces an architecture "context" diagram for an using a +/// flowchart LR base (the Mermaid C4Context primitive is still experimental at +/// the time of writing, so we fall back to flowchart-based C4-style grouping). +/// +/// Nodes: +/// +/// a single Client actor on the left +/// the API as one container (labeled with ) +/// each entry as a deployment node +/// each OAuth2 security scheme as an external identity provider node +/// +/// Output is deterministic: every collection is ordered alphabetically before rendering. +/// +public sealed class MermaidC4ContextGenerator +{ + public string Generate(ApiSurface? api) + { + var sb = new StringBuilder(); + sb.Append("flowchart LR\n"); + + var apiLabel = string.IsNullOrWhiteSpace(api?.Title) ? 
"API" : api.Title; + sb.Append(" Client([\"Client\"])\n"); + sb.Append($" API[[\"{EscapeLabel(apiLabel)}\"]]\n"); + sb.Append(" Client --> API\n"); + + if (api is null) + { + return sb.ToString(); + } + + var servers = api.Servers + .OrderBy(s => s.Url, StringComparer.Ordinal) + .ToList(); + for (var i = 0; i < servers.Count; i++) + { + var nodeId = $"Server{i + 1}"; + var label = string.IsNullOrWhiteSpace(servers[i].Description) + ? servers[i].Url + : $"{servers[i].Description}: {servers[i].Url}"; + sb.Append($" {nodeId}[(\"{EscapeLabel(label)}\")]\n"); + sb.Append($" API --> {nodeId}\n"); + } + + var oauthSchemes = api.SecuritySchemes + .Where(kvp => kvp.Value.Type == ApiSecuritySchemeType.OAuth2 + || kvp.Value.Type == ApiSecuritySchemeType.OpenIdConnect) + .OrderBy(kvp => kvp.Key, StringComparer.Ordinal) + .ToList(); + + for (var i = 0; i < oauthSchemes.Count; i++) + { + var (name, scheme) = (oauthSchemes[i].Key, oauthSchemes[i].Value); + var nodeId = $"Idp{i + 1}"; + var url = scheme.Flows + .Select(f => f.Value.AuthorizationUrl ?? f.Value.TokenUrl) + .FirstOrDefault(u => !string.IsNullOrEmpty(u)) + ?? scheme.OpenIdConnectUrl + ?? string.Empty; + var label = string.IsNullOrEmpty(url) ? 
$"IdP: {name}" : $"IdP: {name} ({url})"; + + sb.Append($" {nodeId}{{{{\"{EscapeLabel(label)}\"}}}}\n"); + sb.Append($" Client --> {nodeId}\n"); + sb.Append($" API --> {nodeId}\n"); + } + + return sb.ToString(); + } + + private static string EscapeLabel(string value) => + value.Replace('"', '\'').Replace('\n', ' ').Replace('\r', ' ').Trim(); +} diff --git a/src/DocFlow.Diagrams/Mermaid/MermaidEndpointFlowchartGenerator.cs b/src/DocFlow.Diagrams/Mermaid/MermaidEndpointFlowchartGenerator.cs new file mode 100644 index 0000000..f77e54c --- /dev/null +++ b/src/DocFlow.Diagrams/Mermaid/MermaidEndpointFlowchartGenerator.cs @@ -0,0 +1,101 @@ +using System.Text; +using DocFlow.Core.CanonicalModel; + +namespace DocFlow.Diagrams.Mermaid; + +/// +/// Produces a per-operation request-lifecycle flowchart: +/// Request → Validate Params → [Authorize] → Handler → Response (2xx) with branches +/// from Handler to each non-2xx response declared on the operation. +/// The Authorize node is omitted when the operation has no security requirements. +/// +public sealed class MermaidEndpointFlowchartGenerator +{ + public string Generate(ApiOperation operation) + { + var sb = new StringBuilder(); + sb.Append("flowchart LR\n"); + + sb.Append(" Request[\"Request\"]\n"); + sb.Append(" Validate[\"Validate Params\"]\n"); + sb.Append(" Handler[\"Handler\"]\n"); + + var requiresAuth = operation.SecurityRequirements.Count > 0; + if (requiresAuth) + { + sb.Append(" Authorize[\"Authorize\"]\n"); + } + + // Partition responses into success (2xx) and others. + var orderedResponses = operation.Responses + .OrderBy(r => r.Key, StringComparer.Ordinal) + .ToList(); + + var successResponses = orderedResponses + .Where(r => r.Key.StartsWith("2", StringComparison.Ordinal)) + .ToList(); + var otherResponses = orderedResponses + .Where(r => !r.Key.StartsWith("2", StringComparison.Ordinal)) + .ToList(); + + // Emit terminal response nodes. 
+ foreach (var (status, response) in orderedResponses) + { + var label = $"{status}: {Summarize(response)}"; + sb.Append($" Response{status}[\"{EscapeLabel(label)}\"]\n"); + } + + // Wire up the happy path. + sb.Append(" Request --> Validate\n"); + var handlerPredecessor = requiresAuth ? "Authorize" : "Validate"; + if (requiresAuth) + { + sb.Append(" Validate --> Authorize\n"); + } + sb.Append($" {handlerPredecessor} --> Handler\n"); + + foreach (var (status, _) in successResponses) + { + sb.Append($" Handler --> Response{status}\n"); + } + + // Branches to non-2xx responses (dashed to distinguish from the happy path). + foreach (var (status, _) in otherResponses) + { + sb.Append($" Handler -.-> Response{status}\n"); + } + + return sb.ToString(); + } + + private static string Summarize(ApiResponse response) + { + if (response.Content.Count == 0) + { + return string.IsNullOrWhiteSpace(response.Description) ? "no body" : response.Description; + } + + var first = response.Content + .OrderBy(c => c.Key, StringComparer.Ordinal) + .First() + .Value; + + if (!string.IsNullOrEmpty(first.EntityName)) return first.EntityName; + + var schema = first.Schema; + if (schema is null) return response.Description ?? string.Empty; + + if (schema.Type == "array" && schema.Items is not null) + { + var inner = !string.IsNullOrEmpty(schema.Items.EntityName) + ? schema.Items.EntityName + : schema.Items.Type; + return $"array<{inner}>"; + } + + return !string.IsNullOrEmpty(schema.EntityName) ? 
schema.EntityName : schema.Type;
    }

    private static string EscapeLabel(string value) =>
        value.Replace('"', '\'').Replace('\n', ' ').Replace('\r', ' ').Trim();
}
diff --git a/src/DocFlow.Diagrams/Mermaid/MermaidErDiagramGenerator.cs b/src/DocFlow.Diagrams/Mermaid/MermaidErDiagramGenerator.cs
new file mode 100644
index 0000000..b19c5b5
--- /dev/null
+++ b/src/DocFlow.Diagrams/Mermaid/MermaidErDiagramGenerator.cs
using System.Text;
using DocFlow.Core.CanonicalModel;

namespace DocFlow.Diagrams.Mermaid;

/// <summary>
/// Produces a Mermaid <c>erDiagram</c> from a <see cref="SemanticModel"/>.
///
/// Cardinality mapping (per <see cref="RelationshipType"/>):
/// <list type="bullet">
///   <item>Composition → <c>||--o{</c></item>
///   <item>Aggregation → <c>}o--o{</c></item>
///   <item>Association → <c>}o--||</c></item>
/// </list>
/// Other relationship kinds (inheritance, dependency, etc.) are not rendered in an ER diagram.
/// Output is deterministic: entities and relationships are ordered alphabetically.
/// </summary>
public sealed class MermaidErDiagramGenerator
{
    /// <summary>Renders <paramref name="model"/> as Mermaid erDiagram text.</summary>
    /// <param name="model">The canonical model whose entities and relationships are drawn.</param>
    /// <returns>Mermaid source; an empty model yields just the <c>erDiagram</c> header line.</returns>
    public string Generate(SemanticModel model)
    {
        var sb = new StringBuilder();
        sb.Append("erDiagram\n");

        var entities = model.Entities.Values
            .OrderBy(e => e.Name, StringComparer.Ordinal)
            .ToList();

        if (entities.Count == 0)
        {
            return sb.ToString();
        }

        var entitiesById = entities.ToDictionary(e => e.Id, e => e);

        // Keep only relationships whose endpoints both resolve and whose kind maps to an
        // ER cardinality (MapCardinality returns null for non-ER kinds).
        var relationships = model.Relationships
            .Where(r => entitiesById.ContainsKey(r.SourceEntityId)
                        && entitiesById.ContainsKey(r.TargetEntityId)
                        && MapCardinality(r.Type) is not null)
            .OrderBy(r => entitiesById[r.SourceEntityId].Name, StringComparer.Ordinal)
            .ThenBy(r => entitiesById[r.TargetEntityId].Name, StringComparer.Ordinal)
            .ThenBy(r => r.Type)
            .ToList();

        var touchedEntityIds = new HashSet<string>(StringComparer.Ordinal);

        foreach (var rel in relationships)
        {
            var source = entitiesById[rel.SourceEntityId];
            var target = entitiesById[rel.TargetEntityId];
            var cardinality = MapCardinality(rel.Type)!;
            var label = string.IsNullOrWhiteSpace(rel.Name)
                ? rel.Type.ToString().ToLowerInvariant()
                : rel.Name.Trim();

            sb.Append($"    {SanitizeName(source.Name)} {cardinality} {SanitizeName(target.Name)} : {SanitizeLabel(label)}\n");
            touchedEntityIds.Add(source.Id);
            touchedEntityIds.Add(target.Id);
        }

        // Entities not covered by any rendered relationship are emitted as standalone blocks so
        // a model with no relationships still produces a valid erDiagram with one entity.
        foreach (var entity in entities.Where(e => !touchedEntityIds.Contains(e.Id)))
        {
            sb.Append($"    {SanitizeName(entity.Name)} {{\n");
            foreach (var property in entity.Properties
                         .OrderBy(p => p.Name, StringComparer.Ordinal))
            {
                var type = FormatType(property.Type);
                sb.Append($"        {type} {SanitizeFieldName(property.Name)}\n");
            }
            sb.Append("    }\n");
        }

        return sb.ToString();
    }

    // Non-ER relationship kinds (inheritance, dependency, …) return null and are skipped.
    private static string? MapCardinality(RelationshipType type) => type switch
    {
        RelationshipType.Composition => "||--o{",
        RelationshipType.Aggregation => "}o--o{",
        RelationshipType.Association => "}o--||",
        _ => null
    };

    // Collections render as their element type: Mermaid attribute types cannot carry generics.
    private static string FormatType(SemanticType type)
    {
        var name = type.IsCollection && type.GenericArguments.Count > 0
            ? type.GenericArguments[0].Name
            : type.Name;
        return SanitizeFieldName(name);
    }

    private static string SanitizeName(string name) =>
        name.Replace(' ', '_').Replace('<', '_').Replace('>', '_').Replace(',', '_').Replace('.', '_');

    private static string SanitizeFieldName(string name) =>
        name.Replace(' ', '_').Replace('<', '_').Replace('>', '_').Replace(',', '_');

    private static string SanitizeLabel(string label) =>
        label.Replace('"', '\'').Replace('\n', ' ').Trim();
}
diff --git a/src/DocFlow.Diagrams/Mermaid/MermaidSequenceDiagramGenerator.cs b/src/DocFlow.Diagrams/Mermaid/MermaidSequenceDiagramGenerator.cs
new file mode 100644
index 0000000..84d77d3
--- /dev/null
+++ b/src/DocFlow.Diagrams/Mermaid/MermaidSequenceDiagramGenerator.cs
using System.Text;
using DocFlow.Core.CanonicalModel;

namespace DocFlow.Diagrams.Mermaid;

/// <summary>
/// Produces a Mermaid <c>sequenceDiagram</c> for a single <see cref="ApiOperation"/>.
///
/// Participants: <c>Client</c>, <c>API</c>, and — when the operation declares security
/// requirements — <c>Auth</c>. Messages include the HTTP method and path, the first request
/// body media type (when applicable), and the first successful (2xx) response.
/// </summary>
public sealed class MermaidSequenceDiagramGenerator
{
    /// <summary>Renders a three-message (or five, with auth) sequence diagram for one operation.</summary>
    public string Generate(ApiOperation operation)
    {
        var sb = new StringBuilder();
        sb.Append("sequenceDiagram\n");
        sb.Append("    participant Client\n");
        sb.Append("    participant API\n");

        // The Auth participant is only drawn when the operation declares security requirements.
        var requiresAuth = operation.SecurityRequirements.Count > 0;
        if (requiresAuth)
        {
            sb.Append("    participant Auth\n");
            sb.Append("    Client->>Auth: authenticate\n");
            sb.Append("    Auth-->>Client: token\n");
        }

        var requestMessage = BuildRequestMessage(operation);
        sb.Append($"    Client->>API: {requestMessage}\n");

        var responseMessage = BuildResponseMessage(operation);
        sb.Append($"    API-->>Client: {responseMessage}\n");

        return sb.ToString();
    }

    // "METHOD /path (PayloadType)" — payload suffix omitted when no request body is recognised.
    private static string BuildRequestMessage(ApiOperation operation)
    {
        var method = operation.Method.ToString().ToUpperInvariant();
        var line = $"{method} {operation.Path}";

        var payloadType = DescribePayload(operation.RequestBody?.Content);
        if (payloadType is not null)
        {
            line += $" ({payloadType})";
        }

        return Sanitize(line);
    }

    private static string BuildResponseMessage(ApiOperation operation)
    {
        // Prefer the first successful (2xx) response, falling back to the first listed.
        var (status, response) = operation.Responses
            .OrderBy(r => r.Key, StringComparer.Ordinal)
            .FirstOrDefault(r => r.Key.StartsWith("2", StringComparison.Ordinal));

        if (response is null && operation.Responses.Count > 0)
        {
            var first = operation.Responses
                .OrderBy(r => r.Key, StringComparer.Ordinal)
                .First();
            status = first.Key;
            response = first.Value;
        }

        if (response is null)
        {
            return "response";
        }

        var schema = DescribePayload(response.Content);
        return schema is null
            ? Sanitize(status)
            : Sanitize($"{status} {schema}");
    }

    // Describes the first media type (by ordinal key order): named entity, array<item>, or raw type.
    private static string? DescribePayload(IReadOnlyDictionary<string, ApiMediaType>? content)
    {
        if (content is null || content.Count == 0) return null;

        var first = content
            .OrderBy(c => c.Key, StringComparer.Ordinal)
            .First()
            .Value;

        if (!string.IsNullOrEmpty(first.EntityName)) return first.EntityName;

        var schema = first.Schema;
        if (schema is null) return null;

        if (schema.Type == "array" && schema.Items is not null)
        {
            var itemName = !string.IsNullOrEmpty(schema.Items.EntityName)
                ? schema.Items.EntityName
                : schema.Items.Type;
            return $"array<{itemName}>";
        }

        return !string.IsNullOrEmpty(schema.EntityName) ? schema.EntityName : schema.Type;
    }

    private static string Sanitize(string value) =>
        value.Replace('\n', ' ').Replace('\r', ' ').Trim();
}
diff --git a/src/DocFlow.Documentation/Abstractions/IDocumentationGenerator.cs b/src/DocFlow.Documentation/Abstractions/IDocumentationGenerator.cs
new file mode 100644
index 0000000..ca89437
--- /dev/null
+++ b/src/DocFlow.Documentation/Abstractions/IDocumentationGenerator.cs
using DocFlow.Core.CanonicalModel;
using DocFlow.Documentation.Models;
using DocFlow.Documentation.Options;

namespace DocFlow.Documentation.Abstractions;

/// <summary>
/// Produces a documentation bundle (a set of <see cref="GeneratedFile"/>s) from a
/// <see cref="SemanticModel"/>. Implementations are pure: they return files in memory and
/// do not write to disk — callers (the CLI) own persistence.
/// </summary>
public interface IDocumentationGenerator
{
    /// <summary>
    /// Generate the documentation bundle.
    /// </summary>
    /// <param name="model">The canonical model to document.</param>
    /// <param name="options">Generation options (format, diagram kinds, grouping, etc.).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>An ordered, deterministic list of files to emit.</returns>
+ Task> GenerateAsync( + SemanticModel model, + DocumentationOptions options, + CancellationToken cancellationToken = default); +} diff --git a/src/DocFlow.Documentation/Diagrams/.gitkeep b/src/DocFlow.Documentation/Diagrams/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/src/DocFlow.Documentation/Diff/ChangelogGenerator.cs b/src/DocFlow.Documentation/Diff/ChangelogGenerator.cs new file mode 100644 index 0000000..d29b88c --- /dev/null +++ b/src/DocFlow.Documentation/Diff/ChangelogGenerator.cs @@ -0,0 +1,73 @@ +using DocFlow.Documentation.Markdown; + +namespace DocFlow.Documentation.Diff; + +/// +/// Renders a as a readable Markdown changelog. Changes are grouped by +/// severity (Breaking first) and then by category, with a summary table at the top. +/// +public sealed class ChangelogGenerator +{ + public string Render(SpecDiff diff) + { + var writer = new MarkdownWriter(); + writer.Heading(1, "API Changelog"); + writer.Line(); + + writer.Heading(2, "Summary"); + writer.Line(); + writer.Line("| Severity | Count |"); + writer.Line("| --- | --- |"); + writer.Line($"| Breaking | {diff.BreakingCount} |"); + writer.Line($"| Non-breaking | {diff.NonBreakingCount} |"); + writer.Line(); + + if (!diff.HasChanges) + { + writer.Line("_No differences detected._"); + writer.Line(); + return writer.ToString(); + } + + RenderBySeverity(writer, diff, ChangeSeverity.Breaking, "Breaking Changes"); + RenderBySeverity(writer, diff, ChangeSeverity.NonBreaking, "Non-breaking Changes"); + + return writer.ToString(); + } + + private static void RenderBySeverity(MarkdownWriter writer, SpecDiff diff, ChangeSeverity severity, string heading) + { + var changesBySeverity = diff.Changes.Where(c => c.Severity == severity).ToList(); + if (changesBySeverity.Count == 0) return; + + writer.Heading(2, heading); + writer.Line(); + + foreach (var category in changesBySeverity + .GroupBy(c => c.Category) + .OrderBy(g => g.Key)) + { + writer.Heading(3, CategoryLabel(category.Key)); + 
writer.Line(); + + foreach (var change in category + .OrderBy(c => c.Path, StringComparer.Ordinal) + .ThenBy(c => c.Description, StringComparer.Ordinal)) + { + writer.Line($"- {change.Description}"); + } + writer.Line(); + } + } + + private static string CategoryLabel(ChangeCategory category) => category switch + { + ChangeCategory.Operation => "Operations", + ChangeCategory.Parameter => "Parameters", + ChangeCategory.RequestBody => "Request Bodies", + ChangeCategory.Response => "Responses", + ChangeCategory.Schema => "Schemas", + ChangeCategory.Security => "Security", + _ => category.ToString() + }; +} diff --git a/src/DocFlow.Documentation/Diff/SpecChange.cs b/src/DocFlow.Documentation/Diff/SpecChange.cs new file mode 100644 index 0000000..3fc7511 --- /dev/null +++ b/src/DocFlow.Documentation/Diff/SpecChange.cs @@ -0,0 +1,38 @@ +namespace DocFlow.Documentation.Diff; + +/// Which facet of the spec a change affects. +public enum ChangeCategory +{ + Operation, + Parameter, + RequestBody, + Response, + Schema, + Security +} + +/// How disruptive a change is to consumers. +public enum ChangeSeverity +{ + Breaking, + NonBreaking +} + +/// A single spec-to-spec difference. +public sealed record SpecChange +{ + public required ChangeCategory Category { get; init; } + public required ChangeSeverity Severity { get; init; } + public required string Description { get; init; } + public string? Path { get; init; } +} + +/// The full set of differences plus convenience counters. 
public sealed record SpecDiff
{
    public IReadOnlyList<SpecChange> Changes { get; init; } = [];

    public bool HasChanges => Changes.Count > 0;
    public int BreakingCount => Changes.Count(c => c.Severity == ChangeSeverity.Breaking);
    public int NonBreakingCount => Changes.Count(c => c.Severity == ChangeSeverity.NonBreaking);
}
diff --git a/src/DocFlow.Documentation/Diff/SpecDiffer.cs b/src/DocFlow.Documentation/Diff/SpecDiffer.cs
new file mode 100644
index 0000000..9939357
--- /dev/null
+++ b/src/DocFlow.Documentation/Diff/SpecDiffer.cs
using DocFlow.Core.CanonicalModel;

namespace DocFlow.Documentation.Diff;

/// <summary>
/// Produces a <see cref="SpecDiff"/> between two <see cref="SemanticModel"/> instances.
/// Heuristics (industry conventions):
/// <list type="bullet">
///   <item>Operations: added → non-breaking; removed → breaking.</item>
///   <item>Parameters: required added → breaking; optional added → non-breaking; removed →
///   breaking; type change → breaking; required flag flip true→false → non-breaking,
///   false→true → breaking.</item>
///   <item>Schemas (entities): added → non-breaking; removed → breaking; property
///   added-required → breaking; property added-optional → non-breaking; property removed →
///   breaking; type change → breaking; required flag flip same as parameters.</item>
///   <item>Request/response content-entity rename on an operation → breaking.</item>
/// </list>
/// </summary>
public sealed class SpecDiffer
{
    /// <summary>Computes all changes from <paramref name="oldModel"/> to <paramref name="newModel"/>.</summary>
    /// <returns>A <see cref="SpecDiff"/> whose changes are deterministically ordered
    /// (severity, then category, then path, then description).</returns>
    public SpecDiff Diff(SemanticModel oldModel, SemanticModel newModel)
    {
        var changes = new List<SpecChange>();
        DiffOperations(oldModel.Api, newModel.Api, changes);
        DiffEntities(oldModel, newModel, changes);

        var ordered = changes
            .OrderBy(c => c.Severity)
            .ThenBy(c => c.Category)
            .ThenBy(c => c.Path, StringComparer.Ordinal)
            .ThenBy(c => c.Description, StringComparer.Ordinal)
            .ToList();

        return new SpecDiff { Changes = ordered };
    }

    // Operations are correlated by OperationId; added/removed/changed are reported separately.
    private static void DiffOperations(ApiSurface? oldApi, ApiSurface? newApi, List<SpecChange> changes)
    {
        var oldOps = (oldApi?.Operations ?? [])
            .ToDictionary(o => o.OperationId, StringComparer.Ordinal);
        var newOps = (newApi?.Operations ?? [])
            .ToDictionary(o => o.OperationId, StringComparer.Ordinal);

        foreach (var added in newOps.Keys.Except(oldOps.Keys, StringComparer.Ordinal).OrderBy(k => k, StringComparer.Ordinal))
        {
            changes.Add(new SpecChange
            {
                Category = ChangeCategory.Operation,
                Severity = ChangeSeverity.NonBreaking,
                Description = $"Added operation `{added}`",
                Path = added
            });
        }

        foreach (var removed in oldOps.Keys.Except(newOps.Keys, StringComparer.Ordinal).OrderBy(k => k, StringComparer.Ordinal))
        {
            changes.Add(new SpecChange
            {
                Category = ChangeCategory.Operation,
                Severity = ChangeSeverity.Breaking,
                Description = $"Removed operation `{removed}`",
                Path = removed
            });
        }

        foreach (var id in oldOps.Keys.Intersect(newOps.Keys, StringComparer.Ordinal).OrderBy(k => k, StringComparer.Ordinal))
        {
            DiffOperation(oldOps[id], newOps[id], changes);
        }
    }

    // Method or path changes on a retained operation are always breaking.
    private static void DiffOperation(ApiOperation oldOp, ApiOperation newOp, List<SpecChange> changes)
    {
        if (oldOp.Method != newOp.Method)
        {
            changes.Add(new SpecChange
            {
                Category = ChangeCategory.Operation,
                Severity = ChangeSeverity.Breaking,
                Description = $"Changed HTTP method of `{oldOp.OperationId}` from {oldOp.Method} to {newOp.Method}",
                Path = oldOp.OperationId
            });
        }

        if (!string.Equals(oldOp.Path, newOp.Path, StringComparison.Ordinal))
        {
            changes.Add(new SpecChange
            {
                Category = ChangeCategory.Operation,
                Severity = ChangeSeverity.Breaking,
                Description = $"Changed path of `{oldOp.OperationId}` from `{oldOp.Path}` to `{newOp.Path}`",
                Path = oldOp.OperationId
            });
        }

        DiffParameters(oldOp, newOp, changes);
        DiffRequestBody(oldOp, newOp, changes);
        DiffResponses(oldOp, newOp, changes);
    }

    // Parameters are correlated by (Name, Location) so e.g. a query and header param may share a name.
    private static void DiffParameters(ApiOperation oldOp, ApiOperation newOp, List<SpecChange> changes)
    {
        var oldByKey = oldOp.Parameters.ToDictionary(p => (p.Name, p.Location));
        var newByKey = newOp.Parameters.ToDictionary(p => (p.Name, p.Location));

        foreach (var key in newByKey.Keys.Except(oldByKey.Keys).OrderBy(k => k.Name, StringComparer.Ordinal))
        {
            var param = newByKey[key];
            changes.Add(new SpecChange
            {
                Category = ChangeCategory.Parameter,
                Severity = param.Required ? ChangeSeverity.Breaking : ChangeSeverity.NonBreaking,
                Description = $"Added {(param.Required ? "required" : "optional")} {param.Location.ToString().ToLowerInvariant()} parameter `{param.Name}` to `{oldOp.OperationId}`",
                Path = $"{oldOp.OperationId}.{param.Name}"
            });
        }

        foreach (var key in oldByKey.Keys.Except(newByKey.Keys).OrderBy(k => k.Name, StringComparer.Ordinal))
        {
            var param = oldByKey[key];
            changes.Add(new SpecChange
            {
                Category = ChangeCategory.Parameter,
                Severity = ChangeSeverity.Breaking,
                Description = $"Removed {param.Location.ToString().ToLowerInvariant()} parameter `{param.Name}` from `{oldOp.OperationId}`",
                Path = $"{oldOp.OperationId}.{param.Name}"
            });
        }

        foreach (var key in oldByKey.Keys.Intersect(newByKey.Keys).OrderBy(k => k.Name, StringComparer.Ordinal))
        {
            var oldP = oldByKey[key];
            var newP = newByKey[key];

            if (oldP.Required != newP.Required)
            {
                changes.Add(new SpecChange
                {
                    Category = ChangeCategory.Parameter,
                    Severity = newP.Required ? ChangeSeverity.Breaking : ChangeSeverity.NonBreaking,
                    Description = $"Parameter `{oldP.Name}` on `{oldOp.OperationId}` is now {(newP.Required ? "required" : "optional")} (was {(oldP.Required ? "required" : "optional")})",
                    Path = $"{oldOp.OperationId}.{oldP.Name}"
                });
            }

            var oldType = DescribeSchemaType(oldP.Schema);
            var newType = DescribeSchemaType(newP.Schema);
            if (!string.Equals(oldType, newType, StringComparison.Ordinal))
            {
                changes.Add(new SpecChange
                {
                    Category = ChangeCategory.Parameter,
                    Severity = ChangeSeverity.Breaking,
                    Description = $"Changed type of parameter `{oldP.Name}` on `{oldOp.OperationId}` from `{oldType}` to `{newType}`",
                    Path = $"{oldOp.OperationId}.{oldP.Name}"
                });
            }
        }
    }

    // Request bodies are compared by the first content entity name only (media-type churn is ignored).
    private static void DiffRequestBody(ApiOperation oldOp, ApiOperation newOp, List<SpecChange> changes)
    {
        var oldEntity = FirstEntityName(oldOp.RequestBody?.Content);
        var newEntity = FirstEntityName(newOp.RequestBody?.Content);

        if (oldEntity is null && newEntity is not null)
        {
            changes.Add(new SpecChange
            {
                Category = ChangeCategory.RequestBody,
                // A newly-added body only breaks callers if it is required.
                Severity = (newOp.RequestBody?.Required ?? false) ? ChangeSeverity.Breaking : ChangeSeverity.NonBreaking,
                Description = $"Added request body `{newEntity}` to `{oldOp.OperationId}`",
                Path = oldOp.OperationId
            });
        }
        else if (oldEntity is not null && newEntity is null)
        {
            changes.Add(new SpecChange
            {
                Category = ChangeCategory.RequestBody,
                Severity = ChangeSeverity.Breaking,
                Description = $"Removed request body from `{oldOp.OperationId}`",
                Path = oldOp.OperationId
            });
        }
        else if (oldEntity is not null && newEntity is not null
                 && !string.Equals(oldEntity, newEntity, StringComparison.Ordinal))
        {
            changes.Add(new SpecChange
            {
                Category = ChangeCategory.RequestBody,
                Severity = ChangeSeverity.Breaking,
                Description = $"Request body of `{oldOp.OperationId}` changed from `{oldEntity}` to `{newEntity}`",
                Path = oldOp.OperationId
            });
        }
    }

    // Responses are correlated by status code; only entity renames on retained statuses are reported.
    private static void DiffResponses(ApiOperation oldOp, ApiOperation newOp, List<SpecChange> changes)
    {
        var oldStatuses = oldOp.Responses.Keys.ToHashSet(StringComparer.Ordinal);
        var newStatuses = newOp.Responses.Keys.ToHashSet(StringComparer.Ordinal);

        foreach (var added in newStatuses.Except(oldStatuses).OrderBy(s => s, StringComparer.Ordinal))
        {
            changes.Add(new SpecChange
            {
                Category = ChangeCategory.Response,
                Severity = ChangeSeverity.NonBreaking,
                Description = $"Added response `{added}` to `{oldOp.OperationId}`",
                Path = $"{oldOp.OperationId}:{added}"
            });
        }

        foreach (var removed in oldStatuses.Except(newStatuses).OrderBy(s => s, StringComparer.Ordinal))
        {
            changes.Add(new SpecChange
            {
                Category = ChangeCategory.Response,
                Severity = ChangeSeverity.Breaking,
                Description = $"Removed response `{removed}` from `{oldOp.OperationId}`",
                Path = $"{oldOp.OperationId}:{removed}"
            });
        }

        foreach (var status in oldStatuses.Intersect(newStatuses).OrderBy(s => s, StringComparer.Ordinal))
        {
            var oldEntity = FirstEntityName(oldOp.Responses[status].Content);
            var newEntity = FirstEntityName(newOp.Responses[status].Content);
            if (oldEntity is not null && newEntity is not null
                && !string.Equals(oldEntity, newEntity, StringComparison.Ordinal))
            {
                changes.Add(new SpecChange
                {
                    Category = ChangeCategory.Response,
                    Severity = ChangeSeverity.Breaking,
                    Description = $"Response `{status}` of `{oldOp.OperationId}` changed from `{oldEntity}` to `{newEntity}`",
                    Path = $"{oldOp.OperationId}:{status}"
                });
            }
        }
    }

    // Entities are correlated by Name (first occurrence wins on duplicates), not by Id,
    // because Ids are not stable across two independent parses of a spec.
    private static void DiffEntities(SemanticModel oldModel, SemanticModel newModel, List<SpecChange> changes)
    {
        var oldEntities = oldModel.Entities.Values
            .GroupBy(e => e.Name, StringComparer.Ordinal)
            .ToDictionary(g => g.Key, g => g.First(), StringComparer.Ordinal);
        var newEntities = newModel.Entities.Values
            .GroupBy(e => e.Name, StringComparer.Ordinal)
            .ToDictionary(g => g.Key, g => g.First(), StringComparer.Ordinal);

        foreach (var added in newEntities.Keys.Except(oldEntities.Keys, StringComparer.Ordinal).OrderBy(k => k, StringComparer.Ordinal))
        {
            changes.Add(new SpecChange
            {
                Category = ChangeCategory.Schema,
                Severity = ChangeSeverity.NonBreaking,
                Description = $"Added schema `{added}`",
                Path = added
            });
        }

        foreach (var removed in oldEntities.Keys.Except(newEntities.Keys, StringComparer.Ordinal).OrderBy(k => k, StringComparer.Ordinal))
        {
            changes.Add(new SpecChange
            {
                Category = ChangeCategory.Schema,
                Severity = ChangeSeverity.Breaking,
                Description = $"Removed schema `{removed}`",
                Path = removed
            });
        }

        foreach (var name in oldEntities.Keys.Intersect(newEntities.Keys, StringComparer.Ordinal).OrderBy(k => k, StringComparer.Ordinal))
        {
            DiffEntityProperties(oldEntities[name], newEntities[name], changes);
        }
    }

    // Properties are correlated by name (first occurrence wins on duplicates).
    private static void DiffEntityProperties(SemanticEntity oldEntity, SemanticEntity newEntity, List<SpecChange> changes)
    {
        var oldProps = oldEntity.Properties
            .GroupBy(p => p.Name, StringComparer.Ordinal)
            .ToDictionary(g => g.Key, g => g.First(), StringComparer.Ordinal);
        var newProps = newEntity.Properties
            .GroupBy(p => p.Name, StringComparer.Ordinal)
            .ToDictionary(g => g.Key, g => g.First(), StringComparer.Ordinal);

        foreach (var added in newProps.Keys.Except(oldProps.Keys, StringComparer.Ordinal).OrderBy(k => k, StringComparer.Ordinal))
        {
            var prop = newProps[added];
            changes.Add(new SpecChange
            {
                Category = ChangeCategory.Schema,
                Severity = prop.IsRequired ? ChangeSeverity.Breaking : ChangeSeverity.NonBreaking,
                Description = $"Added {(prop.IsRequired ? "required" : "optional")} property `{oldEntity.Name}.{added}`",
                Path = $"{oldEntity.Name}.{added}"
            });
        }

        foreach (var removed in oldProps.Keys.Except(newProps.Keys, StringComparer.Ordinal).OrderBy(k => k, StringComparer.Ordinal))
        {
            changes.Add(new SpecChange
            {
                Category = ChangeCategory.Schema,
                Severity = ChangeSeverity.Breaking,
                Description = $"Removed property `{oldEntity.Name}.{removed}`",
                Path = $"{oldEntity.Name}.{removed}"
            });
        }

        foreach (var name in oldProps.Keys.Intersect(newProps.Keys, StringComparer.Ordinal).OrderBy(k => k, StringComparer.Ordinal))
        {
            var oldP = oldProps[name];
            var newP = newProps[name];

            if (!string.Equals(oldP.Type.Name, newP.Type.Name, StringComparison.Ordinal))
            {
                changes.Add(new SpecChange
                {
                    Category = ChangeCategory.Schema,
                    Severity = ChangeSeverity.Breaking,
                    Description = $"Changed type of `{oldEntity.Name}.{name}` from `{oldP.Type.Name}` to `{newP.Type.Name}`",
                    Path = $"{oldEntity.Name}.{name}"
                });
            }

            if (oldP.IsRequired != newP.IsRequired)
            {
                changes.Add(new SpecChange
                {
                    Category = ChangeCategory.Schema,
                    Severity = newP.IsRequired ? ChangeSeverity.Breaking : ChangeSeverity.NonBreaking,
                    Description = $"Property `{oldEntity.Name}.{name}` is now {(newP.IsRequired ? "required" : "optional")} (was {(oldP.IsRequired ? "required" : "optional")})",
                    Path = $"{oldEntity.Name}.{name}"
                });
            }
        }
    }

    // First content value (by ordinal media-type key): named entity, schema entity, or array<entity>.
    private static string? FirstEntityName(IReadOnlyDictionary<string, ApiMediaType>? content)
    {
        if (content is null || content.Count == 0) return null;
        var first = content.OrderBy(c => c.Key, StringComparer.Ordinal).First().Value;
        if (!string.IsNullOrEmpty(first.EntityName)) return first.EntityName;
        if (!string.IsNullOrEmpty(first.Schema?.EntityName)) return first.Schema!.EntityName;
        if (first.Schema?.Type == "array" && !string.IsNullOrEmpty(first.Schema.Items?.EntityName))
        {
            return $"array<{first.Schema.Items!.EntityName}>";
        }
        return null;
    }

    // NOTE(review): callers pass a parameter's Schema here, so ApiParameter.Schema is
    // presumably an ApiMediaType — confirm against ApiSurface.cs.
    private static string DescribeSchemaType(ApiMediaType? media)
    {
        if (media is null) return "unknown";
        if (!string.IsNullOrEmpty(media.EntityName)) return media.EntityName;
        var schema = media.Schema;
        if (schema is null) return "unknown";
        if (schema.Type == "array" && schema.Items is not null)
        {
            var inner = !string.IsNullOrEmpty(schema.Items.EntityName) ? schema.Items.EntityName : schema.Items.Type;
            return $"array<{inner}>";
        }
        if (!string.IsNullOrEmpty(schema.EntityName)) return schema.EntityName;
        return string.IsNullOrEmpty(schema.Format) ? schema.Type : $"{schema.Type}({schema.Format})";
    }
}
diff --git a/src/DocFlow.Documentation/DocFlow.Documentation.csproj b/src/DocFlow.Documentation/DocFlow.Documentation.csproj
new file mode 100644
index 0000000..1745b45
--- /dev/null
+++ b/src/DocFlow.Documentation/DocFlow.Documentation.csproj
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net8.0</TargetFramework>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <RootNamespace>DocFlow.Documentation</RootNamespace>
    <LangVersion>latest</LangVersion>
    <Description>Generates navigable design-documentation bundles (Markdown + Mermaid diagrams) from the DocFlow canonical semantic model.</Description>
+ Kurt Mitchell + DocFlow + documentation;openapi;markdown;mermaid;ddd + + + + + + + + + + + + + + + + diff --git a/src/DocFlow.Documentation/Examples/ExampleSynthesizer.cs b/src/DocFlow.Documentation/Examples/ExampleSynthesizer.cs new file mode 100644 index 0000000..e8ff5c5 --- /dev/null +++ b/src/DocFlow.Documentation/Examples/ExampleSynthesizer.cs @@ -0,0 +1,141 @@ +using System.Text; +using DocFlow.Core.CanonicalModel; + +namespace DocFlow.Documentation.Examples; + +/// +/// Produces a JSON payload that illustrates an . +/// Prefers a spec-provided example () when available; +/// otherwise synthesizes a payload from the schema / referenced . +/// +/// Synthesis rules: +/// +/// string → first enum value if constrained, else "string" (or ISO-8601 placeholder for date-time). +/// integer/number0. +/// booleanfalse. +/// array → single-element array of the item type. +/// object/entity → all properties emitted; cycles terminate with "...". +/// +/// +public sealed class ExampleSynthesizer +{ + private readonly IReadOnlyDictionary _entitiesByName; + + public ExampleSynthesizer(SemanticModel model) + { + _entitiesByName = model.Entities.Values + .GroupBy(e => e.Name, StringComparer.Ordinal) + .ToDictionary(g => g.Key, g => g.First(), StringComparer.Ordinal); + } + + /// + /// Returns a JSON string illustrating , or null when nothing useful + /// can be produced (neither a spec example nor a recognisable schema). + /// + public string? Synthesize(ApiMediaType media) + { + if (!string.IsNullOrWhiteSpace(media.Example)) + { + return media.Example.Trim(); + } + + var visited = new HashSet(StringComparer.Ordinal); + + if (!string.IsNullOrEmpty(media.EntityName) && _entitiesByName.TryGetValue(media.EntityName, out var entity)) + { + return SynthesizeEntity(entity, visited); + } + + return media.Schema is null ? 
null : SynthesizeSchema(media.Schema, visited); + } + + private string SynthesizeEntity(SemanticEntity entity, HashSet visited) + { + if (!visited.Add(entity.Name)) + { + return "\"...\""; + } + + try + { + var sb = new StringBuilder(); + sb.Append('{'); + var first = true; + foreach (var property in entity.Properties + .OrderBy(p => p.Name, StringComparer.Ordinal)) + { + if (!first) sb.Append(", "); + first = false; + sb.Append($"\"{Escape(property.Name)}\": "); + sb.Append(SynthesizeProperty(property, visited)); + } + sb.Append('}'); + return sb.ToString(); + } + finally + { + visited.Remove(entity.Name); + } + } + + private string SynthesizeProperty(SemanticProperty property, HashSet visited) + { + // Collection navigation property → array of element type. + if (property.Type.IsCollection && property.Type.GenericArguments.Count > 0) + { + var elementName = property.Type.GenericArguments[0].Name; + return $"[{SynthesizeByTypeName(elementName, visited)}]"; + } + + return SynthesizeByTypeName(property.Type.Name, visited); + } + + private string SynthesizeByTypeName(string typeName, HashSet visited) + { + if (_entitiesByName.TryGetValue(typeName, out var entity)) + { + return SynthesizeEntity(entity, visited); + } + + return typeName.ToLowerInvariant() switch + { + "string" => "\"string\"", + "int" or "long" or "integer" or "short" or "byte" => "0", + "decimal" or "double" or "float" or "number" => "0", + "bool" or "boolean" => "false", + "datetime" => "\"2026-01-01T00:00:00Z\"", + "dateonly" or "date" => "\"2026-01-01\"", + "timespan" or "time" => "\"00:00:00\"", + "guid" or "uuid" => "\"00000000-0000-0000-0000-000000000000\"", + _ => "\"string\"" + }; + } + + private string SynthesizeSchema(ApiSchema schema, HashSet visited) + { + // A schema that resolves to a named entity + if (!string.IsNullOrEmpty(schema.EntityName) && _entitiesByName.TryGetValue(schema.EntityName, out var entity)) + { + return SynthesizeEntity(entity, visited); + } + + return 
schema.Type.ToLowerInvariant() switch + { + "string" when schema.Enum.Count > 0 => $"\"{Escape(schema.Enum[0])}\"", + "string" when string.Equals(schema.Format, "date-time", StringComparison.OrdinalIgnoreCase) => "\"2026-01-01T00:00:00Z\"", + "string" when string.Equals(schema.Format, "date", StringComparison.OrdinalIgnoreCase) => "\"2026-01-01\"", + "string" when string.Equals(schema.Format, "uuid", StringComparison.OrdinalIgnoreCase) => "\"00000000-0000-0000-0000-000000000000\"", + "string" => "\"string\"", + "integer" => "0", + "number" => "0", + "boolean" => "false", + "array" when schema.Items is not null => $"[{SynthesizeSchema(schema.Items, visited)}]", + "array" => "[]", + "object" => "{}", + _ => "null" + }; + } + + private static string Escape(string value) => + value.Replace("\\", "\\\\").Replace("\"", "\\\""); +} diff --git a/src/DocFlow.Documentation/Html/Assets/theme.css b/src/DocFlow.Documentation/Html/Assets/theme.css new file mode 100644 index 0000000..1bacc26 --- /dev/null +++ b/src/DocFlow.Documentation/Html/Assets/theme.css @@ -0,0 +1,105 @@ +:root { + color-scheme: light dark; + --bg: #ffffff; + --fg: #24292f; + --muted: #57606a; + --accent: #0969da; + --border: #d0d7de; + --code-bg: #f6f8fa; + --sidebar-bg: #f6f8fa; + --sidebar-fg: #24292f; + --active-bg: #e7f3ff; +} + +@media (prefers-color-scheme: dark) { + :root { + --bg: #0d1117; + --fg: #e6edf3; + --muted: #7d8590; + --accent: #58a6ff; + --border: #30363d; + --code-bg: #161b22; + --sidebar-bg: #161b22; + --sidebar-fg: #e6edf3; + --active-bg: #1f3a60; + } +} + +* { box-sizing: border-box; } + +body { + margin: 0; + background: var(--bg); + color: var(--fg); + font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Helvetica, Arial, sans-serif; + font-size: 16px; + line-height: 1.55; +} + +.layout { + display: flex; + min-height: 100vh; +} + +aside.sidebar { + width: 260px; + padding: 24px 16px; + background: var(--sidebar-bg); + color: var(--sidebar-fg); + border-right: 1px solid 
var(--border);
  /* Sidebar stays pinned and independently scrollable while the content scrolls. */
  position: sticky;
  top: 0;
  height: 100vh;
  overflow-y: auto;
}

aside.sidebar h2 {
  font-size: 14px;
  text-transform: uppercase;
  letter-spacing: 0.05em;
  color: var(--muted);
  margin: 16px 0 8px;
}

aside.sidebar ul { list-style: none; padding-left: 0; margin: 0; }
aside.sidebar li { margin: 4px 0; }
aside.sidebar li ul { padding-left: 14px; }
aside.sidebar a {
  color: var(--sidebar-fg);
  text-decoration: none;
  padding: 4px 6px;
  border-radius: 4px;
  display: block;
}
aside.sidebar a:hover { background: var(--active-bg); }
aside.sidebar a.active { background: var(--active-bg); color: var(--accent); font-weight: 600; }

main.content {
  flex: 1;
  padding: 32px 48px;
  max-width: 900px;
}

h1, h2, h3, h4 { line-height: 1.25; }
h1 { border-bottom: 1px solid var(--border); padding-bottom: 8px; }
h2 { margin-top: 32px; border-bottom: 1px solid var(--border); padding-bottom: 6px; }

a { color: var(--accent); text-decoration: none; }
a:hover { text-decoration: underline; }

code, pre { font-family: "SFMono-Regular", Consolas, "Liberation Mono", Menlo, monospace; }
code { background: var(--code-bg); padding: 2px 4px; border-radius: 3px; font-size: 0.9em; }
pre { background: var(--code-bg); padding: 12px 16px; border-radius: 6px; overflow-x: auto; }
pre code { background: transparent; padding: 0; font-size: 0.9em; }
/* Mermaid blocks are rendered to SVG by mermaid.js, so they drop the code-block chrome. */
pre.mermaid { background: transparent; padding: 8px 0; text-align: center; }

table { border-collapse: collapse; margin: 12px 0; width: 100%; }
th, td { border: 1px solid var(--border); padding: 6px 10px; text-align: left; }
th { background: var(--code-bg); }

blockquote {
  border-left: 4px solid var(--border);
  margin: 0;
  padding: 8px 16px;
  color: var(--muted);
}
diff --git a/src/DocFlow.Documentation/Html/StaticSiteRenderer.cs b/src/DocFlow.Documentation/Html/StaticSiteRenderer.cs
new file mode 100644
index 0000000..528bc13
--- /dev/null
+++ 
b/src/DocFlow.Documentation/Html/StaticSiteRenderer.cs
using System.Reflection;
using System.Text;
using System.Text.RegularExpressions;
using DocFlow.Documentation.Models;
using Markdig;
using MarkdigMarkdown = Markdig.Markdown;

namespace DocFlow.Documentation.Html;

/// <summary>
/// Converts a Markdown documentation bundle into a self-contained static HTML site.
/// The Markdown is rendered with Markdig; Mermaid fences (```mermaid ... ```) are
/// rewritten to &lt;pre class="mermaid"&gt; so Mermaid.js picks them up; intra-bundle
/// .md links are rewritten to .html. A sidebar nav is built from the file tree
/// with the current page highlighted.
/// </summary>
public sealed class StaticSiteRenderer
{
    private const string MermaidCdn = "https://cdn.jsdelivr.net/npm/mermaid@10/dist/mermaid.min.js";

    private static readonly MarkdownPipeline Pipeline = new MarkdownPipelineBuilder()
        .UseAdvancedExtensions()
        .Build();

    // Matches href="...something.md" or href="...something.md#fragment" inside rendered HTML.
    private static readonly Regex MdHrefPattern = new(
        @"(href=""[^""]*?)\.md(#[^""]*)?""",
        RegexOptions.Compiled);

    /// <summary>
    /// Renders every .md file in the bundle to an .html page, passes non-Markdown files
    /// through unchanged, and appends the embedded theme stylesheet.
    /// </summary>
    public IReadOnlyList<GeneratedFile> Render(IReadOnlyList<GeneratedFile> markdownBundle)
    {
        var markdownFiles = markdownBundle
            .Where(f => f.RelativePath.EndsWith(".md", StringComparison.Ordinal))
            .OrderBy(f => f.RelativePath, StringComparer.Ordinal)
            .ToList();

        var navItems = BuildNavItems(markdownFiles);

        var output = new List<GeneratedFile>();

        foreach (var file in markdownBundle)
        {
            if (file.RelativePath.EndsWith(".md", StringComparison.Ordinal))
            {
                output.Add(RenderPage(file, navItems));
            }
            else
            {
                output.Add(file);
            }
        }

        output.Add(new GeneratedFile("assets/theme.css", LoadEmbeddedCss(), "text/css"));

        return output
            .OrderBy(f => f.RelativePath, StringComparer.Ordinal)
            .ToList();
    }

    // NOTE(review): the HTML string literals below were corrupted in the paste and have been
    // reconstructed from the visible skeleton and theme.css class names — verify against VCS.
    private static GeneratedFile RenderPage(GeneratedFile markdown, IReadOnlyList<NavItem> navItems)
    {
        // Markdig's advanced-diagrams extension renders ```mermaid fences as
        // <pre class="mermaid">, which Mermaid.js auto-initialises on load.
        var htmlBody = MarkdigMarkdown.ToHtml(markdown.Content, Pipeline);

        // Rewrite intra-bundle .md links to .html (preserving any fragment).
        htmlBody = MdHrefPattern.Replace(htmlBody, m =>
            $"{m.Groups[1].Value}.html{m.Groups[2].Value}\"");

        var htmlPath = markdown.RelativePath[..^3] + ".html";
        var relativeRoot = RelativeRootFor(htmlPath);
        var title = ExtractTitle(markdown.Content) ?? Path.GetFileNameWithoutExtension(htmlPath);
        var sidebar = RenderSidebar(navItems, htmlPath, relativeRoot);

        var sb = new StringBuilder();
        sb.Append("<!DOCTYPE html>\n");
        sb.Append("<html lang=\"en\">\n");
        sb.Append("<head>\n");
        sb.Append("  <meta charset=\"utf-8\">\n");
        sb.Append("  <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n");
        sb.Append($"  <title>{HtmlEncode(title)}</title>\n");
        sb.Append($"  <link rel=\"stylesheet\" href=\"{relativeRoot}assets/theme.css\">\n");
        sb.Append("</head>\n");
        sb.Append("<body>\n");
        sb.Append("<div class=\"layout\">\n");
        sb.Append(sidebar);
        sb.Append("<main class=\"content\">\n");
        sb.Append(htmlBody);
        sb.Append("</main>\n");
        sb.Append("</div>\n");
        sb.Append($"<script src=\"{MermaidCdn}\"></script>\n");
        sb.Append("<script>mermaid.initialize({ startOnLoad: true });</script>\n");
        sb.Append("</body>\n");
        sb.Append("</html>\n");

        return new GeneratedFile(htmlPath, sb.ToString(), "text/html");
    }

    // Builds the <aside> navigation; leaf items link directly, grouped items nest a sub-list.
    private static string RenderSidebar(IReadOnlyList<NavItem> items, string currentPath, string relativeRoot)
    {
        var sb = new StringBuilder();
        sb.Append("<aside class=\"sidebar\">\n<h2>Documentation</h2>\n");
        sb.Append("<ul>\n");

        foreach (var item in items)
        {
            if (item.Children.Count == 0)
            {
                RenderNavLink(sb, item, currentPath, relativeRoot);
            }
            else
            {
                sb.Append("<li>");
                sb.Append(HtmlEncode(item.Label));
                sb.Append("<ul>\n");
                foreach (var child in item.Children)
                {
                    RenderNavLink(sb, child, currentPath, relativeRoot);
                }
                sb.Append("</ul></li>\n");
            }
        }
        sb.Append("</ul>\n</aside>\n");
        return sb.ToString();
    }

    // Emits one <li><a> entry; the current page's link carries class="active".
    private static void RenderNavLink(StringBuilder sb, NavItem item, string currentPath, string relativeRoot)
    {
        var cls = string.Equals(item.Path, currentPath, StringComparison.Ordinal) ? " class=\"active\"" : "";
        sb.Append($"<li><a href=\"{relativeRoot}{item.Path}\"{cls}>{HtmlEncode(item.Label)}</a></li>\n");
    }

    private static IReadOnlyList<NavItem> BuildNavItems(IReadOnlyList<GeneratedFile> markdownFiles)
    {
        var items = new List<NavItem>();

        // Top-level files first (index, overview, domain-model, architecture, security…).
        foreach (var file in markdownFiles.Where(f => !f.RelativePath.Contains('/')))
        {
            var htmlPath = file.RelativePath[..^3] + ".html";
            items.Add(new NavItem(LinkLabelFor(file.RelativePath), htmlPath, []));
        }

        // Group subdirectories (endpoints/, sequences/) under a parent node.
        var grouped = markdownFiles
            .Where(f => f.RelativePath.Contains('/'))
            .GroupBy(f => f.RelativePath.Split('/')[0])
            .OrderBy(g => g.Key, StringComparer.Ordinal);

        foreach (var group in grouped)
        {
            var children = group
                .OrderBy(f => f.RelativePath, StringComparer.Ordinal)
                .Select(f => new NavItem(
                    LinkLabelFor(f.RelativePath),
                    f.RelativePath[..^3] + ".html",
                    []))
                .ToList();
            items.Add(new NavItem(Capitalize(group.Key), string.Empty, children));
        }

        return items;
    }

    // "domain-model.md" → "Domain Model".
    private static string LinkLabelFor(string relativePath)
    {
        var stem = Path.GetFileNameWithoutExtension(relativePath);
        return string.Join(' ', stem.Split('-').Select(Capitalize));
    }

    private static string Capitalize(string token) =>
        string.IsNullOrEmpty(token) ? token : char.ToUpperInvariant(token[0]) + token[1..];

    // "endpoints/get-pets.html" is one level deep → "../" back to the site root.
    private static string RelativeRootFor(string path)
    {
        var depth = path.Count(c => c == '/');
        return depth == 0 ? "./" : string.Concat(Enumerable.Repeat("../", depth));
    }

    private static string?
ExtractTitle(string markdown) + { + foreach (var line in markdown.Split('\n')) + { + var trimmed = line.TrimStart(); + if (trimmed.StartsWith("# ", StringComparison.Ordinal)) + { + return trimmed[2..].Trim(); + } + } + return null; + } + + private static string HtmlEncode(string value) => + System.Net.WebUtility.HtmlEncode(value); + + private static string LoadEmbeddedCss() + { + var assembly = typeof(StaticSiteRenderer).Assembly; + var name = assembly.GetManifestResourceNames() + .FirstOrDefault(n => n.EndsWith("theme.css", StringComparison.Ordinal)) + ?? throw new InvalidOperationException("theme.css embedded resource not found."); + using var stream = assembly.GetManifestResourceStream(name) + ?? throw new InvalidOperationException($"Could not load {name}."); + using var reader = new StreamReader(stream); + return reader.ReadToEnd(); + } + + private sealed record NavItem(string Label, string Path, IReadOnlyList Children); +} diff --git a/src/DocFlow.Documentation/Markdown/MarkdownDocumentationGenerator.cs b/src/DocFlow.Documentation/Markdown/MarkdownDocumentationGenerator.cs new file mode 100644 index 0000000..a6bbaa0 --- /dev/null +++ b/src/DocFlow.Documentation/Markdown/MarkdownDocumentationGenerator.cs @@ -0,0 +1,75 @@ +using DocFlow.Core.CanonicalModel; +using DocFlow.Diagrams.Mermaid; +using DocFlow.Documentation.Abstractions; +using DocFlow.Documentation.Markdown.Sections; +using DocFlow.Documentation.Models; +using DocFlow.Documentation.Options; + +namespace DocFlow.Documentation.Markdown; + +/// +/// Default : produces a Markdown bundle +/// (overview, domain model with Mermaid class diagram, per-group endpoint pages, index TOC). 
+/// +public sealed class MarkdownDocumentationGenerator : IDocumentationGenerator +{ + private readonly MermaidClassDiagramGenerator _classDiagramGenerator; + private readonly MermaidErDiagramGenerator _erDiagramGenerator; + private readonly MermaidSequenceDiagramGenerator _sequenceDiagramGenerator; + private readonly MermaidC4ContextGenerator _contextGenerator; + private readonly MermaidEndpointFlowchartGenerator _flowchartGenerator; + + public MarkdownDocumentationGenerator() + : this(new MermaidClassDiagramGenerator(), + new MermaidErDiagramGenerator(), + new MermaidSequenceDiagramGenerator(), + new MermaidC4ContextGenerator(), + new MermaidEndpointFlowchartGenerator()) + { + } + + public MarkdownDocumentationGenerator( + MermaidClassDiagramGenerator classDiagramGenerator, + MermaidErDiagramGenerator erDiagramGenerator, + MermaidSequenceDiagramGenerator sequenceDiagramGenerator, + MermaidC4ContextGenerator contextGenerator, + MermaidEndpointFlowchartGenerator flowchartGenerator) + { + _classDiagramGenerator = classDiagramGenerator; + _erDiagramGenerator = erDiagramGenerator; + _sequenceDiagramGenerator = sequenceDiagramGenerator; + _contextGenerator = contextGenerator; + _flowchartGenerator = flowchartGenerator; + } + + public async Task> GenerateAsync( + SemanticModel model, + DocumentationOptions options, + CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + var files = new List + { + OverviewSectionBuilder.Build(model, options), + await DomainModelSectionBuilder.BuildAsync(model, options, _classDiagramGenerator, _erDiagramGenerator, cancellationToken) + }; + + files.AddRange(ArchitectureSectionBuilder.Build(model, options, _contextGenerator)); + files.AddRange(SecuritySectionBuilder.Build(model, options)); + files.AddRange(EndpointSectionBuilder.Build(model, options, _sequenceDiagramGenerator, _flowchartGenerator)); + + // Index is built last so it can link every sibling file. 
+ files.Add(IndexSectionBuilder.Build(model, options, files)); + + // Preserve the source spec inside the bundle so consumers have the original handy. + if (options.SourceSpec is not null) + { + files.Add(options.SourceSpec); + } + + return files + .OrderBy(f => f.RelativePath, StringComparer.Ordinal) + .ToList(); + } +} diff --git a/src/DocFlow.Documentation/Markdown/MarkdownWriter.cs b/src/DocFlow.Documentation/Markdown/MarkdownWriter.cs new file mode 100644 index 0000000..2a6cf0a --- /dev/null +++ b/src/DocFlow.Documentation/Markdown/MarkdownWriter.cs @@ -0,0 +1,43 @@ +using System.Text; + +namespace DocFlow.Documentation.Markdown; + +/// +/// Tiny helper on top of that enforces LF line endings and +/// guards against trailing whitespace so generated Markdown passes common linters. +/// +internal sealed class MarkdownWriter +{ + private readonly StringBuilder _buffer = new(); + + public void Line() => _buffer.Append('\n'); + + public void Line(string text) + { + if (!string.IsNullOrEmpty(text)) + { + _buffer.Append(text.TrimEnd()); + } + _buffer.Append('\n'); + } + + public void Heading(int level, string text) + { + _buffer.Append(new string('#', Math.Clamp(level, 1, 6))); + _buffer.Append(' '); + _buffer.Append(text.Trim()); + _buffer.Append('\n'); + } + + public void Raw(string text) + { + // Collapse Windows-style line endings while preserving internal structure. 
+ foreach (var line in text.Replace("\r\n", "\n").Replace("\r", "\n").Split('\n')) + { + _buffer.Append(line.TrimEnd()); + _buffer.Append('\n'); + } + } + + public override string ToString() => _buffer.ToString(); +} diff --git a/src/DocFlow.Documentation/Markdown/Sections/ArchitectureSectionBuilder.cs b/src/DocFlow.Documentation/Markdown/Sections/ArchitectureSectionBuilder.cs new file mode 100644 index 0000000..a795a6d --- /dev/null +++ b/src/DocFlow.Documentation/Markdown/Sections/ArchitectureSectionBuilder.cs @@ -0,0 +1,63 @@ +using DocFlow.Core.CanonicalModel; +using DocFlow.Diagrams.Mermaid; +using DocFlow.Documentation.Models; +using DocFlow.Documentation.Options; + +namespace DocFlow.Documentation.Markdown.Sections; + +/// +/// Emits architecture.md (overview sentence + context diagram + server table) and a +/// standalone diagrams/context.mmd file. +/// +internal static class ArchitectureSectionBuilder +{ + public static IEnumerable Build( + SemanticModel model, + DocumentationOptions options, + MermaidC4ContextGenerator contextGenerator) + { + if ((options.Diagrams & DiagramKinds.Context) == 0) + { + return []; + } + + var api = model.Api; + var contextDiagram = contextGenerator.Generate(api); + + var writer = new MarkdownWriter(); + writer.Heading(1, "Architecture"); + writer.Line(); + + writer.Heading(2, "System Context"); + writer.Line(); + writer.Line("```mermaid"); + writer.Raw(contextDiagram); + writer.Line("```"); + writer.Line(); + + writer.Heading(2, "Deployments"); + writer.Line(); + + if (api is null || api.Servers.Count == 0) + { + writer.Line("_No servers declared._"); + writer.Line(); + } + else + { + writer.Line("| URL | Description |"); + writer.Line("| --- | --- |"); + foreach (var server in api.Servers.OrderBy(s => s.Url, StringComparer.Ordinal)) + { + writer.Line($"| `{server.Url}` | {server.Description ?? 
""} |"); + } + writer.Line(); + } + + return + [ + new GeneratedFile("architecture.md", writer.ToString(), "text/markdown"), + new GeneratedFile("diagrams/context.mmd", contextDiagram, "text/vnd.mermaid") + ]; + } +} diff --git a/src/DocFlow.Documentation/Markdown/Sections/DomainModelSectionBuilder.cs b/src/DocFlow.Documentation/Markdown/Sections/DomainModelSectionBuilder.cs new file mode 100644 index 0000000..3c2bcd3 --- /dev/null +++ b/src/DocFlow.Documentation/Markdown/Sections/DomainModelSectionBuilder.cs @@ -0,0 +1,99 @@ +using DocFlow.Core.CanonicalModel; +using DocFlow.Diagrams.Mermaid; +using DocFlow.Documentation.Models; +using DocFlow.Documentation.Options; + +namespace DocFlow.Documentation.Markdown.Sections; + +/// +/// Emits domain-model.md: a Mermaid class diagram followed by an entity summary table. +/// +internal static class DomainModelSectionBuilder +{ + public static async Task BuildAsync( + SemanticModel model, + DocumentationOptions options, + MermaidClassDiagramGenerator classDiagramGenerator, + MermaidErDiagramGenerator erDiagramGenerator, + CancellationToken cancellationToken) + { + var writer = new MarkdownWriter(); + writer.Heading(1, "Domain Model"); + writer.Line(); + + if (model.Entities.Count == 0) + { + writer.Line("_No entities discovered._"); + writer.Line(); + return new GeneratedFile("domain-model.md", writer.ToString(), "text/markdown"); + } + + var deterministicModel = SortEntitiesForDeterminism(model); + + if ((options.Diagrams & DiagramKinds.Class) != 0) + { + var diagram = await classDiagramGenerator.GenerateAsync(deterministicModel, options: null, cancellationToken); + + writer.Line("```mermaid"); + writer.Raw(diagram.Content ?? 
string.Empty); + writer.Line("```"); + writer.Line(); + } + + if ((options.Diagrams & DiagramKinds.Er) != 0) + { + var er = erDiagramGenerator.Generate(deterministicModel); + writer.Line("```mermaid"); + writer.Raw(er); + writer.Line("```"); + writer.Line(); + } + + writer.Heading(2, "Entities"); + writer.Line(); + writer.Line("| Entity | Stereotype | Properties |"); + writer.Line("| --- | --- | --- |"); + foreach (var entity in model.Entities.Values.OrderBy(e => e.Name, StringComparer.Ordinal)) + { + var stereotype = entity.Classification == EntityClassification.Unknown + ? "" + : entity.Classification.ToString(); + // Anchor is inlined inside the cell so cross-links from endpoint pages resolve to + // this row without breaking table rendering. + writer.Line($"| `{entity.Name}` | {stereotype} | {entity.Properties.Count} |"); + } + writer.Line(); + + return new GeneratedFile("domain-model.md", writer.ToString(), "text/markdown"); + } + + /// + /// The upstream class-diagram generator iterates in + /// dictionary order. Reinsert entities sorted alphabetically so the rendered diagram is + /// stable across runs regardless of upstream insertion order. 
+ /// + private static SemanticModel SortEntitiesForDeterminism(SemanticModel source) + { + var clone = new SemanticModel + { + Id = source.Id, + Name = source.Name, + Description = source.Description, + Version = source.Version, + Provenance = source.Provenance, + Api = source.Api + }; + + foreach (var entity in source.Entities.Values.OrderBy(e => e.Name, StringComparer.Ordinal)) + { + clone.Entities[entity.Id] = entity; + } + + clone.Relationships.AddRange(source.Relationships + .OrderBy(r => r.SourceEntityId, StringComparer.Ordinal) + .ThenBy(r => r.TargetEntityId, StringComparer.Ordinal) + .ThenBy(r => r.Type)); + + return clone; + } +} diff --git a/src/DocFlow.Documentation/Markdown/Sections/EndpointSectionBuilder.cs b/src/DocFlow.Documentation/Markdown/Sections/EndpointSectionBuilder.cs new file mode 100644 index 0000000..237e475 --- /dev/null +++ b/src/DocFlow.Documentation/Markdown/Sections/EndpointSectionBuilder.cs @@ -0,0 +1,349 @@ +using DocFlow.Core.CanonicalModel; +using DocFlow.Diagrams.Mermaid; +using DocFlow.Documentation.Examples; +using DocFlow.Documentation.Models; +using DocFlow.Documentation.Options; + +namespace DocFlow.Documentation.Markdown.Sections; + +/// +/// Emits endpoints/<group>.md: one page per tag (or per first path segment). +/// When is enabled, each operation's sequence diagram is +/// also embedded on the endpoint page and emitted as sequences/<operationId>.md. 
+/// +internal static class EndpointSectionBuilder +{ + private const string UntaggedBucket = "Untagged"; + + public static IEnumerable Build( + SemanticModel model, + DocumentationOptions options, + MermaidSequenceDiagramGenerator sequenceDiagramGenerator, + MermaidEndpointFlowchartGenerator flowchartGenerator) + { + var api = model.Api; + if (api is null || api.Operations.Count == 0) + { + return []; + } + + var sequenceEnabled = (options.Diagrams & DiagramKinds.Sequence) != 0; + var flowEnabled = (options.Diagrams & DiagramKinds.Flow) != 0; + var entityNames = model.Entities.Values + .Select(e => e.Name) + .ToHashSet(StringComparer.Ordinal); + var synthesizer = options.WithExamples ? new ExampleSynthesizer(model) : null; + + var groups = api.Operations + .GroupBy(op => ChooseGroup(op, options.GroupBy)) + .OrderBy(g => g.Key, StringComparer.Ordinal) + .ToList(); + + var files = new List(); + + foreach (var group in groups) + { + files.Add(BuildPage( + group.Key, + group.ToList(), + sequenceEnabled, + flowEnabled, + sequenceDiagramGenerator, + flowchartGenerator, + entityNames, + synthesizer)); + } + + if (sequenceEnabled) + { + foreach (var operation in api.Operations.OrderBy(o => o.OperationId, StringComparer.Ordinal)) + { + files.Add(BuildSequencePage(operation, sequenceDiagramGenerator)); + } + } + + return files; + } + + private static string ChooseGroup(ApiOperation op, GroupBy groupBy) + { + if (groupBy == GroupBy.Tag) + { + var firstTag = op.Tags.FirstOrDefault(t => !string.IsNullOrWhiteSpace(t)); + return firstTag ?? UntaggedBucket; + } + + // GroupBy.Path + var path = op.Path.Trim('/'); + if (string.IsNullOrEmpty(path)) return "root"; + var slash = path.IndexOf('/'); + return slash < 0 ? 
path : path[..slash]; + } + + private static GeneratedFile BuildPage( + string groupName, + List operations, + bool sequenceEnabled, + bool flowEnabled, + MermaidSequenceDiagramGenerator sequenceDiagramGenerator, + MermaidEndpointFlowchartGenerator flowchartGenerator, + HashSet entityNames, + ExampleSynthesizer? synthesizer) + { + var writer = new MarkdownWriter(); + writer.Heading(1, groupName); + writer.Line(); + + foreach (var op in operations.OrderBy(o => o.OperationId, StringComparer.Ordinal)) + { + WriteOperation( + writer, + op, + sequenceEnabled, + flowEnabled, + sequenceDiagramGenerator, + flowchartGenerator, + entityNames, + synthesizer); + } + + var slug = Slug.Kebab(groupName); + if (string.IsNullOrEmpty(slug)) slug = "untagged"; + return new GeneratedFile($"endpoints/{slug}.md", writer.ToString(), "text/markdown"); + } + + private static GeneratedFile BuildSequencePage(ApiOperation op, MermaidSequenceDiagramGenerator generator) + { + var writer = new MarkdownWriter(); + writer.Heading(1, $"`{op.Method.ToString().ToUpperInvariant()} {op.Path}`"); + writer.Line(); + writer.Line($"**Operation ID:** `{op.OperationId}`"); + writer.Line(); + writer.Line("```mermaid"); + writer.Raw(generator.Generate(op)); + writer.Line("```"); + writer.Line(); + + return new GeneratedFile($"sequences/{op.OperationId}.md", writer.ToString(), "text/markdown"); + } + + private static void WriteOperation( + MarkdownWriter writer, + ApiOperation op, + bool sequenceEnabled, + bool flowEnabled, + MermaidSequenceDiagramGenerator sequenceDiagramGenerator, + MermaidEndpointFlowchartGenerator flowchartGenerator, + HashSet entityNames, + ExampleSynthesizer? 
synthesizer) + { + writer.Heading(2, $"`{op.Method.ToString().ToUpperInvariant()} {op.Path}`"); + writer.Line(); + writer.Line($"**Operation ID:** `{op.OperationId}`"); + writer.Line(); + + if (!string.IsNullOrWhiteSpace(op.Summary)) + { + writer.Line(op.Summary.Trim()); + writer.Line(); + } + + if (!string.IsNullOrWhiteSpace(op.Description)) + { + writer.Line(op.Description.Trim()); + writer.Line(); + } + + if (op.Deprecated) + { + writer.Line("> **Deprecated.**"); + writer.Line(); + } + + WriteParameters(writer, op, entityNames); + WriteRequestBody(writer, op, entityNames); + WriteResponses(writer, op, entityNames); + + if (synthesizer is not null) + { + WriteExamples(writer, op, synthesizer); + } + + if (sequenceEnabled) + { + writer.Line("```mermaid"); + writer.Raw(sequenceDiagramGenerator.Generate(op)); + writer.Line("```"); + writer.Line(); + } + + if (flowEnabled) + { + writer.Line("```mermaid"); + writer.Raw(flowchartGenerator.Generate(op)); + writer.Line("```"); + writer.Line(); + } + } + + private static void WriteParameters(MarkdownWriter writer, ApiOperation op, HashSet entityNames) + { + if (op.Parameters.Count == 0) return; + + writer.Heading(3, "Parameters"); + writer.Line(); + writer.Line("| Name | In | Type | Required | Description |"); + writer.Line("| --- | --- | --- | --- | --- |"); + + foreach (var param in op.Parameters + .OrderBy(p => p.Location) + .ThenBy(p => p.Name, StringComparer.Ordinal)) + { + var type = DescribeSchema(param.Schema, entityNames); + var required = param.Required ? "yes" : "no"; + var description = (param.Description ?? 
"").Replace('|', '/').Replace('\n', ' ').Trim(); + writer.Line($"| `{param.Name}` | {param.Location.ToString().ToLowerInvariant()} | {type} | {required} | {description} |"); + } + writer.Line(); + } + + private static void WriteRequestBody(MarkdownWriter writer, ApiOperation op, HashSet entityNames) + { + if (op.RequestBody is null || op.RequestBody.Content.Count == 0) return; + + writer.Heading(3, "Request Body"); + writer.Line(); + + if (op.RequestBody.Required) + { + writer.Line("_Required._"); + writer.Line(); + } + + foreach (var kvp in op.RequestBody.Content.OrderBy(c => c.Key, StringComparer.Ordinal)) + { + writer.Line($"- `{kvp.Key}` → {DescribeMedia(kvp.Value, entityNames)}"); + } + writer.Line(); + } + + private static void WriteResponses(MarkdownWriter writer, ApiOperation op, HashSet entityNames) + { + if (op.Responses.Count == 0) return; + + writer.Heading(3, "Responses"); + writer.Line(); + writer.Line("| Status | Content-Type | Schema | Description |"); + writer.Line("| --- | --- | --- | --- |"); + + foreach (var kvp in op.Responses.OrderBy(r => r.Key, StringComparer.Ordinal)) + { + var response = kvp.Value; + var description = (response.Description ?? 
"").Replace('|', '/').Replace('\n', ' ').Trim(); + + if (response.Content.Count == 0) + { + writer.Line($"| `{kvp.Key}` | _none_ | _none_ | {description} |"); + continue; + } + + foreach (var content in response.Content.OrderBy(c => c.Key, StringComparer.Ordinal)) + { + writer.Line($"| `{kvp.Key}` | `{content.Key}` | {DescribeMedia(content.Value, entityNames)} | {description} |"); + } + } + writer.Line(); + } + + private static void WriteExamples(MarkdownWriter writer, ApiOperation op, ExampleSynthesizer synthesizer) + { + var requestBodyExample = op.RequestBody?.Content + .OrderBy(c => c.Key, StringComparer.Ordinal) + .Select(c => synthesizer.Synthesize(c.Value)) + .FirstOrDefault(e => !string.IsNullOrWhiteSpace(e)); + + var responseExample = op.Responses + .OrderBy(r => r.Key, StringComparer.Ordinal) + .Where(r => r.Key.StartsWith("2", StringComparison.Ordinal)) + .SelectMany(r => r.Value.Content.OrderBy(c => c.Key, StringComparer.Ordinal)) + .Select(c => synthesizer.Synthesize(c.Value)) + .FirstOrDefault(e => !string.IsNullOrWhiteSpace(e)); + + if (requestBodyExample is null && responseExample is null) return; + + writer.Heading(3, "Example Request/Response"); + writer.Line(); + + if (requestBodyExample is not null) + { + writer.Line("**Request:**"); + writer.Line(); + writer.Line("```json"); + writer.Raw(requestBodyExample); + writer.Line("```"); + writer.Line(); + } + + if (responseExample is not null) + { + writer.Line("**Response:**"); + writer.Line(); + writer.Line("```json"); + writer.Raw(responseExample); + writer.Line("```"); + writer.Line(); + } + } + + private static string DescribeMedia(ApiMediaType media, HashSet entityNames) + { + if (!string.IsNullOrEmpty(media.EntityName)) + { + return EntityLink(media.EntityName, entityNames); + } + + return DescribeSchema(media, entityNames); + } + + private static string DescribeSchema(ApiMediaType? 
media, HashSet entityNames) + { + if (media is null) return "`unknown`"; + + if (!string.IsNullOrEmpty(media.EntityName)) + { + return EntityLink(media.EntityName, entityNames); + } + + var schema = media.Schema; + if (schema is null) return "`unknown`"; + + return FormatSchema(schema, entityNames); + } + + private static string FormatSchema(ApiSchema schema, HashSet entityNames) + { + if (schema.Type == "array" && schema.Items is not null) + { + return $"array<{FormatSchema(schema.Items, entityNames)}>"; + } + if (!string.IsNullOrEmpty(schema.EntityName)) + { + return EntityLink(schema.EntityName, entityNames); + } + return string.IsNullOrEmpty(schema.Format) ? $"`{schema.Type}`" : $"`{schema.Type}({schema.Format})`"; + } + + /// + /// Renders a reference to a named entity. When the name matches a known + /// , emit a Markdown link to its anchor inside + /// domain-model.md; otherwise fall back to an inline code span. + /// + private static string EntityLink(string name, HashSet entityNames) + { + if (entityNames.Contains(name)) + { + return $"[`{name}`](../domain-model.md#entity-{Slug.Kebab(name)})"; + } + return $"`{name}`"; + } +} diff --git a/src/DocFlow.Documentation/Markdown/Sections/IndexSectionBuilder.cs b/src/DocFlow.Documentation/Markdown/Sections/IndexSectionBuilder.cs new file mode 100644 index 0000000..3e45684 --- /dev/null +++ b/src/DocFlow.Documentation/Markdown/Sections/IndexSectionBuilder.cs @@ -0,0 +1,59 @@ +using DocFlow.Core.CanonicalModel; +using DocFlow.Documentation.Models; +using DocFlow.Documentation.Options; + +namespace DocFlow.Documentation.Markdown.Sections; + +/// +/// Emits index.md: a TOC linking every other Markdown file in the bundle. +/// +internal static class IndexSectionBuilder +{ + public static GeneratedFile Build(SemanticModel model, DocumentationOptions options, IReadOnlyList siblings) + { + var writer = new MarkdownWriter(); + var title = options.Title ?? model.Api?.Title ?? model.Name ?? 
"API"; + + writer.Heading(1, $"{title} — Documentation"); + writer.Line(); + + var markdownFiles = siblings + .Where(f => f.RelativePath.EndsWith(".md", StringComparison.Ordinal) + && !f.RelativePath.Equals("index.md", StringComparison.Ordinal)) + .OrderBy(f => f.RelativePath, StringComparer.Ordinal) + .ToList(); + + var topLevel = markdownFiles.Where(f => !f.RelativePath.Contains('/')).ToList(); + var endpointPages = markdownFiles + .Where(f => f.RelativePath.StartsWith("endpoints/", StringComparison.Ordinal)) + .ToList(); + + foreach (var file in topLevel) + { + writer.Line($"- [{LinkLabel(file.RelativePath)}](./{file.RelativePath})"); + } + + if (endpointPages.Count > 0) + { + writer.Line("- Endpoints"); + foreach (var file in endpointPages) + { + writer.Line($" - [{LinkLabel(file.RelativePath)}](./{file.RelativePath})"); + } + } + + writer.Line(); + return new GeneratedFile("index.md", writer.ToString(), "text/markdown"); + } + + private static string LinkLabel(string relativePath) + { + var fileName = relativePath.Split('/').Last(); + var stem = fileName.EndsWith(".md", StringComparison.Ordinal) ? fileName[..^3] : fileName; + // Turn "domain-model" → "Domain Model", "pet" → "Pet" + return string.Join(' ', stem.Split('-').Select(Capitalize)); + } + + private static string Capitalize(string token) => + string.IsNullOrEmpty(token) ? token : char.ToUpperInvariant(token[0]) + token[1..]; +} diff --git a/src/DocFlow.Documentation/Markdown/Sections/OverviewSectionBuilder.cs b/src/DocFlow.Documentation/Markdown/Sections/OverviewSectionBuilder.cs new file mode 100644 index 0000000..77d2484 --- /dev/null +++ b/src/DocFlow.Documentation/Markdown/Sections/OverviewSectionBuilder.cs @@ -0,0 +1,80 @@ +using DocFlow.Core.CanonicalModel; +using DocFlow.Documentation.Models; +using DocFlow.Documentation.Options; + +namespace DocFlow.Documentation.Markdown.Sections; + +/// +/// Emits overview.md: API title, version, description, servers table, and auth summary. 
+/// +internal static class OverviewSectionBuilder +{ + public static GeneratedFile Build(SemanticModel model, DocumentationOptions options) + { + var api = model.Api; + var writer = new MarkdownWriter(); + + var title = options.Title ?? api?.Title ?? model.Name ?? "API"; + writer.Heading(1, $"{title} — Overview"); + writer.Line(); + + if (!string.IsNullOrWhiteSpace(api?.Version)) + { + writer.Line($"**Version:** {api.Version}"); + writer.Line(); + } + + if (!string.IsNullOrWhiteSpace(api?.Description)) + { + writer.Line(api.Description.Trim()); + writer.Line(); + } + + WriteServers(writer, api); + WriteAuthentication(writer, api); + + return new GeneratedFile("overview.md", writer.ToString(), "text/markdown"); + } + + private static void WriteServers(MarkdownWriter writer, ApiSurface? api) + { + writer.Heading(2, "Servers"); + writer.Line(); + + if (api is null || api.Servers.Count == 0) + { + writer.Line("_No servers declared._"); + writer.Line(); + return; + } + + writer.Line("| URL | Description |"); + writer.Line("| --- | --- |"); + foreach (var server in api.Servers.OrderBy(s => s.Url, StringComparer.Ordinal)) + { + writer.Line($"| `{server.Url}` | {server.Description ?? ""} |"); + } + writer.Line(); + } + + private static void WriteAuthentication(MarkdownWriter writer, ApiSurface? 
api)
    {
        writer.Heading(2, "Authentication");
        writer.Line();

        // No API surface or no declared schemes → explicit placeholder instead of an empty table.
        if (api is null || api.SecuritySchemes.Count == 0)
        {
            writer.Line("_No authentication configured._");
            writer.Line();
            return;
        }

        writer.Line("| Scheme | Type |");
        writer.Line("| --- | --- |");
        foreach (var (schemeName, scheme) in api.SecuritySchemes.OrderBy(k => k.Key, StringComparer.Ordinal))
        {
            writer.Line($"| `{schemeName}` | {scheme.Type} |");
        }
        writer.Line();
    }
}
diff --git a/src/DocFlow.Documentation/Markdown/Sections/SecuritySectionBuilder.cs b/src/DocFlow.Documentation/Markdown/Sections/SecuritySectionBuilder.cs
new file mode 100644
index 0000000..4e1ddc4
--- /dev/null
+++ b/src/DocFlow.Documentation/Markdown/Sections/SecuritySectionBuilder.cs
@@ -0,0 +1,177 @@
using System.Text;
using DocFlow.Core.CanonicalModel;
using DocFlow.Documentation.Models;
using DocFlow.Documentation.Options;

namespace DocFlow.Documentation.Markdown.Sections;

/// <summary>
/// Emits security.md — a scheme table, a Mermaid sequence diagram per OAuth2 flow, and a
/// cross-reference linking each operation to the schemes it requires.
/// Produces an empty list when the model declares no security schemes and no operation declares
/// security requirements.
+/// +internal static class SecuritySectionBuilder +{ + public static IEnumerable Build(SemanticModel model, DocumentationOptions options) + { + var api = model.Api; + if (api is null) return []; + + if (api.SecuritySchemes.Count == 0 && api.Operations.All(o => o.SecurityRequirements.Count == 0)) + { + return []; + } + + var writer = new MarkdownWriter(); + writer.Heading(1, "Security"); + writer.Line(); + + WriteSchemeTable(writer, api); + WriteOAuthFlowDiagrams(writer, api); + WritePerOperationTable(writer, api); + + return [new GeneratedFile("security.md", writer.ToString(), "text/markdown")]; + } + + private static void WriteSchemeTable(MarkdownWriter writer, ApiSurface api) + { + writer.Heading(2, "Schemes"); + writer.Line(); + + if (api.SecuritySchemes.Count == 0) + { + writer.Line("_No security schemes declared, but some operations reference unknown schemes._"); + writer.Line(); + return; + } + + writer.Line("| Scheme | Type | Details |"); + writer.Line("| --- | --- | --- |"); + foreach (var kvp in api.SecuritySchemes.OrderBy(k => k.Key, StringComparer.Ordinal)) + { + writer.Line($"| `{kvp.Key}` | {kvp.Value.Type} | {DescribeScheme(kvp.Value)} |"); + } + writer.Line(); + } + + private static void WriteOAuthFlowDiagrams(MarkdownWriter writer, ApiSurface api) + { + var oauth = api.SecuritySchemes + .Where(kvp => kvp.Value.Type == ApiSecuritySchemeType.OAuth2 && kvp.Value.Flows.Count > 0) + .OrderBy(kvp => kvp.Key, StringComparer.Ordinal) + .ToList(); + + if (oauth.Count == 0) return; + + writer.Heading(2, "OAuth2 Flows"); + writer.Line(); + + foreach (var (name, scheme) in oauth) + { + foreach (var (flowName, flow) in scheme.Flows.OrderBy(f => f.Key, StringComparer.Ordinal)) + { + writer.Heading(3, $"`{name}` — {flowName}"); + writer.Line(); + writer.Line("```mermaid"); + writer.Raw(RenderFlowSequence(flowName, flow)); + writer.Line("```"); + writer.Line(); + + if (flow.Scopes.Count > 0) + { + writer.Line("**Scopes:**"); + writer.Line(); + foreach (var scope 
in flow.Scopes.OrderBy(s => s.Key, StringComparer.Ordinal)) + { + writer.Line($"- `{scope.Key}` — {scope.Value}"); + } + writer.Line(); + } + } + } + } + + private static void WritePerOperationTable(MarkdownWriter writer, ApiSurface api) + { + var secured = api.Operations + .Where(o => o.SecurityRequirements.Count > 0) + .OrderBy(o => o.OperationId, StringComparer.Ordinal) + .ToList(); + + if (secured.Count == 0) return; + + writer.Heading(2, "Per-operation requirements"); + writer.Line(); + writer.Line("| Operation | Method | Path | Schemes (scopes) |"); + writer.Line("| --- | --- | --- | --- |"); + + foreach (var op in secured) + { + var schemes = string.Join("; ", op.SecurityRequirements.Select(req => + string.Join(" + ", req.Schemes + .OrderBy(k => k.Key, StringComparer.Ordinal) + .Select(k => k.Value.Count == 0 ? k.Key : $"{k.Key} ({string.Join(", ", k.Value)})")))); + + writer.Line($"| `{op.OperationId}` | {op.Method.ToString().ToUpperInvariant()} | `{op.Path}` | {schemes} |"); + } + writer.Line(); + } + + private static string DescribeScheme(ApiSecurityScheme scheme) => scheme.Type switch + { + ApiSecuritySchemeType.ApiKey => $"in {scheme.In?.ToString().ToLowerInvariant() ?? "?"} as `{scheme.ParameterName ?? "?"}`", + ApiSecuritySchemeType.Http => $"scheme `{scheme.Scheme ?? "?"}`" + (string.IsNullOrEmpty(scheme.BearerFormat) ? "" : $", format `{scheme.BearerFormat}`"), + ApiSecuritySchemeType.OAuth2 => $"{scheme.Flows.Count} flow(s): {string.Join(", ", scheme.Flows.Keys.OrderBy(k => k, StringComparer.Ordinal))}", + ApiSecuritySchemeType.OpenIdConnect => $"discovery: {scheme.OpenIdConnectUrl ?? "?"}", + _ => scheme.Description ?? 
"" + }; + + private static string RenderFlowSequence(string flowName, ApiSecurityFlow flow) + { + var sb = new StringBuilder(); + sb.Append("sequenceDiagram\n"); + sb.Append(" participant Client\n"); + sb.Append(" participant Auth\n"); + sb.Append(" participant API\n"); + + switch (flowName) + { + case "authorizationCode": + sb.Append(" Client->>Auth: GET authorize\n"); + sb.Append(" Auth-->>Client: authorization code\n"); + sb.Append(" Client->>Auth: POST token (code)\n"); + sb.Append(" Auth-->>Client: access token\n"); + sb.Append(" Client->>API: request (bearer)\n"); + sb.Append(" API-->>Client: response\n"); + break; + case "implicit": + sb.Append(" Client->>Auth: GET authorize\n"); + sb.Append(" Auth-->>Client: access token (fragment)\n"); + sb.Append(" Client->>API: request (bearer)\n"); + sb.Append(" API-->>Client: response\n"); + break; + case "clientCredentials": + sb.Append(" Client->>Auth: POST token (client_credentials)\n"); + sb.Append(" Auth-->>Client: access token\n"); + sb.Append(" Client->>API: request (bearer)\n"); + sb.Append(" API-->>Client: response\n"); + break; + case "password": + sb.Append(" Client->>Auth: POST token (password grant)\n"); + sb.Append(" Auth-->>Client: access token\n"); + sb.Append(" Client->>API: request (bearer)\n"); + sb.Append(" API-->>Client: response\n"); + break; + default: + sb.Append(" Client->>Auth: acquire token\n"); + sb.Append(" Auth-->>Client: access token\n"); + sb.Append(" Client->>API: request (bearer)\n"); + sb.Append(" API-->>Client: response\n"); + break; + } + + _ = flow; // flow-specific URLs are shown in the scheme table, not the diagram. 
+ return sb.ToString(); + } +} diff --git a/src/DocFlow.Documentation/Markdown/Slug.cs b/src/DocFlow.Documentation/Markdown/Slug.cs new file mode 100644 index 0000000..3de2259 --- /dev/null +++ b/src/DocFlow.Documentation/Markdown/Slug.cs @@ -0,0 +1,44 @@ +using System.Text; + +namespace DocFlow.Documentation.Markdown; + +/// +/// Deterministic slug utilities for filenames and cross-links. +/// +internal static class Slug +{ + /// + /// Convert an arbitrary display string (tag name, entity name, path segment) into a + /// lowercase kebab-case slug suitable for filenames and anchor ids. + /// + public static string Kebab(string value) + { + if (string.IsNullOrEmpty(value)) return string.Empty; + + var builder = new StringBuilder(value.Length); + var previousWasDash = false; + + for (var i = 0; i < value.Length; i++) + { + var ch = value[i]; + + if (char.IsLetterOrDigit(ch)) + { + // Insert a dash before a capital that follows a lowercase or digit (camelCase → kebab-case). + if (char.IsUpper(ch) && i > 0 && (char.IsLower(value[i - 1]) || char.IsDigit(value[i - 1])) && !previousWasDash) + { + builder.Append('-'); + } + builder.Append(char.ToLowerInvariant(ch)); + previousWasDash = false; + } + else if (!previousWasDash && builder.Length > 0) + { + builder.Append('-'); + previousWasDash = true; + } + } + + return builder.ToString().TrimEnd('-'); + } +} diff --git a/src/DocFlow.Documentation/Models/GeneratedFile.cs b/src/DocFlow.Documentation/Models/GeneratedFile.cs new file mode 100644 index 0000000..2f087d5 --- /dev/null +++ b/src/DocFlow.Documentation/Models/GeneratedFile.cs @@ -0,0 +1,12 @@ +namespace DocFlow.Documentation.Models; + +/// +/// A single file in a generated documentation bundle. +/// +/// Forward-slash path relative to the output root (e.g. endpoints/pet.md). +/// The file's text or binary content. +/// MIME type (e.g. text/markdown, application/json, text/vnd.mermaid). 
+public sealed record GeneratedFile( + string RelativePath, + string Content, + string MediaType); diff --git a/src/DocFlow.Documentation/Options/DiagramKinds.cs b/src/DocFlow.Documentation/Options/DiagramKinds.cs new file mode 100644 index 0000000..fa8a113 --- /dev/null +++ b/src/DocFlow.Documentation/Options/DiagramKinds.cs @@ -0,0 +1,16 @@ +namespace DocFlow.Documentation.Options; + +/// +/// Bitmask selecting which diagram kinds to emit in the documentation bundle. +/// +[Flags] +public enum DiagramKinds +{ + None = 0, + Class = 1 << 0, + Er = 1 << 1, + Sequence = 1 << 2, + Context = 1 << 3, + Flow = 1 << 4, + All = Class | Er | Sequence | Context | Flow +} diff --git a/src/DocFlow.Documentation/Options/DocumentationFormat.cs b/src/DocFlow.Documentation/Options/DocumentationFormat.cs new file mode 100644 index 0000000..9c7b3c5 --- /dev/null +++ b/src/DocFlow.Documentation/Options/DocumentationFormat.cs @@ -0,0 +1,11 @@ +namespace DocFlow.Documentation.Options; + +/// Output format for the documentation bundle. +public enum DocumentationFormat +{ + /// Plain Markdown files with embedded Mermaid fences. + Markdown, + + /// Static HTML site rendered from the Markdown bundle (Phase 4). + Html +} diff --git a/src/DocFlow.Documentation/Options/DocumentationOptions.cs b/src/DocFlow.Documentation/Options/DocumentationOptions.cs new file mode 100644 index 0000000..b4764dd --- /dev/null +++ b/src/DocFlow.Documentation/Options/DocumentationOptions.cs @@ -0,0 +1,31 @@ +using DocFlow.Documentation.Models; + +namespace DocFlow.Documentation.Options; + +/// +/// Options controlling how a documentation bundle is generated. +/// +public sealed record DocumentationOptions +{ + /// Output format. Defaults to . + public DocumentationFormat Format { get; init; } = DocumentationFormat.Markdown; + + /// Diagram kinds to emit. Phase 1 default is . 
+ public DiagramKinds Diagrams { get; init; } = DiagramKinds.Class; + + /// When true, emit Example Request/Response blocks on endpoint pages (Phase 3). + public bool WithExamples { get; init; } + + /// How to group endpoint pages. Defaults to . + public GroupBy GroupBy { get; init; } = GroupBy.Tag; + + /// Override the API title rendered in the bundle. Null means "use the spec title". + public string? Title { get; init; } + + /// + /// Optional raw source spec to include in the bundle under assets/. + /// Callers (the CLI) supply the original bytes so the spec is preserved verbatim. + /// The generator emits this file as-is without parsing or re-encoding. + /// + public GeneratedFile? SourceSpec { get; init; } +} diff --git a/src/DocFlow.Documentation/Options/GroupBy.cs b/src/DocFlow.Documentation/Options/GroupBy.cs new file mode 100644 index 0000000..858c02e --- /dev/null +++ b/src/DocFlow.Documentation/Options/GroupBy.cs @@ -0,0 +1,11 @@ +namespace DocFlow.Documentation.Options; + +/// How endpoint pages are grouped. +public enum GroupBy +{ + /// Group by the operation's first tag (fallback: Untagged). + Tag, + + /// Group by the first segment of the operation's URL path. + Path +} diff --git a/src/DocFlow.Integration/Schemas/OpenApi/OpenApiParser.cs b/src/DocFlow.Integration/Schemas/OpenApi/OpenApiParser.cs index a132b62..5ebfc0d 100644 --- a/src/DocFlow.Integration/Schemas/OpenApi/OpenApiParser.cs +++ b/src/DocFlow.Integration/Schemas/OpenApi/OpenApiParser.cs @@ -11,8 +11,48 @@ namespace DocFlow.Integration.Schemas.OpenApi; /// /// Parses OpenAPI 3.x specifications into the semantic model. /// -public sealed class OpenApiParser : ISchemaParser +public sealed class OpenApiParser : ISchemaParser, IApiSpecParser { + // IApiSpecParser members (Phase 5: pluggable spec parsing). + + string IApiSpecParser.Name => "OpenAPI"; + + bool IApiSpecParser.CanParse(string? path, string? 
content) + { + if (!string.IsNullOrEmpty(path)) + { + var ext = Path.GetExtension(path).ToLowerInvariant(); + if (ext is ".json" or ".yaml" or ".yml") return true; + } + + if (!string.IsNullOrEmpty(content)) + { + return content.Contains("openapi", StringComparison.Ordinal) + || content.Contains("swagger", StringComparison.Ordinal); + } + + return false; + } + + async Task IApiSpecParser.ParseAsync(Stream input, CancellationToken cancellationToken) + { + using var reader = new StreamReader(input, leaveOpen: true); + var content = await reader.ReadToEndAsync(cancellationToken); + + var result = await ParseSchemaAsync( + ParserInput.FromContent(content), + options: null, + cancellationToken); + + if (!result.Success) + { + var errors = string.Join("; ", result.Errors.Select(e => $"{e.Code}: {e.Message}")); + throw new FormatException($"Failed to parse OpenAPI spec. {errors}"); + } + + return result.Model; + } + private readonly ILogger? _logger; public OpenApiParser(ILogger? logger = null) @@ -144,7 +184,10 @@ public async Task ParseSchemaAsync( // Extract authentication var authConfig = ExtractAuthConfig(document); - + + // Populate the canonical ApiSurface (additive — legacy ApiEndpoint list remains available). + model.Api = BuildApiSurface(document, model); + _logger?.LogInformation( "Parsed OpenAPI spec: {EntityCount} entities, {EndpointCount} endpoints", model.Entities.Count, endpoints.Count); @@ -282,7 +325,7 @@ private ApiEndpoint ParseEndpoint( // Parse path parameters foreach (var param in operation.Parameters.Where(p => p.In == ParameterLocation.Path)) { - endpoint.PathParameters.Add(new ApiParameter + endpoint.PathParameters.Add(new Models.ApiParameter { Name = param.Name, Type = param.Schema != null ? 
MapSchemaType(param.Schema) : SemanticType.String, @@ -290,11 +333,11 @@ private ApiEndpoint ParseEndpoint( Description = param.Description }); } - + // Parse query parameters foreach (var param in operation.Parameters.Where(p => p.In == ParameterLocation.Query)) { - endpoint.QueryParameters.Add(new ApiParameter + endpoint.QueryParameters.Add(new Models.ApiParameter { Name = param.Name, Type = param.Schema != null ? MapSchemaType(param.Schema) : SemanticType.String, @@ -406,10 +449,339 @@ private static bool ShouldIncludeEndpoint(string path, List? filters) private static string ToPascalCase(string name) { if (string.IsNullOrEmpty(name)) return name; - + // Handle snake_case and kebab-case var parts = name.Split(new[] { '_', '-' }, StringSplitOptions.RemoveEmptyEntries); - return string.Concat(parts.Select(p => + return string.Concat(parts.Select(p => char.ToUpperInvariant(p[0]) + p[1..].ToLowerInvariant())); } + + // --------------------------------------------------------------------- + // ApiSurface population. Additive layer over the existing parse. + // --------------------------------------------------------------------- + + private static Core.CanonicalModel.ApiSurface BuildApiSurface(OpenApiDocument document, SemanticModel model) + { + var knownEntityNames = model.Entities.Values.Select(e => e.Name).ToHashSet(StringComparer.Ordinal); + + var operations = new List(); + foreach (var (path, pathItem) in document.Paths) + { + foreach (var (method, operation) in pathItem.Operations) + { + operations.Add(BuildOperation(path, method, operation, knownEntityNames)); + } + } + + var servers = document.Servers? + .Select(s => new Core.CanonicalModel.ApiServer { Url = s.Url ?? string.Empty, Description = s.Description }) + .ToList() ?? []; + + var tags = document.Tags? + .Select(t => new Core.CanonicalModel.ApiTag { Name = t.Name, Description = t.Description }) + .ToList() ?? 
[]; + + var securitySchemes = new Dictionary(StringComparer.Ordinal); + if (document.Components?.SecuritySchemes is not null) + { + foreach (var (name, scheme) in document.Components.SecuritySchemes) + { + securitySchemes[name] = BuildSecurityScheme(name, scheme); + } + } + + var defaultRequirements = BuildSecurityRequirements(document.SecurityRequirements); + + return new Core.CanonicalModel.ApiSurface + { + Title = document.Info?.Title ?? "API", + Version = document.Info?.Version ?? "0.0.0", + Description = document.Info?.Description, + Servers = servers, + Tags = tags, + Operations = operations, + SecuritySchemes = securitySchemes, + SecurityRequirements = defaultRequirements + }; + } + + private static Core.CanonicalModel.ApiOperation BuildOperation( + string path, + OperationType method, + OpenApiOperation operation, + HashSet knownEntityNames) + { + var canonicalMethod = MapOperationType(method); + var operationId = !string.IsNullOrWhiteSpace(operation.OperationId) + ? operation.OperationId + : GenerateOperationId(canonicalMethod, path); + + var parameters = operation.Parameters? + .Select(p => BuildParameter(p, knownEntityNames)) + .ToList() ?? []; + + Core.CanonicalModel.ApiRequestBody? requestBody = null; + if (operation.RequestBody is not null) + { + requestBody = new Core.CanonicalModel.ApiRequestBody + { + Description = operation.RequestBody.Description, + Required = operation.RequestBody.Required, + Content = BuildContentMap(operation.RequestBody.Content, knownEntityNames) + }; + } + + var responses = new Dictionary(StringComparer.Ordinal); + if (operation.Responses is not null) + { + foreach (var (statusCode, response) in operation.Responses) + { + responses[statusCode] = new Core.CanonicalModel.ApiResponse + { + Description = response.Description ?? 
string.Empty, + Content = BuildContentMap(response.Content, knownEntityNames) + }; + } + } + + return new Core.CanonicalModel.ApiOperation + { + OperationId = operationId, + Method = canonicalMethod, + Path = path, + Summary = operation.Summary, + Description = operation.Description, + Tags = operation.Tags?.Select(t => t.Name).Where(n => !string.IsNullOrEmpty(n)).ToList() ?? [], + Parameters = parameters, + RequestBody = requestBody, + Responses = responses, + SecurityRequirements = BuildSecurityRequirements(operation.Security), + Deprecated = operation.Deprecated + }; + } + + private static Core.CanonicalModel.ApiParameter BuildParameter(OpenApiParameter param, HashSet knownEntityNames) + { + return new Core.CanonicalModel.ApiParameter + { + Name = param.Name, + Location = MapParameterLocation(param.In), + Description = param.Description, + Required = param.Required, + Deprecated = param.Deprecated, + Schema = param.Schema is null ? null : BuildMediaType(param.Schema, knownEntityNames) + }; + } + + private static IReadOnlyDictionary BuildContentMap( + IDictionary? content, + HashSet knownEntityNames) + { + if (content is null || content.Count == 0) + { + return new Dictionary(); + } + + var result = new Dictionary(StringComparer.Ordinal); + foreach (var (mediaType, mediaValue) in content) + { + if (mediaValue?.Schema is null) continue; + result[mediaType] = BuildMediaType(mediaValue.Schema, knownEntityNames, mediaValue); + } + return result; + } + + private static Core.CanonicalModel.ApiMediaType BuildMediaType( + OpenApiSchema schema, + HashSet knownEntityNames, + OpenApiMediaType? mediaTypeSource = null) + { + var example = SerializeExample(mediaTypeSource); + + // Direct named reference → link to the SemanticEntity by name. 
+ if (schema.Reference is { Id: { Length: > 0 } refId } && knownEntityNames.Contains(refId)) + { + return new Core.CanonicalModel.ApiMediaType { EntityName = refId, Example = example }; + } + + // Array of named references → still an inline array schema, but Items carries the link. + return new Core.CanonicalModel.ApiMediaType + { + Schema = BuildSchema(schema, knownEntityNames), + Example = example + }; + } + + private static string? SerializeExample(OpenApiMediaType? mediaType) + { + var any = mediaType?.Example + ?? mediaType?.Examples? + .OrderBy(k => k.Key, StringComparer.Ordinal) + .Select(k => k.Value?.Value) + .FirstOrDefault(v => v is not null); + if (any is null) return null; + + using var sw = new StringWriter(); + var writer = new Microsoft.OpenApi.Writers.OpenApiJsonWriter(sw); + any.Write(writer, Microsoft.OpenApi.OpenApiSpecVersion.OpenApi3_0); + return sw.ToString(); + } + + private static Core.CanonicalModel.ApiSchema BuildSchema(OpenApiSchema schema, HashSet knownEntityNames) + { + var type = !string.IsNullOrEmpty(schema.Type) + ? schema.Type + : (schema.Reference is not null ? "object" : "string"); + + var enumValues = schema.Enum? + .Select(e => e is Microsoft.OpenApi.Any.OpenApiString s ? s.Value : e?.ToString() ?? string.Empty) + .Where(v => !string.IsNullOrEmpty(v)) + .ToList() ?? []; + + Core.CanonicalModel.ApiSchema? items = null; + if (schema.Items is not null) + { + items = BuildSchema(schema.Items, knownEntityNames); + } + + string? 
entityName = null; + if (schema.Reference is { Id: { Length: > 0 } refId } && knownEntityNames.Contains(refId)) + { + entityName = refId; + } + + return new Core.CanonicalModel.ApiSchema + { + Type = type, + Format = schema.Format, + Items = items, + Enum = enumValues, + Nullable = schema.Nullable, + EntityName = entityName + }; + } + + private static Core.CanonicalModel.ApiSecurityScheme BuildSecurityScheme(string name, OpenApiSecurityScheme scheme) + { + var flows = new Dictionary(StringComparer.Ordinal); + if (scheme.Type == SecuritySchemeType.OAuth2 && scheme.Flows is not null) + { + if (scheme.Flows.AuthorizationCode is not null) + flows["authorizationCode"] = MapFlow(scheme.Flows.AuthorizationCode); + if (scheme.Flows.ClientCredentials is not null) + flows["clientCredentials"] = MapFlow(scheme.Flows.ClientCredentials); + if (scheme.Flows.Implicit is not null) + flows["implicit"] = MapFlow(scheme.Flows.Implicit); + if (scheme.Flows.Password is not null) + flows["password"] = MapFlow(scheme.Flows.Password); + } + + return new Core.CanonicalModel.ApiSecurityScheme + { + Name = name, + Type = MapSecuritySchemeType(scheme.Type), + Description = scheme.Description, + In = scheme.Type == SecuritySchemeType.ApiKey ? MapParameterLocation(scheme.In) : null, + ParameterName = scheme.Type == SecuritySchemeType.ApiKey ? scheme.Name : null, + Scheme = scheme.Type == SecuritySchemeType.Http ? scheme.Scheme : null, + BearerFormat = scheme.Type == SecuritySchemeType.Http ? scheme.BearerFormat : null, + OpenIdConnectUrl = scheme.OpenIdConnectUrl?.ToString(), + Flows = flows + }; + } + + private static Core.CanonicalModel.ApiSecurityFlow MapFlow(OpenApiOAuthFlow flow) + { + var scopes = flow.Scopes?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value, StringComparer.Ordinal) + ?? 
new Dictionary(StringComparer.Ordinal); + + return new Core.CanonicalModel.ApiSecurityFlow + { + AuthorizationUrl = flow.AuthorizationUrl?.ToString(), + TokenUrl = flow.TokenUrl?.ToString(), + RefreshUrl = flow.RefreshUrl?.ToString(), + Scopes = scopes + }; + } + + private static IReadOnlyList BuildSecurityRequirements( + IList? requirements) + { + if (requirements is null || requirements.Count == 0) return []; + + var result = new List(); + foreach (var requirement in requirements) + { + var schemes = new Dictionary>(StringComparer.Ordinal); + foreach (var (schemeRef, scopes) in requirement) + { + var schemeName = schemeRef?.Reference?.Id; + if (string.IsNullOrEmpty(schemeName)) continue; + schemes[schemeName] = (scopes ?? (IList)Array.Empty()).ToList(); + } + if (schemes.Count > 0) + { + result.Add(new Core.CanonicalModel.ApiSecurityRequirement { Schemes = schemes }); + } + } + return result; + } + + private static Core.CanonicalModel.ApiHttpMethod MapOperationType(OperationType method) => method switch + { + OperationType.Get => Core.CanonicalModel.ApiHttpMethod.Get, + OperationType.Put => Core.CanonicalModel.ApiHttpMethod.Put, + OperationType.Post => Core.CanonicalModel.ApiHttpMethod.Post, + OperationType.Delete => Core.CanonicalModel.ApiHttpMethod.Delete, + OperationType.Options => Core.CanonicalModel.ApiHttpMethod.Options, + OperationType.Head => Core.CanonicalModel.ApiHttpMethod.Head, + OperationType.Patch => Core.CanonicalModel.ApiHttpMethod.Patch, + OperationType.Trace => Core.CanonicalModel.ApiHttpMethod.Trace, + _ => Core.CanonicalModel.ApiHttpMethod.Get + }; + + private static Core.CanonicalModel.ApiParameterLocation MapParameterLocation(ParameterLocation? 
location) => location switch + { + ParameterLocation.Query => Core.CanonicalModel.ApiParameterLocation.Query, + ParameterLocation.Header => Core.CanonicalModel.ApiParameterLocation.Header, + ParameterLocation.Path => Core.CanonicalModel.ApiParameterLocation.Path, + ParameterLocation.Cookie => Core.CanonicalModel.ApiParameterLocation.Cookie, + _ => Core.CanonicalModel.ApiParameterLocation.Query + }; + + private static Core.CanonicalModel.ApiSecuritySchemeType MapSecuritySchemeType(SecuritySchemeType type) => type switch + { + SecuritySchemeType.ApiKey => Core.CanonicalModel.ApiSecuritySchemeType.ApiKey, + SecuritySchemeType.Http => Core.CanonicalModel.ApiSecuritySchemeType.Http, + SecuritySchemeType.OAuth2 => Core.CanonicalModel.ApiSecuritySchemeType.OAuth2, + SecuritySchemeType.OpenIdConnect => Core.CanonicalModel.ApiSecuritySchemeType.OpenIdConnect, + _ => Core.CanonicalModel.ApiSecuritySchemeType.ApiKey + }; + + /// + /// Produce a deterministic operationId for operations that omit one. + /// Format: {method}_{path} with the path lowercased, non-alphanumerics replaced by _, + /// and runs of underscores collapsed. + /// + internal static string GenerateOperationId(Core.CanonicalModel.ApiHttpMethod method, string path) + { + var methodToken = method.ToString().ToLowerInvariant(); + var pathBuilder = new System.Text.StringBuilder(path.Length); + var previousWasUnderscore = false; + foreach (var ch in path) + { + if (char.IsLetterOrDigit(ch)) + { + pathBuilder.Append(char.ToLowerInvariant(ch)); + previousWasUnderscore = false; + } + else if (!previousWasUnderscore) + { + pathBuilder.Append('_'); + previousWasUnderscore = true; + } + } + var pathToken = pathBuilder.ToString().Trim('_'); + return string.IsNullOrEmpty(pathToken) ? 
methodToken : $"{methodToken}_{pathToken}"; + } } diff --git a/tests/DocFlow.CLI.Tests/DocFlow.CLI.Tests.csproj b/tests/DocFlow.CLI.Tests/DocFlow.CLI.Tests.csproj new file mode 100644 index 0000000..187c016 --- /dev/null +++ b/tests/DocFlow.CLI.Tests/DocFlow.CLI.Tests.csproj @@ -0,0 +1,33 @@ + + + + net8.0 + enable + enable + false + true + latest + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + diff --git a/tests/DocFlow.CLI.Tests/Integrate/DocsCommandTests.cs b/tests/DocFlow.CLI.Tests/Integrate/DocsCommandTests.cs new file mode 100644 index 0000000..095074e --- /dev/null +++ b/tests/DocFlow.CLI.Tests/Integrate/DocsCommandTests.cs @@ -0,0 +1,234 @@ +using System.Text; +using DocFlow.CLI; +using Spectre.Console; +using Xunit; + +namespace DocFlow.CLI.Tests.Integrate; + +/// +/// Exercises docflow integrate docs. Drives the command handler directly +/// () so assertions are quick and deterministic; +/// argument parsing is covered separately by . 
+/// +public class DocsCommandTests : IDisposable +{ + private const string PetstoreFixture = "Fixtures/petstore.json"; + + private readonly string _tempRoot = Path.Combine(Path.GetTempPath(), $"docflow-test-{Guid.NewGuid():N}"); + private readonly TextWriter _originalOut; + private readonly StringWriter _capturedOut; + private readonly IAnsiConsole _originalConsole; + + public DocsCommandTests() + { + Directory.CreateDirectory(_tempRoot); + _capturedOut = new StringWriter(); + _originalOut = Console.Out; + _originalConsole = AnsiConsole.Console; + + Console.SetOut(_capturedOut); + AnsiConsole.Console = AnsiConsole.Create(new AnsiConsoleSettings + { + Ansi = AnsiSupport.No, + ColorSystem = ColorSystemSupport.NoColors, + Out = new AnsiConsoleOutput(_capturedOut) + }); + } + + public void Dispose() + { + AnsiConsole.Console = _originalConsole; + Console.SetOut(_originalOut); + try { Directory.Delete(_tempRoot, recursive: true); } catch { } + } + + private string CapturedOutput => _capturedOut.ToString(); + + [Fact] + public async Task Cli_Docs_Petstore_WritesExpectedFiles() + { + var output = new DirectoryInfo(Path.Combine(_tempRoot, "out")); + + var exitCode = await Program.ExecuteDocsCommand( + spec: new FileInfo(PetstoreFixture), + output: output, + format: "markdown", + diagrams: "class", + withExamples: false, + groupBy: "tag", + title: null, + verbose: false); + + Assert.Equal(0, exitCode); + + string[] expected = + [ + "index.md", + "overview.md", + "domain-model.md", + "endpoints/pet.md", + "endpoints/store.md", + "assets/openapi.json" + ]; + + foreach (var relative in expected) + { + var path = Path.Combine(output.FullName, relative); + Assert.True(File.Exists(path), $"Expected {relative} to be written."); + Assert.NotEqual(0, new FileInfo(path).Length); + } + } + + [Fact] + public async Task Cli_Docs_CopiesSourceSpecToAssets_ByteIdentical() + { + var output = new DirectoryInfo(Path.Combine(_tempRoot, "out")); + + var exitCode = await 
Program.ExecuteDocsCommand( + spec: new FileInfo(PetstoreFixture), + output: output, + format: "markdown", + diagrams: "class", + withExamples: false, + groupBy: "tag", + title: null, + verbose: false); + + Assert.Equal(0, exitCode); + + var sourceBytes = await File.ReadAllBytesAsync(PetstoreFixture); + var copiedBytes = await File.ReadAllBytesAsync(Path.Combine(output.FullName, "assets/openapi.json")); + Assert.Equal(sourceBytes, copiedBytes); + } + + [Fact] + public async Task Cli_Docs_YamlSpec_CopiesAsYaml() + { + const string yaml = """ + openapi: 3.0.3 + info: + title: YAML Pet + version: '1.0' + paths: + /pets: + get: + operationId: listPets + responses: + '200': + description: ok + """; + + var yamlPath = Path.Combine(_tempRoot, "pet.yaml"); + await File.WriteAllTextAsync(yamlPath, yaml); + + var output = new DirectoryInfo(Path.Combine(_tempRoot, "out-yaml")); + var exitCode = await Program.ExecuteDocsCommand( + spec: new FileInfo(yamlPath), + output: output, + format: "markdown", + diagrams: "class", + withExamples: false, + groupBy: "tag", + title: null, + verbose: false); + + Assert.Equal(0, exitCode); + + var copied = Path.Combine(output.FullName, "assets/openapi.yaml"); + Assert.True(File.Exists(copied), "YAML spec should be preserved as .yaml, not converted."); + Assert.False(File.Exists(Path.Combine(output.FullName, "assets/openapi.json"))); + + Assert.Equal( + await File.ReadAllTextAsync(yamlPath), + await File.ReadAllTextAsync(copied)); + } + + [Fact] + public async Task Cli_Docs_MissingSpec_ReturnsExitCode1() + { + var exitCode = await Program.ExecuteDocsCommand( + spec: new FileInfo(Path.Combine(_tempRoot, "does-not-exist.json")), + output: new DirectoryInfo(Path.Combine(_tempRoot, "out")), + format: "markdown", + diagrams: "class", + withExamples: false, + groupBy: "tag", + title: null, + verbose: false); + + Assert.Equal(1, exitCode); + Assert.Contains("Spec file not found", CapturedOutput); + } + + [Fact] + public async Task 
Cli_Docs_InvalidOutputDir_ReturnsExitCode2() + { + // Create a file at the path we'll pass as the output directory. Directory.CreateDirectory + // on an existing file throws IOException, which the handler maps to exit code 2. + var collisionPath = Path.Combine(_tempRoot, "collision"); + await File.WriteAllTextAsync(collisionPath, "not a directory"); + + var exitCode = await Program.ExecuteDocsCommand( + spec: new FileInfo(PetstoreFixture), + output: new DirectoryInfo(collisionPath), + format: "markdown", + diagrams: "class", + withExamples: false, + groupBy: "tag", + title: null, + verbose: false); + + Assert.Equal(2, exitCode); + Assert.Contains("I/O error", CapturedOutput); + } + + [Fact] + public async Task Cli_Docs_HtmlFlag_WritesHtmlBundle() + { + var output = new DirectoryInfo(Path.Combine(_tempRoot, "out-html")); + + var exitCode = await Program.ExecuteDocsCommand( + spec: new FileInfo(PetstoreFixture), + output: output, + format: "html", + diagrams: "class", + withExamples: false, + groupBy: "tag", + title: null, + verbose: false); + + Assert.Equal(0, exitCode); + + // Every Markdown file has a parallel .html; theme.css ships alongside. 
+ Assert.True(File.Exists(Path.Combine(output.FullName, "index.html"))); + Assert.True(File.Exists(Path.Combine(output.FullName, "overview.html"))); + Assert.True(File.Exists(Path.Combine(output.FullName, "domain-model.html"))); + Assert.True(File.Exists(Path.Combine(output.FullName, "endpoints/pet.html"))); + Assert.True(File.Exists(Path.Combine(output.FullName, "assets/theme.css"))); + } + + [Fact] + public async Task Cli_Docs_Verbose_PrintsFileList() + { + var output = new DirectoryInfo(Path.Combine(_tempRoot, "out")); + + var exitCode = await Program.ExecuteDocsCommand( + spec: new FileInfo(PetstoreFixture), + output: output, + format: "markdown", + diagrams: "class", + withExamples: false, + groupBy: "tag", + title: null, + verbose: true); + + Assert.Equal(0, exitCode); + + var captured = CapturedOutput; + Assert.Contains("wrote", captured); + Assert.Contains("index.md", captured); + Assert.Contains("overview.md", captured); + Assert.Contains("domain-model.md", captured); + Assert.Contains("endpoints/pet.md", captured); + } +} diff --git a/tests/DocFlow.CLI.Tests/Integrate/WatchModeTests.cs b/tests/DocFlow.CLI.Tests/Integrate/WatchModeTests.cs new file mode 100644 index 0000000..a71a3fe --- /dev/null +++ b/tests/DocFlow.CLI.Tests/Integrate/WatchModeTests.cs @@ -0,0 +1,111 @@ +using System.Text; +using DocFlow.CLI; +using Spectre.Console; +using Xunit; + +namespace DocFlow.CLI.Tests.Integrate; + +public class WatchModeTests : IDisposable +{ + private const string PetstoreFixture = "Fixtures/petstore.json"; + + private readonly string _tempRoot = Path.Combine(Path.GetTempPath(), $"docflow-watch-{Guid.NewGuid():N}"); + private readonly TextWriter _originalOut; + private readonly StringWriter _capturedOut; + private readonly IAnsiConsole _originalConsole; + + public WatchModeTests() + { + Directory.CreateDirectory(_tempRoot); + _capturedOut = new StringWriter(); + _originalOut = Console.Out; + _originalConsole = AnsiConsole.Console; + + 
Console.SetOut(_capturedOut); + AnsiConsole.Console = AnsiConsole.Create(new AnsiConsoleSettings + { + Ansi = AnsiSupport.No, + ColorSystem = ColorSystemSupport.NoColors, + Out = new AnsiConsoleOutput(_capturedOut) + }); + } + + public void Dispose() + { + AnsiConsole.Console = _originalConsole; + Console.SetOut(_originalOut); + try { Directory.Delete(_tempRoot, recursive: true); } catch { } + } + + [Fact] + public async Task Watch_RegeneratesOnFileChange() + { + // Set up a writable copy of the fixture so we can modify it. + var specPath = Path.Combine(_tempRoot, "spec.json"); + File.Copy(PetstoreFixture, specPath); + + var output = new DirectoryInfo(Path.Combine(_tempRoot, "out")); + var indexPath = Path.Combine(output.FullName, "index.md"); + + using var cts = new CancellationTokenSource(); + var watchTask = Program.RunWatchAsync( + spec: new FileInfo(specPath), + output: output, + format: "markdown", + diagrams: "class", + withExamples: false, + groupBy: "tag", + title: null, + verbose: false, + cancellationToken: cts.Token); + + try + { + // Wait for the initial build to complete. + Assert.True(await PollUntilAsync(() => File.Exists(indexPath), TimeSpan.FromSeconds(10)), + "Initial build did not produce index.md."); + + var initialMtime = File.GetLastWriteTimeUtc(indexPath); + + // FileSystemWatcher triggers on LastWrite; give the filesystem a moment so the mtime + // of the re-generated file is distinguishable from the initial one. + await Task.Delay(1_100); + + // Modify the spec. + var specContent = await File.ReadAllTextAsync(specPath); + specContent = specContent.Replace("\"Petstore API\"", "\"Petstore API (Modified)\""); + await File.WriteAllTextAsync(specPath, specContent); + + // Wait for the regeneration to update index.md. 
+ var updated = await PollUntilAsync( + () => File.Exists(indexPath) && File.GetLastWriteTimeUtc(indexPath) > initialMtime, + TimeSpan.FromSeconds(10)); + + Assert.True(updated, "Bundle did not regenerate after spec change."); + + // The updated overview should reflect the new title. + var overview = await File.ReadAllTextAsync(Path.Combine(output.FullName, "overview.md")); + Assert.Contains("Petstore API (Modified)", overview); + } + finally + { + cts.Cancel(); + try { await watchTask; } catch { } + } + } + + private static async Task PollUntilAsync(Func predicate, TimeSpan timeout) + { + var deadline = DateTime.UtcNow + timeout; + while (DateTime.UtcNow < deadline) + { + try + { + if (predicate()) return true; + } + catch (IOException) { /* retry; file may be mid-write */ } + await Task.Delay(100); + } + return false; + } +} diff --git a/tests/DocFlow.Core.Tests/CanonicalModel/ApiSurfaceTests.cs b/tests/DocFlow.Core.Tests/CanonicalModel/ApiSurfaceTests.cs new file mode 100644 index 0000000..2d59b01 --- /dev/null +++ b/tests/DocFlow.Core.Tests/CanonicalModel/ApiSurfaceTests.cs @@ -0,0 +1,110 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using DocFlow.Core.CanonicalModel; +using Xunit; + +namespace DocFlow.Core.Tests.CanonicalModel; + +public class ApiSurfaceTests +{ + [Fact] + public void SemanticModel_WithoutApiSurface_RemainsBackwardsCompatible() + { + var model = new SemanticModel + { + Name = "Domain" + }; + var customer = model.CreateEntity("Customer", EntityClassification.Class); + + Assert.Null(model.Api); + Assert.Single(model.Entities); + Assert.Same(customer, model.GetEntity(customer.Id)); + // Api is purely additive: callers that ignore the property observe no change. + var issues = model.Validate(); + Assert.DoesNotContain(issues, i => i.Severity == ValidationSeverity.Error); + } + + [Fact] + public void ApiSurface_Records_AreValueEqual() + { + // Records with only scalar fields get structural equality for free. 
+ // (Record equality does not deep-compare IReadOnlyList / IReadOnlyDictionary members.) + var serverA = new ApiServer { Url = "https://api.example.com", Description = "prod" }; + var serverB = new ApiServer { Url = "https://api.example.com", Description = "prod" }; + Assert.Equal(serverA, serverB); + Assert.Equal(serverA.GetHashCode(), serverB.GetHashCode()); + + var tagA = new ApiTag { Name = "pet", Description = "Pet operations" }; + var tagB = new ApiTag { Name = "pet", Description = "Pet operations" }; + Assert.Equal(tagA, tagB); + + var schemaA = new ApiSchema { Type = "integer", Format = "int64", Nullable = false }; + var schemaB = new ApiSchema { Type = "integer", Format = "int64", Nullable = false }; + Assert.Equal(schemaA, schemaB); + + var mediaA = new ApiMediaType { EntityName = "Pet" }; + var mediaB = new ApiMediaType { EntityName = "Pet" }; + Assert.Equal(mediaA, mediaB); + + // Different values are not equal. + Assert.NotEqual(tagA, tagA with { Name = "store" }); + } + + [Fact] + public void ApiOperation_RequiredMembers_AreMarkedAtCompileTime() + { + var requiredMembers = typeof(ApiOperation) + .GetProperties(BindingFlags.Public | BindingFlags.Instance) + .Where(p => p.GetCustomAttribute() is not null) + .Select(p => p.Name) + .ToHashSet(); + + Assert.Contains(nameof(ApiOperation.OperationId), requiredMembers); + Assert.Contains(nameof(ApiOperation.Method), requiredMembers); + Assert.Contains(nameof(ApiOperation.Path), requiredMembers); + + Assert.True(typeof(ApiOperation).GetCustomAttribute() is not null + || requiredMembers.Count > 0, + "ApiOperation must declare at least one required member."); + } + + [Fact] + public void ApiParameterLocation_Enum_CoversAllOpenApiLocations() + { + var values = Enum.GetValues().ToHashSet(); + + Assert.Contains(ApiParameterLocation.Query, values); + Assert.Contains(ApiParameterLocation.Header, values); + Assert.Contains(ApiParameterLocation.Path, values); + Assert.Contains(ApiParameterLocation.Cookie, values); + 
Assert.Equal(4, values.Count); + } + + [Fact] + public void ApiSurface_DefaultCollectionsAreEmpty() + { + var surface = new ApiSurface + { + Title = "Empty", + Version = "0.0.0" + }; + + Assert.Empty(surface.Servers); + Assert.Empty(surface.Operations); + Assert.Empty(surface.Tags); + Assert.Empty(surface.SecuritySchemes); + Assert.Empty(surface.SecurityRequirements); + } + + [Fact] + public void ApiMediaType_CanReferenceEntityOrInlineSchema() + { + var entityRef = new ApiMediaType { EntityName = "Pet" }; + var inline = new ApiMediaType { Schema = new ApiSchema { Type = "string", Format = "date-time" } }; + + Assert.Equal("Pet", entityRef.EntityName); + Assert.Null(entityRef.Schema); + Assert.Null(inline.EntityName); + Assert.Equal("date-time", inline.Schema!.Format); + } +} diff --git a/tests/DocFlow.Core.Tests/DocFlow.Core.Tests.csproj b/tests/DocFlow.Core.Tests/DocFlow.Core.Tests.csproj new file mode 100644 index 0000000..c9f7dc1 --- /dev/null +++ b/tests/DocFlow.Core.Tests/DocFlow.Core.Tests.csproj @@ -0,0 +1,29 @@ + + + + net8.0 + enable + enable + false + true + latest + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + diff --git a/tests/DocFlow.Diagrams.Tests/Mermaid/MermaidC4ContextGeneratorTests.cs b/tests/DocFlow.Diagrams.Tests/Mermaid/MermaidC4ContextGeneratorTests.cs new file mode 100644 index 0000000..565c98d --- /dev/null +++ b/tests/DocFlow.Diagrams.Tests/Mermaid/MermaidC4ContextGeneratorTests.cs @@ -0,0 +1,97 @@ +using DocFlow.Core.CanonicalModel; +using DocFlow.Diagrams.Mermaid; +using Xunit; + +namespace DocFlow.Diagrams.Tests.Mermaid; + +public class MermaidC4ContextGeneratorTests +{ + private readonly MermaidC4ContextGenerator _generator = new(); + + [Fact] + public void Context_NoServers_ProducesSingleContainer() + { + var api = new ApiSurface { Title = "Minimal", Version = "1.0" }; + + var output = 
_generator.Generate(api); + + Assert.StartsWith("flowchart LR", output); + Assert.Contains("Client", output); + Assert.Contains("API[[\"Minimal\"]]", output); + Assert.Contains("Client --> API", output); + // No server nodes emitted. + Assert.DoesNotContain("Server1", output); + } + + [Fact] + public void Context_WithOAuth_AddsIdpActor() + { + var api = new ApiSurface + { + Title = "Secured", + Version = "1.0", + SecuritySchemes = new Dictionary + { + ["oauth2"] = new() + { + Name = "oauth2", + Type = ApiSecuritySchemeType.OAuth2, + Flows = new Dictionary + { + ["authorizationCode"] = new() + { + AuthorizationUrl = "https://auth.example/auth", + TokenUrl = "https://auth.example/token" + } + } + } + } + }; + + var output = _generator.Generate(api); + + Assert.Contains("Idp1", output); + Assert.Contains("IdP: oauth2", output); + Assert.Contains("https://auth.example/auth", output); + Assert.Contains("Client --> Idp1", output); + } + + [Fact] + public void Context_Deterministic_OrderingByName() + { + ApiSurface Build(params (string url, string desc)[] servers) => new() + { + Title = "Det", + Version = "1.0", + Servers = servers.Select(s => new ApiServer { Url = s.url, Description = s.desc }).ToList() + }; + + var a = Build( + ("https://api-z.example", "Z"), + ("https://api-a.example", "A"), + ("https://api-m.example", "M")); + + var b = Build( + ("https://api-m.example", "M"), + ("https://api-a.example", "A"), + ("https://api-z.example", "Z")); + + Assert.Equal(_generator.Generate(a), _generator.Generate(b)); + + var output = _generator.Generate(a); + var aIdx = output.IndexOf("api-a", StringComparison.Ordinal); + var mIdx = output.IndexOf("api-m", StringComparison.Ordinal); + var zIdx = output.IndexOf("api-z", StringComparison.Ordinal); + Assert.True(aIdx < mIdx && mIdx < zIdx, "Servers should appear in alphabetical URL order."); + } + + [Fact] + public void Context_NullApi_ProducesMinimalDiagram() + { + var output = _generator.Generate(null); + + 
Assert.StartsWith("flowchart LR", output); + Assert.Contains("Client", output); + Assert.Contains("API[[\"API\"]]", output); + } +} diff --git a/tests/DocFlow.Diagrams.Tests/Mermaid/MermaidEndpointFlowchartGeneratorTests.cs b/tests/DocFlow.Diagrams.Tests/Mermaid/MermaidEndpointFlowchartGeneratorTests.cs new file mode 100644 index 0000000..94b8c1c --- /dev/null +++ b/tests/DocFlow.Diagrams.Tests/Mermaid/MermaidEndpointFlowchartGeneratorTests.cs @@ -0,0 +1,121 @@ +using DocFlow.Core.CanonicalModel; +using DocFlow.Diagrams.Mermaid; +using Xunit; + +namespace DocFlow.Diagrams.Tests.Mermaid; + +public class MermaidEndpointFlowchartGeneratorTests +{ + private readonly MermaidEndpointFlowchartGenerator _generator = new(); + + [Fact] + public void EndpointFlow_WithMultipleResponses_BranchesForEachStatus() + { + var operation = new ApiOperation + { + OperationId = "getPetById", + Method = ApiHttpMethod.Get, + Path = "/pets/{id}", + Responses = new Dictionary + { + ["200"] = new() + { + Description = "ok", + Content = new Dictionary + { + ["application/json"] = new() { EntityName = "Pet" } + } + }, + ["404"] = new() { Description = "not found" }, + ["500"] = new() { Description = "server error" } + } + }; + + var output = _generator.Generate(operation); + + Assert.StartsWith("flowchart LR", output); + Assert.Contains("Response200", output); + Assert.Contains("Response404", output); + Assert.Contains("Response500", output); + // Success path is solid, non-2xx are dashed. 
+ Assert.Contains("Handler --> Response200", output); + Assert.Contains("Handler -.-> Response404", output); + Assert.Contains("Handler -.-> Response500", output); + } + + [Fact] + public void EndpointFlow_WithoutSecurity_OmitsAuthorizeNode() + { + var operation = new ApiOperation + { + OperationId = "listPets", + Method = ApiHttpMethod.Get, + Path = "/pets", + Responses = new Dictionary + { + ["200"] = new() { Description = "ok" } + } + }; + + var output = _generator.Generate(operation); + + Assert.DoesNotContain("Authorize", output); + // Request flows directly: Request -> Validate -> Handler. + Assert.Contains("Request --> Validate", output); + Assert.Contains("Validate --> Handler", output); + } + + [Fact] + public void EndpointFlow_WithSecurity_IncludesAuthorizeNode() + { + var operation = new ApiOperation + { + OperationId = "deletePet", + Method = ApiHttpMethod.Delete, + Path = "/pets/{id}", + SecurityRequirements = + [ + new ApiSecurityRequirement + { + Schemes = new Dictionary> { ["oauth2"] = ["write"] } + } + ], + Responses = new Dictionary + { + ["204"] = new() { Description = "deleted" } + } + }; + + var output = _generator.Generate(operation); + + Assert.Contains("Authorize[\"Authorize\"]", output); + Assert.Contains("Validate --> Authorize", output); + Assert.Contains("Authorize --> Handler", output); + } + + [Fact] + public void EndpointFlow_IncludesResponseBodySummary() + { + var operation = new ApiOperation + { + OperationId = "getPetById", + Method = ApiHttpMethod.Get, + Path = "/pets/{id}", + Responses = new Dictionary + { + ["200"] = new() + { + Description = "ok", + Content = new Dictionary + { + ["application/json"] = new() { EntityName = "Pet" } + } + } + } + }; + + var output = _generator.Generate(operation); + + Assert.Contains("200: Pet", output); + } +} diff --git a/tests/DocFlow.Diagrams.Tests/Mermaid/MermaidErDiagramGeneratorTests.cs b/tests/DocFlow.Diagrams.Tests/Mermaid/MermaidErDiagramGeneratorTests.cs new file mode 100644 index 
0000000..1df74da --- /dev/null +++ b/tests/DocFlow.Diagrams.Tests/Mermaid/MermaidErDiagramGeneratorTests.cs @@ -0,0 +1,101 @@ +using DocFlow.Core.CanonicalModel; +using DocFlow.Diagrams.Mermaid; +using Xunit; + +namespace DocFlow.Diagrams.Tests.Mermaid; + +public class MermaidErDiagramGeneratorTests +{ + private readonly MermaidErDiagramGenerator _generator = new(); + + [Fact] + public void Er_TwoEntitiesWithComposition_EmitsCorrectCardinality() + { + var model = new SemanticModel { Name = "Shop" }; + var order = model.CreateEntity("Order", EntityClassification.AggregateRoot); + var line = model.CreateEntity("LineItem", EntityClassification.Entity); + model.AddRelationship(order.Id, line.Id, RelationshipType.Composition, name: "contains"); + + var output = _generator.Generate(model); + + Assert.StartsWith("erDiagram", output); + Assert.Contains("Order ||--o{ LineItem : contains", output); + } + + [Fact] + public void Er_Aggregation_And_Association_EmitCorrectCardinality() + { + var model = new SemanticModel { Name = "Shop" }; + var dept = model.CreateEntity("Department", EntityClassification.Entity); + var emp = model.CreateEntity("Employee", EntityClassification.Entity); + var cust = model.CreateEntity("Customer", EntityClassification.Entity); + var card = model.CreateEntity("LoyaltyCard", EntityClassification.Entity); + + model.AddRelationship(dept.Id, emp.Id, RelationshipType.Aggregation, name: "employs"); + model.AddRelationship(cust.Id, card.Id, RelationshipType.Association, name: "holds"); + + var output = _generator.Generate(model); + + Assert.Contains("Department }o--o{ Employee : employs", output); + Assert.Contains("Customer }o--|| LoyaltyCard : holds", output); + } + + [Fact] + public void Er_NoRelationships_EmitsSingleEntity() + { + var model = new SemanticModel { Name = "Solo" }; + model.CreateEntity("Customer", EntityClassification.Entity); + + var output = _generator.Generate(model); + + Assert.StartsWith("erDiagram", output); + 
Assert.Contains("Customer {", output); + // No relationship arrows should appear. + Assert.DoesNotContain("--", output); + } + + [Fact] + public void Er_Deterministic_OrdersEntitiesAlphabetically() + { + // Build two models with the same entities in different insertion orders. + var modelA = new SemanticModel { Name = "A" }; + modelA.CreateEntity("Zeta", EntityClassification.Entity); + modelA.CreateEntity("Alpha", EntityClassification.Entity); + modelA.CreateEntity("Mu", EntityClassification.Entity); + + var modelB = new SemanticModel { Name = "B" }; + modelB.CreateEntity("Alpha", EntityClassification.Entity); + modelB.CreateEntity("Mu", EntityClassification.Entity); + modelB.CreateEntity("Zeta", EntityClassification.Entity); + + var outA = _generator.Generate(modelA); + var outB = _generator.Generate(modelB); + + Assert.Equal(outA, outB); + + // And the entities appear in alphabetical order inside the output. + var alphaIdx = outA.IndexOf("Alpha", StringComparison.Ordinal); + var muIdx = outA.IndexOf("Mu", StringComparison.Ordinal); + var zetaIdx = outA.IndexOf("Zeta", StringComparison.Ordinal); + Assert.True(alphaIdx < muIdx, "Alpha should appear before Mu"); + Assert.True(muIdx < zetaIdx, "Mu should appear before Zeta"); + } + + [Fact] + public void Er_SkipsNonStructuralRelationships() + { + var model = new SemanticModel { Name = "Types" }; + var animal = model.CreateEntity("Animal", EntityClassification.Class); + var dog = model.CreateEntity("Dog", EntityClassification.Class); + model.AddRelationship(dog.Id, animal.Id, RelationshipType.Inheritance); + + var output = _generator.Generate(model); + + // Inheritance is not an ER-diagram concept — it should not render as a relationship line. + Assert.DoesNotContain("||--|{", output); + Assert.DoesNotContain(": inheritance", output); + // Both entities still show up as blocks because neither is touched by a rendered relationship. 
+ Assert.Contains("Animal {", output); + Assert.Contains("Dog {", output); + } +} diff --git a/tests/DocFlow.Diagrams.Tests/Mermaid/MermaidSequenceDiagramGeneratorTests.cs b/tests/DocFlow.Diagrams.Tests/Mermaid/MermaidSequenceDiagramGeneratorTests.cs new file mode 100644 index 0000000..7153215 --- /dev/null +++ b/tests/DocFlow.Diagrams.Tests/Mermaid/MermaidSequenceDiagramGeneratorTests.cs @@ -0,0 +1,151 @@ +using DocFlow.Core.CanonicalModel; +using DocFlow.Diagrams.Mermaid; +using Xunit; + +namespace DocFlow.Diagrams.Tests.Mermaid; + +public class MermaidSequenceDiagramGeneratorTests +{ + private readonly MermaidSequenceDiagramGenerator _generator = new(); + + [Fact] + public void Sequence_GetOperation_ProducesClientApiMessages() + { + var operation = new ApiOperation + { + OperationId = "listPets", + Method = ApiHttpMethod.Get, + Path = "/pets", + Responses = new Dictionary + { + ["200"] = new() + { + Description = "ok", + Content = new Dictionary + { + ["application/json"] = new() { EntityName = "Pet" } + } + } + } + }; + + var output = _generator.Generate(operation); + + Assert.StartsWith("sequenceDiagram", output); + Assert.Contains("participant Client", output); + Assert.Contains("participant API", output); + Assert.Contains("Client->>API: GET /pets", output); + Assert.Contains("API-->>Client: 200 Pet", output); + } + + [Fact] + public void Sequence_SecuredOperation_IncludesAuthActor() + { + var operation = new ApiOperation + { + OperationId = "getSecret", + Method = ApiHttpMethod.Get, + Path = "/secret", + SecurityRequirements = + [ + new ApiSecurityRequirement + { + Schemes = new Dictionary> { ["oauth2"] = ["read"] } + } + ], + Responses = new Dictionary + { + ["200"] = new() { Description = "ok" } + } + }; + + var output = _generator.Generate(operation); + + Assert.Contains("participant Auth", output); + Assert.Contains("Client->>Auth: authenticate", output); + Assert.Contains("Auth-->>Client: token", output); + } + + [Fact] + public void 
Sequence_UnsecuredOperation_OmitsAuthActor() + { + var operation = new ApiOperation + { + OperationId = "listPets", + Method = ApiHttpMethod.Get, + Path = "/pets", + Responses = new Dictionary + { + ["200"] = new() { Description = "ok" } + } + }; + + var output = _generator.Generate(operation); + + Assert.DoesNotContain("participant Auth", output); + Assert.DoesNotContain("authenticate", output); + } + + [Fact] + public void Sequence_OperationWithRequestBody_IncludesPayloadType() + { + var operation = new ApiOperation + { + OperationId = "createPet", + Method = ApiHttpMethod.Post, + Path = "/pets", + RequestBody = new ApiRequestBody + { + Required = true, + Content = new Dictionary + { + ["application/json"] = new() { EntityName = "Pet" } + } + }, + Responses = new Dictionary + { + ["201"] = new() + { + Description = "created", + Content = new Dictionary + { + ["application/json"] = new() { EntityName = "Pet" } + } + } + } + }; + + var output = _generator.Generate(operation); + + Assert.Contains("Client->>API: POST /pets (Pet)", output); + Assert.Contains("API-->>Client: 201 Pet", output); + } + + [Fact] + public void Sequence_PrefersSuccessfulResponseOverOtherStatuses() + { + var operation = new ApiOperation + { + OperationId = "getPetById", + Method = ApiHttpMethod.Get, + Path = "/pets/{id}", + Responses = new Dictionary + { + ["404"] = new() { Description = "not found" }, + ["200"] = new() + { + Description = "ok", + Content = new Dictionary + { + ["application/json"] = new() { EntityName = "Pet" } + } + } + } + }; + + var output = _generator.Generate(operation); + + Assert.Contains("API-->>Client: 200 Pet", output); + Assert.DoesNotContain("API-->>Client: 404", output); + } +} diff --git a/tests/DocFlow.Documentation.Tests/Diff/SpecDifferTests.cs b/tests/DocFlow.Documentation.Tests/Diff/SpecDifferTests.cs new file mode 100644 index 0000000..813e98d --- /dev/null +++ b/tests/DocFlow.Documentation.Tests/Diff/SpecDifferTests.cs @@ -0,0 +1,181 @@ +using 
DocFlow.Core.CanonicalModel; +using DocFlow.Documentation.Diff; +using Xunit; + +namespace DocFlow.Documentation.Tests.Diff; + +public class SpecDifferTests +{ + private readonly SpecDiffer _differ = new(); + + [Fact] + public void Diff_AddedOperation_IsNonBreaking() + { + var oldModel = ModelWith(Operations("listPets")); + var newModel = ModelWith(Operations("listPets", "createPet")); + + var diff = _differ.Diff(oldModel, newModel); + + var change = Assert.Single(diff.Changes); + Assert.Equal(ChangeCategory.Operation, change.Category); + Assert.Equal(ChangeSeverity.NonBreaking, change.Severity); + Assert.Contains("Added operation", change.Description); + Assert.Contains("createPet", change.Description); + } + + [Fact] + public void Diff_RemovedOperation_IsBreaking() + { + var oldModel = ModelWith(Operations("listPets", "deletePet")); + var newModel = ModelWith(Operations("listPets")); + + var diff = _differ.Diff(oldModel, newModel); + + var change = Assert.Single(diff.Changes); + Assert.Equal(ChangeCategory.Operation, change.Category); + Assert.Equal(ChangeSeverity.Breaking, change.Severity); + Assert.Contains("Removed operation", change.Description); + Assert.Contains("deletePet", change.Description); + } + + [Fact] + public void Diff_AddedRequiredRequestField_IsBreaking() + { + // Pet schema gains a required `name` property. 
+ var oldModel = new SemanticModel(); + var oldPet = oldModel.CreateEntity("Pet", EntityClassification.DataTransferObject); + oldPet.Properties.Add(Property("Id", "long", required: true)); + + var newModel = new SemanticModel(); + var newPet = newModel.CreateEntity("Pet", EntityClassification.DataTransferObject); + newPet.Properties.Add(Property("Id", "long", required: true)); + newPet.Properties.Add(Property("name", "string", required: true)); + + var diff = _differ.Diff(oldModel, newModel); + + var change = Assert.Single(diff.Changes); + Assert.Equal(ChangeCategory.Schema, change.Category); + Assert.Equal(ChangeSeverity.Breaking, change.Severity); + Assert.Contains("Added required property", change.Description); + Assert.Contains("Pet.name", change.Description); + } + + [Fact] + public void Diff_AddedOptionalQueryParam_IsNonBreaking() + { + var op = (string[] queryParams) => new ApiOperation + { + OperationId = "listPets", + Method = ApiHttpMethod.Get, + Path = "/pets", + Parameters = queryParams.Select(name => new ApiParameter + { + Name = name, + Location = ApiParameterLocation.Query, + Required = false + }).ToList() + }; + + var oldModel = ModelWith([op([])]); + var newModel = ModelWith([op(["status"])]); + + var diff = _differ.Diff(oldModel, newModel); + + var change = Assert.Single(diff.Changes); + Assert.Equal(ChangeCategory.Parameter, change.Category); + Assert.Equal(ChangeSeverity.NonBreaking, change.Severity); + Assert.Contains("optional query parameter `status`", change.Description); + } + + [Fact] + public void Diff_ChangedFieldType_IsBreaking() + { + var oldModel = new SemanticModel(); + var oldPet = oldModel.CreateEntity("Pet", EntityClassification.DataTransferObject); + oldPet.Properties.Add(Property("Id", "int")); + + var newModel = new SemanticModel(); + var newPet = newModel.CreateEntity("Pet", EntityClassification.DataTransferObject); + newPet.Properties.Add(Property("Id", "string")); + + var diff = _differ.Diff(oldModel, newModel); + + 
Assert.Contains(diff.Changes, c => + c.Category == ChangeCategory.Schema + && c.Severity == ChangeSeverity.Breaking + && c.Description.Contains("Changed type of") + && c.Description.Contains("int") + && c.Description.Contains("string")); + } + + [Fact] + public void Diff_NoChanges_ProducesEmptyChangelogWithHeader() + { + var oldModel = ModelWith(Operations("listPets")); + var newModel = ModelWith(Operations("listPets")); + + var diff = _differ.Diff(oldModel, newModel); + + Assert.False(diff.HasChanges); + Assert.Equal(0, diff.BreakingCount); + Assert.Equal(0, diff.NonBreakingCount); + + var rendered = new ChangelogGenerator().Render(diff); + Assert.Contains("# API Changelog", rendered); + Assert.Contains("| Breaking | 0 |", rendered); + Assert.Contains("| Non-breaking | 0 |", rendered); + Assert.Contains("_No differences detected._", rendered); + } + + [Fact] + public void Diff_RequiredFlagFlip_IsBreakingOnlyWhenTighter() + { + // false → true on a schema property = breaking; true → false = non-breaking. 
+ var oldModel = new SemanticModel(); + var oldPet = oldModel.CreateEntity("Pet", EntityClassification.DataTransferObject); + oldPet.Properties.Add(Property("id", "long", required: false)); + oldPet.Properties.Add(Property("name", "string", required: true)); + + var newModel = new SemanticModel(); + var newPet = newModel.CreateEntity("Pet", EntityClassification.DataTransferObject); + newPet.Properties.Add(Property("id", "long", required: true)); // tightened + newPet.Properties.Add(Property("name", "string", required: false)); // relaxed + + var diff = _differ.Diff(oldModel, newModel); + + Assert.Contains(diff.Changes, c => + c.Path == "Pet.id" && c.Severity == ChangeSeverity.Breaking); + Assert.Contains(diff.Changes, c => + c.Path == "Pet.name" && c.Severity == ChangeSeverity.NonBreaking); + } + + // --- helpers --------------------------------------------------------- + + private static SemanticModel ModelWith(params ApiOperation[] ops) + { + return new SemanticModel + { + Api = new ApiSurface + { + Title = "Test", + Version = "1.0", + Operations = ops + } + }; + } + + private static ApiOperation[] Operations(params string[] ids) => + ids.Select(id => new ApiOperation + { + OperationId = id, + Method = ApiHttpMethod.Get, + Path = "/" + id + }).ToArray(); + + private static SemanticProperty Property(string name, string typeName, bool required = false) => new() + { + Name = name, + Type = new SemanticType { Name = typeName, IsPrimitive = true }, + IsRequired = required + }; +} diff --git a/tests/DocFlow.Documentation.Tests/DocFlow.Documentation.Tests.csproj b/tests/DocFlow.Documentation.Tests/DocFlow.Documentation.Tests.csproj new file mode 100644 index 0000000..811c0f2 --- /dev/null +++ b/tests/DocFlow.Documentation.Tests/DocFlow.Documentation.Tests.csproj @@ -0,0 +1,35 @@ + + + + net8.0 + enable + enable + false + true + latest + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; 
analyzers; buildtransitive + all + + + + + + + + + + + + + + diff --git a/tests/DocFlow.Documentation.Tests/Examples/ExampleSynthesizerTests.cs b/tests/DocFlow.Documentation.Tests/Examples/ExampleSynthesizerTests.cs new file mode 100644 index 0000000..108d390 --- /dev/null +++ b/tests/DocFlow.Documentation.Tests/Examples/ExampleSynthesizerTests.cs @@ -0,0 +1,123 @@ +using DocFlow.Core.CanonicalModel; +using DocFlow.Documentation.Examples; +using Xunit; + +namespace DocFlow.Documentation.Tests.Examples; + +public class ExampleSynthesizerTests +{ + [Fact] + public void Examples_PrimitiveSchema_ProducesSensibleValues() + { + var model = new SemanticModel(); + var synth = new ExampleSynthesizer(model); + + Assert.Equal("\"string\"", synth.Synthesize(Primitive("string"))); + Assert.Equal("\"2026-01-01T00:00:00Z\"", synth.Synthesize(Primitive("string", "date-time"))); + Assert.Equal("\"00000000-0000-0000-0000-000000000000\"", synth.Synthesize(Primitive("string", "uuid"))); + Assert.Equal("0", synth.Synthesize(Primitive("integer"))); + Assert.Equal("false", synth.Synthesize(Primitive("boolean"))); + Assert.Equal("[\"string\"]", synth.Synthesize(new ApiMediaType + { + Schema = new ApiSchema { Type = "array", Items = new ApiSchema { Type = "string" } } + })); + } + + [Fact] + public void Examples_EnumConstrainedString_UsesFirstEnumValue() + { + var model = new SemanticModel(); + var synth = new ExampleSynthesizer(model); + + var media = new ApiMediaType + { + Schema = new ApiSchema { Type = "string", Enum = ["available", "pending", "sold"] } + }; + + Assert.Equal("\"available\"", synth.Synthesize(media)); + } + + [Fact] + public void Examples_NestedObject_RespectsRequiredFields() + { + var model = new SemanticModel(); + var pet = model.CreateEntity("Pet", EntityClassification.DataTransferObject); + pet.Properties.Add(new SemanticProperty + { + Name = "Id", + Type = new SemanticType { Name = "long", IsPrimitive = true }, + IsRequired = true + }); + pet.Properties.Add(new 
SemanticProperty + { + Name = "Name", + Type = new SemanticType { Name = "string", IsPrimitive = true }, + IsRequired = true + }); + pet.Properties.Add(new SemanticProperty + { + Name = "Nickname", + Type = new SemanticType { Name = "string", IsPrimitive = true }, + IsRequired = false + }); + + var synth = new ExampleSynthesizer(model); + var json = synth.Synthesize(new ApiMediaType { EntityName = "Pet" }); + + Assert.NotNull(json); + // Required fields must appear. + Assert.Contains("\"Id\"", json); + Assert.Contains("\"Name\"", json); + // Optional field also appears (OpenAPI examples typically illustrate all fields). + Assert.Contains("\"Nickname\"", json); + // Primitive values render sensibly. + Assert.Contains(": 0", json); + Assert.Contains(": \"string\"", json); + } + + [Fact] + public void Examples_CircularSchema_Terminates() + { + // Node { Id: long, Parent: Node } — self-reference. + var model = new SemanticModel(); + var node = model.CreateEntity("Node", EntityClassification.Entity); + node.Properties.Add(new SemanticProperty + { + Name = "Id", + Type = new SemanticType { Name = "long", IsPrimitive = true } + }); + node.Properties.Add(new SemanticProperty + { + Name = "Parent", + Type = new SemanticType { Name = "Node" } + }); + + var synth = new ExampleSynthesizer(model); + var json = synth.Synthesize(new ApiMediaType { EntityName = "Node" }); + + Assert.NotNull(json); + // Ellipsis marks the cycle termination. 
+ Assert.Contains("\"...\"", json); + } + + [Fact] + public void Examples_SpecProvidedExample_IsPreferredOverSynthesis() + { + var model = new SemanticModel(); + model.CreateEntity("Pet", EntityClassification.DataTransferObject); + var synth = new ExampleSynthesizer(model); + + const string specExample = "{\n \"id\": 42,\n \"name\": \"Rex\"\n}"; + + var result = synth.Synthesize(new ApiMediaType { EntityName = "Pet", Example = specExample }); + + Assert.Equal(specExample.Trim(), result); + // The synthesiser did not fall back to "\"string\"" generation. + Assert.Contains("Rex", result); + } + + private static ApiMediaType Primitive(string type, string? format = null) => new() + { + Schema = new ApiSchema { Type = type, Format = format } + }; +} diff --git a/tests/DocFlow.Documentation.Tests/Html/StaticSiteRendererTests.cs b/tests/DocFlow.Documentation.Tests/Html/StaticSiteRendererTests.cs new file mode 100644 index 0000000..5746db5 --- /dev/null +++ b/tests/DocFlow.Documentation.Tests/Html/StaticSiteRendererTests.cs @@ -0,0 +1,110 @@ +using DocFlow.Core.Abstractions; +using DocFlow.Documentation.Html; +using DocFlow.Documentation.Markdown; +using DocFlow.Documentation.Options; +using DocFlow.Integration.Schemas.OpenApi; +using Xunit; + +namespace DocFlow.Documentation.Tests.Html; + +public class StaticSiteRendererTests +{ + private static async Task> GeneratePetstoreHtmlAsync() + { + var parser = new OpenApiParser(); + var result = await parser.ParseAsync(ParserInput.FromFile("Fixtures/petstore.json")); + Assert.True(result.Success); + + var markdown = await new MarkdownDocumentationGenerator().GenerateAsync(result.Model, new DocumentationOptions()); + return new StaticSiteRenderer().Render(markdown); + } + + [Fact] + public async Task Html_Petstore_ProducesParallelHtmlFiles() + { + var files = await GeneratePetstoreHtmlAsync(); + var byPath = files.ToDictionary(f => f.RelativePath, f => f, StringComparer.Ordinal); + + // Every Markdown file has a corresponding 
.html. + string[] expectedHtml = + [ + "index.html", + "overview.html", + "domain-model.html", + "endpoints/pet.html", + "endpoints/store.html", + "security.html" + ]; + + foreach (var path in expectedHtml) + { + Assert.True(byPath.ContainsKey(path), $"Expected {path} in bundle."); + Assert.Equal("text/html", byPath[path].MediaType); + } + + // CSS asset is present. + Assert.True(byPath.ContainsKey("assets/theme.css")); + Assert.Equal("text/css", byPath["assets/theme.css"].MediaType); + } + + [Fact] + public async Task Html_EmbedsMermaidScript() + { + var files = await GeneratePetstoreHtmlAsync(); + + foreach (var file in files.Where(f => f.RelativePath.EndsWith(".html", StringComparison.Ordinal))) + { + Assert.Contains("mermaid", file.Content); + Assert.Contains("