diff --git a/.github/INSTRUCTION_AUTHORING.md b/.github/INSTRUCTION_AUTHORING.md new file mode 100644 index 00000000..bfb1a8c9 --- /dev/null +++ b/.github/INSTRUCTION_AUTHORING.md @@ -0,0 +1,133 @@ +--- +description: "Standards for creating and maintaining .instructions.md files" +applyTo: "**/*.instructions.md" +tags: ["authoring", "standards", "instructions", "documentation"] +--- + +# Instruction File Authoring Standards + +When creating or updating any Copilot instruction Markdown file (`.instructions.md`), follow these rules to keep guidance durable, easy to review, and maintainable. + +## Purpose + +Instruction files define scoped AI guidance for specific file types or code areas. They must be predictable and machine-readable. + +## General Authoring Rules + +- Prefer precise, testable directives over vague guidance. +- Avoid overlapping or conflicting instructions across files. +- Keep content reusable and not tied to one temporary task. +- Use imperative language ("Use", "Prefer", "Do not", "Validate"). +- If a rule is scoped to a subset of files, use a path-specific `.instructions.md` file rather than adding it to the global file. +- Do not restate general rules in multiple files unless required for clarity. +- When unsure, produce fewer, clearer rules. 
+ +## Required Format + +All `.instructions.md` files must begin with YAML frontmatter and follow this section order: + +```yaml +--- +description: "Short, concrete summary of what the file governs" +applyTo: "src/**/*.cs" +tags: ["tag1", "tag2"] +--- + +# Purpose + +[One paragraph explaining why this guidance exists] + +## Scope + +[What files and scenarios this applies to] + +## Required Rules + +[MUST / MUST NOT directives, phrased imperatively] + +## Preferred Patterns + +[SHOULD directives, repeated patterns, best practices] + +## Validation + +[Concrete checks: build, test, lint, docs validation] + +## Examples + +[Optional: "Preferred" and "Avoid" patterns] +``` + +## Frontmatter Rules + +- `description` must be one sentence and concrete. +- `applyTo` must use explicit glob patterns with the narrowest safe scope. Examples: + - `src/**/*.cs` — all C# files in source + - `tests/**/*.cs` — all test files + - `**/Program.cs` — program entry points +- `applyTo` must **not** be `**` unless the file is intentionally repository-wide. +- If multiple globs are needed, keep them explicit and readable, separated by semicolons or as separate lines. +- `tags` should reflect the instruction's domain (e.g., `["validation", "dependency-injection", "testing"]`). + +## Content Rules + +- **Required rules** must be phrased as MUST / MUST NOT / SHOULD where possible. +- **Validation section** must include concrete checks (build, test, lint, docs validation) when applicable. +- **Examples** must show "preferred" and "avoid" patterns when useful. +- Do not include secrets, tokens, or environment-specific sensitive values. +- Keep sections short and scannable; each section should fit on one screen without scrolling. + +## Conflict Resolution + +When multiple instruction files might apply to the same file: + +- Repository-wide instructions define defaults. +- Path-specific instruction files define narrower, stronger rules for matching files. 
+- If a new file would conflict with an existing instruction file, revise the narrow file instead of creating duplicate policy. +- Always document the relationship between overlapping instruction files in cross-references. + +## Cross-Referencing + +Link between instruction files using relative paths or workspace absolute paths: + +- Relative: `../other-instruction.md` +- Absolute: `/.github/instructions/host-setup.instructions.md` +- Always verify links work before committing. + +## Example Structure + +A well-formed instruction file: + +```yaml +--- +description: "Controller conventions for CoreEx API hosts" +applyTo: "**/Controllers/**/*.cs" +tags: ["controllers", "api", "dependency-injection"] +--- + +# Purpose + +Controllers define HTTP endpoints for CoreEx API hosts. This guidance ensures consistent routing, dependency injection, and use of CoreEx WebApi helpers. + +## Scope + +Applies to all `Controllers/` directories in API projects (`.Api` projects). + +## Required Rules + +- MUST inherit from `ControllerBase`; MUST NOT inherit from `Controller`. +- MUST use `[Route("api/v1/...")]` and follow REST conventions. +- MUST NOT inject `IUnitOfWork` directly; receive it only through application service dependency. + +## Preferred Patterns + +- Prefer `PostAsync()`, `PutAsync()`, `PatchAsync()` from `WebApi` helpers over manual response building. +- Prefer explicit dependency injection over service locator patterns. +- Prefer PATCH with `application/merge-patch+json` for partial updates. 
+ +## Validation + +- Build the project: `dotnet build` +- Run tests: `dotnet test` +- Check inheritance with: `grep "class.*Controller" Controllers/*.cs` +``` diff --git a/.github/SKILL_AUTHORING.md b/.github/SKILL_AUTHORING.md new file mode 100644 index 00000000..9a30c34d --- /dev/null +++ b/.github/SKILL_AUTHORING.md @@ -0,0 +1,152 @@ +--- +description: "Standards for creating and maintaining SKILL.md files" +applyTo: "**/.github/skills/**/SKILL.md" +tags: ["authoring", "standards", "skills", "documentation"] +--- + +# Skill File Authoring Standards + +When creating or updating any skill (SKILL.md file), follow this organization pattern to keep the main file lean and context-efficient. + +## Purpose + +Skill files guide AI agents through complex, multi-step tasks. They must be discoverable, brief, and provide clear pointers to detailed workflows rather than embedding all content inline. + +## Skill Directory Structure + +``` +skills/{skill-name}/ + SKILL.md # Main entry point (lean, <300 lines) + references/ + workflow.md # Detailed step-by-step workflows + checklists.md # Completion gates, validation criteria + patterns.md # Code patterns, templates, conventions + troubleshooting.md # Known issues and solutions + assets/ + templates/ # Code templates, boilerplate files + examples/ # Real working examples from the repo +``` + +Each file serves one purpose. Keep files focused and scannable. + +## SKILL.md Content Rules + +The main SKILL.md must include: + +1. **YAML frontmatter** with `name`, `description`, `argument-hint`, `tags` +2. **One-sentence purpose statement** — what the skill does and when to use it +3. **"When to Use" section** — bullet points, not prose; concrete triggers +4. **"When Not to Use" section** — prevents misuse and clarifies boundaries +5. **Quick reference** — CLI commands, key steps, or summary table (if applicable) +6. **Pointer to detailed workflows** — "For step-by-step guidance, see `references/workflow.md`" +7. 
**Key References** — links to relevant instructions, samples, or external docs + +**Maximum: 300 lines** including frontmatter. If you exceed this, move content to `references/`. + +## references/ Subdirectory + +Detailed, procedural content lives in `references/`: + +- **workflow.md** — full step-by-step phases, sub-steps, decision trees; 100–200 lines +- **checklists.md** — completion gates, validation steps, sign-off criteria; one page +- **patterns.md** — recurring code patterns, naming conventions, architectural decisions; reference material +- **troubleshooting.md** — known issues, debugging strategies, error messages and fixes; searchable format + +Each file stays focused on one concern. No file should exceed what is readable in one screen scroll without getting lost. + +## assets/ Subdirectory + +Reusable templates and examples: + +- **assets/templates/** — boilerplate code, project structures (copy-and-fill files) +- **assets/examples/** — concrete working examples from the repo (links only, no duplicates) + +**Important**: Never maintain duplicate copies of sample code. Always link to the canonical source in `samples/` or other repository locations. + +## Cross-Referencing + +When skills reference each other, instructions, or samples: + +- **Relative paths**: `../other-skill/references/...` (for other skills) +- **Absolute workspace paths**: `/.github/instructions/host-setup.instructions.md`, `/samples/src/Contoso.Products.Api/Program.cs` +- Always verify links work before committing +- Prefer workspace-relative links for durability + +## Frontmatter Requirements + +All SKILL.md files must include: + +```yaml +--- +name: skill-id +description: "Concise description of when and why to use this skill" +argument-hint: "What user should provide, e.g. 'Optional: domain name and entities'" +tags: ["tag1", "tag2", "tag3"] +--- +``` + +**Tag guidance**: Reflect the skill's domain and primary use cases. 
Examples: +- `["scaffolding", "microservice", "code-generation"]` +- `["orchestration", "cli", "distributed-apps"]` +- `["retrofit", "integration", "messaging"]` + +## Lean SKILL.md Example + +```yaml +--- +name: generate-domain +description: "Scaffold a new CoreEx domain across all layers following framework conventions" +argument-hint: "Domain name, entity fields, business rules (optional)" +tags: ["scaffolding", "domain", "code-generation"] +--- + +# Generate Domain + +Guides you through creating a new CoreEx domain from scratch. Asks about entity shape, validation, messaging needs, and generates code tailored to your domain model. + +## When to Use + +- Creating a new bounded context or microservice +- Entity has custom fields, business rules, or complex validation +- You want the agent to reason about conventions and event naming +- Scaffolding Products, Orders, Shopping, or similar sample domains + +## When Not to Use + +- Entity fits a standard template shape — use `/scaffold-domain-from-templates` instead +- Domain already exists — use `/add-capability` to retrofit messaging/integration +- You need just one file (a contract or service) — manually create it + +## Workflow Overview + +1. **Load Context** — examine existing domains and conventions +2. **Gather Inputs** — domain name, entity fields, validation rules, events +3. **Contracts** — define DTOs with source-generation markers +4. **Application Services** — validation, unit-of-work patterns, event publishing +5. **Infrastructure** — repositories, mappers, database access +6. **API Host** — controllers, registration, middleware +7. **Database** — migrations, schema, outbox tables +8. **Tests** — integration and API test scaffolding + +For detailed step-by-step guidance, see [`references/workflow.md`](references/workflow.md). 
+ +## Key References + +- [Application Services Instructions](/.github/instructions/application-services.instructions.md) +- [Contracts Instructions](/.github/instructions/contracts.instructions.md) +- [Host Setup Instructions](/.github/instructions/host-setup.instructions.md) +- [Sample Domains](/samples/src/Contoso.Products/) +- [Roslyn Source Generation](/docs/capabilities.md) +``` + +## Quality Gates + +Before completing a skill: + +- [ ] SKILL.md is <300 lines (excluding examples) +- [ ] All `references/` files exist and are linked +- [ ] All links (relative and absolute) are verified +- [ ] YAML frontmatter is valid +- [ ] No inline workflows or checklists in main SKILL.md +- [ ] Cross-references to instructions are correct +- [ ] Example links point to real, canonical code locations diff --git a/.github/agents/coreex-expert.agent.md b/.github/agents/coreex-expert.agent.md new file mode 100644 index 00000000..67de8c19 --- /dev/null +++ b/.github/agents/coreex-expert.agent.md @@ -0,0 +1,47 @@ +--- +name: CoreEx Expert +description: "Use when you need to explain, understand, or decide how CoreEx works. Triggers: explain CoreEx, how does CoreEx, which pattern, which capability, which shape, plan a feature, review a design, compare samples, architecture guidance, coding patterns, layering, host setup, validation, repository conventions, eventing, outbox relay, subscriber design, sample-aligned decisions." +tools: [read, search] +user-invocable: true +argument-hint: Ask for CoreEx pattern guidance, architecture decisions, or sample-aligned implementation advice. +--- +You are the CoreEx Expert for this repository. + +Your mission: +- Provide authoritative, repo-grounded guidance on CoreEx architecture, patterns, and practices. +- Prefer CoreEx-native primitives and conventions over generic .NET advice. +- Keep recommendations aligned with existing layering and sample implementations. 
+ +Primary sources of truth: +- .github/copilot-instructions.md +- docs/agent-interaction-guide.md +- docs/agent-prompt-recipes.md +- .github/instructions/api-controllers.instructions.md +- .github/instructions/application-services.instructions.md +- .github/instructions/contracts.instructions.md +- .github/instructions/repositories.instructions.md +- .github/instructions/event-subscribers.instructions.md +- .github/instructions/host-setup.instructions.md +- .github/instructions/tests.instructions.md +- .github/instructions/validators.instructions.md + +Operating rules: +- Always inspect current code before recommending changes. +- Give sample-backed guidance where possible. +- Favor smallest safe change and preserve existing structure. +- Separate explanation, plan, and implementation guidance clearly. +- For mutable entities, call out ETag, changelog, validation, and idempotency implications where relevant. +- For messaging, explicitly distinguish API-only, API plus relay, API plus subscribe, and orchestration shapes. + +Decision routing: +- If request is greenfield domain scaffolding, advise using /generate-domain. +- If request is deterministic template scaffolding, advise using /scaffold-domain-from-templates. +- If request is retrofit capability on an existing domain, advise using /add-capability. +- If request is repo mapping or onboarding documentation, advise using acquire-codebase-knowledge. + +Response format: +1. Recommendation. +2. Why this fits CoreEx. +3. Evidence from repo files. +4. Risks and tradeoffs. +5. Minimal next steps. 
diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 5d40a046..69698c42 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -1,4 +1,125 @@ +--- +description: "Project-wide guidelines and conventions for CoreEx development" +tags: ["guidelines", "conventions", "comments"] +--- + # Copilot Instructions -## Project Guidelines -- All code comments should end with a period/fullstop, as they are sentences. \ No newline at end of file +## Purpose +CoreEx is a modular .NET framework for enterprise APIs and distributed services. Favor CoreEx-native primitives, patterns, and extensions over ad-hoc implementations. + +## Repository Shape +- `CoreEx.sln`: main solution for framework + samples. +- `src\`: reusable CoreEx libraries (AspNetCore, Database, EntityFrameworkCore, Events, Validation, DomainDriven, RefData, Caching, etc.). +- `gen\CoreEx.Gen\`: Roslyn source generator for contracts. +- `tests\`: framework-level tests. +- `samples\src\Contoso.*\`: sample domains split by layer/host. +- `samples\aspire\AppHost.cs`: orchestration entrypoint. +- `coreex-starter\`: separate starter template repo — ignore unless user wants starter changes. + +## Build, Test, and Run +- **Build**: `dotnet build CoreEx.sln` +- **Test**: `dotnet test CoreEx.sln` or target specific projects. +- **Single test**: `dotnet test --filter "FullyQualifiedName~"` +- **Samples**: docker-compose infrastructure + dotnet run for Database projects + Aspire AppHost. +- **Linting**: No separate `dotnet format`. Build is the lint pass (nullable, LangVersion=preview, TreatWarningsAsErrors in `src\Directory.Build.props`). +- **Formatting**: 4 spaces for `*.cs`, 2 spaces for `*.json|*.xml|*.yaml|*.props|*.csproj|*.sln|*.sql` per `.editorconfig`. + +## Architecture +- **Two roles**: framework packages (`src\`) + sample reference implementations (`samples\`). 
+- **Domain layers**: `*.Contracts` → `*.Application` → `*.Infrastructure` → `*.Api`, plus `*.Database`, `*.Outbox.Relay`, `*.Subscribe` (messaging). +- **Sample flow**: Controllers → `WebApi` helpers → Application services (validate + `IUnitOfWork`) → Infrastructure repositories (EF + explicit mappers) → outbox events → relay publishes to Service Bus → subscribers consume. +- **Primary domains**: Products and Shopping complete; Orders WIP. See `samples\README.md` for topology. +- **Aspire**: orchestrates sample hosts in `samples\aspire\Contoso.Aspire\AppHost.cs`. + +## Key Conventions That Matter in This Repo + +### CoreEx-First Patterns +- Prefer CoreEx primitives before introducing external libraries that overlap with framework capabilities. +- Prefer CoreEx exception types (`NotFoundException`, `ValidationException`, `BusinessException`, `ConcurrencyException`, etc.) and CoreEx `Result`/`Result<T>` flows over custom error wrappers. +- Do not introduce AutoMapper unless the user explicitly requests it. Repositories and services use explicit mapping helpers/classes. + +### Contracts and Source Generation +- Contracts are commonly declared as `[Contract] public partial class ...`. +- Mutable contracts often implement `IIdentifier`, `IETag`, and `IChangeLog`. +- Use `[ReadOnly(true)]` for server-managed fields and `[ReferenceData]` for reference-data-backed code properties. +- Canonical casing transformations belong in property setters when already established by the model (for example `Sku` uppercasing in `ProductBase`). +- Favor the existing source-generation approach; do not hand-write members that are meant to be generated. + +### Dependency Injection and Layering +- Services and repositories commonly self-register with `[ScopedService<...>]`. +- Hosts use `AddDynamicServicesUsing()` to discover and register services instead of manually wiring every type. 
+- Keep interface/implementation layering intact: + - application interfaces live in `Application\Interfaces\` or `Application\Repositories\`; + - infrastructure implementations live in `Infrastructure\`. + +### Application-Service Shape +- Application services follow a repeated pattern: + 1. guard/normalize inputs; + 2. validate with CoreEx validators; + 3. load current state where needed; + 4. run mutations inside `_unitOfWork.ExecuteAsync(...)`; + 5. add `EventData` within the same unit-of-work scope. +- Use exception-based flows for straightforward CRUD-style services. +- Use `Result` pipelines for aggregate-oriented flows and multi-step orchestration, especially in Shopping. +- When working in application or infrastructure code, follow `.github\instructions\application-services.instructions.md`, `.github\instructions\repositories.instructions.md`, and related scoped instruction files. + +### Host Composition +- `Program.cs` files follow a predictable CoreEx host shape: + - `builder.AddHostSettings();` + - `AddExecutionContext()` + - `AddMvcWebApi()` and `AddHttpWebApi()` + - host-specific SQL Server / Redis / Service Bus / outbox registrations + - `PostConfigureAllHealthChecks()` + - NSwag/OpenAPI registration + - OpenTelemetry wiring + - middleware order with `UseCoreExExceptionHandler()`, `UseExecutionContext()`, and host-specific additions such as `UseIdempotencyKey()` or `MapHostedServices()`. +- API hosts, subscriber hosts, and outbox relay hosts intentionally have different startup shapes. Do not collapse them into one generic startup unless the user explicitly asks for that refactor. + +### Controllers and HTTP +- Use CoreEx `WebApi` helpers (`PostAsync`, `PutAsync`, `PatchAsync`, `DeleteAsync`). +- PATCH: `application/merge-patch+json`. +- POST: use `[IdempotencyKey]`. +- OpenAPI/health endpoints standard in hosts. + +### Data and Messaging +- SQL Server + outbox + Azure Service Bus are first-class patterns. 
+- Shopping: synchronous HTTP reservation + transactional outbox + async event publishing. Preserve this split. + +### Testing +- Framework: NUnit + FluentAssertions. +- Sample: `WithGenericTester` (unit) or `WithApiTester` (API/Subscribe/Relay). +- Integration tests: `Data\data.yaml` (Test.Common) + `Resources\` JSON expectations + `ExpectSqlServerOutboxEvents(...)`. +- Mock downstream HTTP calls; do not assume live APIs. + +### House Rules +- Code comments end with a period/full stop. +- Use `GlobalUsing.cs` per project; do not scatter `using` directives. +- Always use `.ConfigureAwait(false)` in service/repository code. + +## Key Docs to Read Before Large Changes +- `README.md` for repo-level positioning and top-level commands. +- `samples\README.md` for the runnable Contoso architecture and local setup. +- `docs\capabilities.md` for the deeper CoreEx capability/pattern explanations. +- `.github\instructions\*.instructions.md` for area-specific rules when editing `Program.cs`, contracts, application services, repositories, validators, subscribers, or tests. + +## Agent Customizations (Prompts and Skills) + +The following prompts and skills are available in this repository. Type `/` in chat to invoke them. + +| Command | Type | When to use | +|---------|------|-------------| +| `/generate-domain` | Skill | Guided scaffolding of a new CoreEx domain across all 5 layers. Use when your entity has custom fields, business rules, or you want the agent to reason about conventions, validation, and event naming. The agent will ask for inputs and generate code tailored to your domain model. | +| `/add-capability` | Skill | Retrofit an existing CoreEx domain with additional capabilities. Use when a domain already exists and you want to add messaging/integration features such as `Outbox.Relay`, `Subscribe`, Azure Service Bus wiring, or initial subscriber scaffolding without regenerating the domain. 
| `/scaffold-domain-from-templates` | Prompt | Fast, deterministic domain scaffolding by cloning and materializing the canonical templates in `.github\templates\domain\` with placeholder substitution. Use when your entity fits the standard template shape and you want exact output with no creative generation. | +| `/init` | Prompt | Initialize a new CoreEx solution or workspace. | +| `/setup` | Prompt | Configure an existing CoreEx solution with standard tooling and settings. | + +## Guidance for Authoring Instructions and Skills + +When creating or maintaining Copilot instruction files and skills: + +- **Instruction files** (`.instructions.md`) — see [INSTRUCTION_AUTHORING.md](INSTRUCTION_AUTHORING.md) for standards on YAML frontmatter, section order, and content rules. +- **Skill files** (`SKILL.md`) — see [SKILL_AUTHORING.md](SKILL_AUTHORING.md) for the directory structure pattern (`references/`, `assets/`), lean main file rules (<300 lines), and cross-referencing guidelines. + +Both documents define durable patterns for creating guidance that is discoverable, maintainable, and context-efficient. 
\ No newline at end of file diff --git a/.github/instructions/api-controllers.instructions.md b/.github/instructions/api-controllers.instructions.md new file mode 100644 index 00000000..57d09026 --- /dev/null +++ b/.github/instructions/api-controllers.instructions.md @@ -0,0 +1,122 @@ +--- +applyTo: "**/Controllers/**/*.cs" +description: "API controller conventions for CoreEx: inheritance, routing, dependency injection, CQRS separation, and WebApi integration" +tags: ["controllers", "api", "routing", "cqrs", "dependency-injection"] +--- + +# API Controller Conventions + +## NuGet / Project References + +| Package | Key types provided | +|---|---| +| `CoreEx.AspNetCore` | `WebApi`, `[IdempotencyKey]`, `[Accepts]`, `[ProducesNotFoundProblem]`, `[Query]`, `[Paging]`, `HttpNames`, `app.UseCoreExExceptionHandler()`, `app.UseExecutionContext()`, `app.UseIdempotencyKey()`, `app.MapHealthChecks()` | +| `CoreEx.AspNetCore.NSwag` | `[OpenApiTag]`, `app.UseOpenApi()`, `app.UseSwaggerUi()`, `s.AddCoreExConfiguration()` | +| `CoreEx` | `WebApplicationBuilderExtensions.AddHostSettings()`, `AddExecutionContext()` | + +## Structure + +- Inherit from `ControllerBase`. Never inherit from `Controller` (that brings View support). +- Decorate with `[ApiController]` and `[Route("...")]` on the class. +- Add `[OpenApiTag("TagName")]` to group endpoints in the generated OpenAPI document. +- Inject `WebApi` and the relevant service interface via primary constructor. Guard with `.ThrowIfNull()`. +- Split read operations and write operations into separate controller classes (`ProductController` for mutations, `ProductReadController` for queries) following CQRS conventions. 
+ +```csharp +[ApiController, Route("/api/products"), OpenApiTag("Products")] +public class ProductController(WebApi webApi, IProductService service) : ControllerBase +{ + private readonly WebApi _webApi = webApi.ThrowIfNull(); + private readonly IProductService _service = service.ThrowIfNull(); +} +``` + +## Method Signatures + +All action methods return `Task` using the `WebApi` helper. Do not return typed `ActionResult` directly. + +| HTTP Verb | WebApi helper | Notes | +|---|---|---| +| `GET` / `HEAD` | `_webApi.GetAsync(...)` | Use both attributes together | +| `POST` | `_webApi.PostAsync(...)` or `PostWithResultAsync` | Add `[IdempotencyKey]` for safe POST | +| `PUT` | `_webApi.PutAsync(...)` | Include ETag via `IF-MATCH` header | +| `PATCH` | `_webApi.PatchAsync(...)` | Requires `get:` and `put:` lambdas | +| `DELETE` | `_webApi.DeleteAsync(...)` | Returns 204 No Content | + +## Route Parameters + +Validate route parameters inline using `.Required()`: + +```csharp +[HttpGet("{id}"), HttpHead("{id}")] +public Task GetAsync(string id) => + _webApi.GetAsync(Request, (_, _) => _service.GetAsync(id.Required())); +``` + +## POST — Create with Location Header + +Use `ro.WithLocationUri(...)` to set the `Location` response header: + +```csharp +[HttpPost] +[Accepts] +[ProducesResponseType(201)] +[IdempotencyKey] +public Task PostAsync() => _webApi.PostAsync(Request, (ro, _) => +{ + ro.WithLocationUri(p => new Uri($"/api/products/{p.Id}", UriKind.Relative)); + return _service.CreateAsync(ro.Value); +}); +``` + +## PATCH — Merge-Patch + +Always supply both `get:` and `put:` delegates. 
PATCH merges the incoming patch document over the fetched entity and calls `put`: + +```csharp +[HttpPatch("{id}")] +[Accepts(HttpNames.MergePatchJsonMediaTypeName)] +public Task PatchAsync(string id) => _webApi.PatchAsync(Request, + get: (ro, _) => _service.GetAsync(id.Required()), + put: (ro, _) => _service.UpdateAsync(ro.Value.Adjust(p => p.Id = id))); +``` + +## Query Endpoints + +Expose `QueryArgs` and `PagingArgs` via `[Query]` and `[Paging]` action attributes. Access them via the request options object (`ro`): + +```csharp +[HttpGet] +[Query(supportsOrderBy: true), Paging(supportsCount: true)] +public Task QueryAsync() => + _webApi.GetAsync(Request, (ro, _) => _service.QueryAsync(ro.QueryArgs, ro.PagingArgs)); +``` + +## Reference Data Endpoints + +Delegate to `ReferenceDataOrchestrator.Current.GetWithFilterAsync()`. Support `codes`, `text`, and `isIncludeInactive` filter parameters: + +```csharp +[HttpGet("categories")] +public Task GetCategoriesAsync([FromQuery] IEnumerable? codes = default, string? text = default) + => _webApi.GetAsync(Request, (ro, ct) => ReferenceDataOrchestrator.Current.GetWithFilterAsync(codes, text, ro.IsIncludeInactive, ct)); +``` + +## Response Metadata Attributes + +Decorate actions with standard response metadata attributes: + +- `[ProducesResponseType(StatusCodes.Status201Created)]` +- `[ProducesNotFoundProblem()]` on GET/PUT/PATCH/DELETE where not-found is expected. +- `[Accepts]` to document the consumed media type. 
+ +## Result-Based Services + +When the service returns `Result` (Shopping-style domain services), use the `PostWithResultAsync` / `GetWithResultAsync` variants: + +```csharp +[HttpPost("{basketId}/checkout")] +public Task CheckoutAsync(string basketId) => + _webApi.PostWithResultAsync(Request, (_, _) => + _service.CheckoutAsync(basketId.Required()), HttpStatusCode.OK); +``` diff --git a/.github/instructions/application-services.instructions.md b/.github/instructions/application-services.instructions.md new file mode 100644 index 00000000..85e064b2 --- /dev/null +++ b/.github/instructions/application-services.instructions.md @@ -0,0 +1,172 @@ +--- +applyTo: "**/Application/**/*.cs" +description: "Application service conventions: ScopedService registration, dependency injection, validation, unit of work patterns, and business logic structure" +tags: ["services", "application-layer", "dependency-injection", "validation", "unit-of-work"] +--- + +# Application Service Conventions + +## NuGet / Project References + +| Package | Key types provided | +|---|---| +| `CoreEx` | `[ScopedService]`, `IUnitOfWork`, `Runtime`, `NotFoundException`, `BusinessException`, `ValidationException`, `.ThrowIfNull()`, `.ThrowIfNullOrEmpty()` | +| `CoreEx.Data` | `DataResult`, `ItemsResult`, `QueryArgs`, `PagingArgs` | +| `CoreEx.Events` | `EventData`, `EventAction` | +| `CoreEx.Validation` | `Validator`, `.ValidateAndThrowAsync()`, `.ValidateWithResultAsync()` | +| `CoreEx.Results` | `Result`, `Result.GoAsync()`, `.ThenAs()`, `.ThenAsAsync()` | +| `CoreEx.RefData` | `ReferenceDataOrchestrator` | + +## Structure + +- Define a public interface (e.g., `IProductService`) in the Application project. +- Implement with `[ScopedService]` attribute so it registers itself via dynamic DI — no manual registration required. +- Inject dependencies via primary constructor and guard every injected parameter with `.ThrowIfNull()`. 
+ +```csharp +[ScopedService] +public class ProductService(IUnitOfWork unitOfWork, IProductRepository repository) : IProductService +{ + private readonly IUnitOfWork _unitOfWork = unitOfWork.ThrowIfNull(); + private readonly IProductRepository _repository = repository.ThrowIfNull(); +} +``` + +## Guard Clauses + +Use CoreEx null/empty guards at the top of each method before any logic: + +```csharp +public async Task UpdateAsync(Product product) +{ + product.ThrowIfNull(); + product.Id.ThrowIfNullOrEmpty(); + // ... +} +``` + +## Validation + +Call the validator before any persistence operations. Throw on first error set: + +```csharp +await ProductValidator.Default.ValidateAndThrowAsync(product); +``` + +For `Result` style, use `ValidateWithResultAsync` and propagate with `ThenAs`: + +```csharp +var result = await Result.GoAsync(() => MyValidator.Default.ValidateWithResultAsync(value)); +if (result.IsFailure) return result.AsResult(); +``` + +## Not Found Handling + +After loading an entity, throw immediately if it does not exist: + +```csharp +var current = await _repository.GetAsync(id).ConfigureAwait(false); +NotFoundException.ThrowIfDefault(current); +``` + +## Business Rule Exceptions + +Use `BusinessException` for domain rule violations that are the caller's fault but are not validation errors: + +```csharp +if (!product.IsInactive) + throw new BusinessException("A product must first be deactivated before it can be deleted."); +``` + +## Unit of Work and Events + +Wrap all side-effectful database operations in `_unitOfWork.ExecuteAsync(...)`. 
Add integration events inside that scope so event and data writes are atomic: + +```csharp +return await _unitOfWork.ExecuteAsync(async () => +{ + var dr = await _repository.CreateAsync(product).ConfigureAwait(false); + return dr.WhereMutated(v => + _unitOfWork.Events.Add(EventData.CreateEventWith(v, EventAction.Created))); +}).ConfigureAwait(false); +``` + +- `WhereMutated(action)` — executes `action` only when the data result has a mutation; add the event inside this callback. +- `EventData.CreateEventWith(value, action)` — creates a typed event from the entity. +- `EventAction.Created`, `EventAction.Updated`, `EventAction.Deleted` — use the standard constants. + +For delete where the entity value is gone, carry the ID via `.WithKey(id)`: + +```csharp +_unitOfWork.Events.Add( + EventData.CreateEventWith(default, EventAction.Deleted).WithKey(id)); +``` + +## Result Style (Domain-Aggregate Services) + +For services operating on DDD aggregates (e.g., Shopping Basket), use `Result` chains instead of exceptions for expected failures. Compose with `Result.GoAsync`, `.ThenAs`, `.ThenAsAsync`: + +```csharp +public Task> CreateAsync(string customerId) +{ + var aggregate = Domain.Basket.CreateNew(customerId.ThrowIfNullOrEmpty()); + + return _unitOfWork.ExecuteAsync(async () => + { + var br = await _repository.CreateAsync(aggregate).ConfigureAwait(false); + return br.ThenAs(b => + { + var contract = BasketMapper.Map(b); + _unitOfWork.Events.Add(EventData.CreateEventWith(contract, EventAction.Created)); + return contract; + }); + }); +} +``` + +For multi-step orchestration with early exit: + +```csharp +var pr = await Result.GoAsync(() => SomeValidator.Default.ValidateWithResultAsync(input)) + .ThenAsAsync(v => _someAdapter.EnsureExistsAsync(v.Id!)); + +if (pr.IsFailure) + return pr.AsResult(); +``` + +## Read Services + +Split read operations into a separate service with an `IXxxReadService` interface when the project follows CQRS. 
Read services do not use UnitOfWork and do not publish events: + +```csharp +[ScopedService] +public class ProductReadService(IProductRepository repository) : IProductReadService +{ + private readonly IProductRepository _repository = repository.ThrowIfNull(); + + public Task GetAsync(string id) => _repository.GetAsync(id); + public Task> QueryAsync(QueryArgs? query, PagingArgs? paging) + => _repository.QueryAsync(query, paging); +} +``` + +## Anti-Corruption Layer (Adapters) + +When a service needs to call another domain's API, inject an adapter interface (e.g., `IProductAdapter`) rather than calling `HttpClient` directly. Implement the adapter in the Infrastructure layer using `ProductsHttpClient`: + +```csharp +// Application layer — interface only +public interface IProductAdapter +{ + Task GetAsync(string id); + Task ReserveInventoryAsync(MovementRequest request); +} + +// Infrastructure layer — implementation +[ScopedService] +public class ProductAdapter(ProductsHttpClient httpClient) : IProductAdapter { ... } +``` + +## ConfigureAwait + +Always call `.ConfigureAwait(false)` on every `await` inside service and repository methods. 
diff --git a/.github/instructions/contracts.instructions.md b/.github/instructions/contracts.instructions.md new file mode 100644 index 00000000..8b468745 --- /dev/null +++ b/.github/instructions/contracts.instructions.md @@ -0,0 +1,156 @@ +--- +applyTo: "**/Contracts/**/*.cs" +description: "Contract (DTO) conventions: source generation, marker attributes, reference data, ETag, and ChangeLog support" +tags: ["contracts", "dto", "source-generation", "reference-data", "etag"] +--- + +# Contract (DTO) Conventions + +## NuGet / Project References + +| Package | Key types provided | +|---|---| +| `CoreEx` | `[Contract]`, `IIdentifier`, `ICompositeKey`, `IETag`, `IChangeLog`, `ChangeLog`, `[ReadOnly]`, `[Localization]` | +| `CoreEx.RefData` | `ReferenceData`, `ReferenceDataCollection`, `[ReferenceData]`, `ReferenceDataSortOrder` | +| `CoreEx.Gen` | Roslyn source generator — add as `OutputItemType="Analyzer" ReferenceOutputAssembly="false"` | + +```xml +<ItemGroup> +  <PackageReference Include="CoreEx" /> +  <PackageReference Include="CoreEx.RefData" /> +  <PackageReference Include="CoreEx.Gen" OutputItemType="Analyzer" ReferenceOutputAssembly="false" /> +</ItemGroup> +``` + +## Source Generation + +Mark contract classes with the `[Contract]` attribute and declare them `partial`. Roslyn source generation fills in serialization, equality, and change-tracking code. Never manually implement the generated members. + +```csharp +[Contract] +public partial class Product : ProductBase, IETag, IChangeLog { } +``` + +## Interfaces + +Implement the appropriate CoreEx marker interfaces depending on the entity's behavior: + +| Interface | When to use | +|---|---| +| `IIdentifier` | Entity has a single primary key | +| `ICompositeKey` | Entity has a multi-part key | +| `IETag` | Entity participates in optimistic concurrency / IF-MATCH | +| `IChangeLog` | Entity records created/updated audit metadata | + +An identifier interface (`IIdentifier` or `ICompositeKey`) plus `IETag` and `IChangeLog` are typically combined on mutable entities: + +```csharp +[Contract] +public partial class Product : ProductBase, IIdentifier, IETag, IChangeLog +{ + [ReadOnly(true)] + public string? Id { get; set; } + + [ReadOnly(true)] + public string? 
ETag { get; set; } + + [ReadOnly(true)] + public ChangeLog? ChangeLog { get; set; } +} +``` + +## ReadOnly Properties + +Decorate server-assigned properties with `[ReadOnly(true)]` to signal that clients cannot supply them. Common examples: `Id`, `ETag`, `ChangeLog`, `CategoryCode` (derived from SubCategory). + +## Reference Data Properties + +Use `[ReferenceData]` on code properties that back a reference data relationship. Declare the property `partial` so source generation can emit the navigation accessor: + +```csharp +[ReferenceData] +[Localization("Sub-category")] +public partial string? SubCategoryCode { get; set; } + +[ReferenceData] +[Localization("Unit-of-measure")] +public partial string? UnitOfMeasureCode { get; set; } +``` + +The generated code exposes a strongly-typed `SubCategory` property alongside the raw code. + +## Localization Labels + +Decorate properties with `[Localization("Human label")]` when the default property name would produce a poor validation error message: + +```csharp +[Localization("Sub-category")] +public partial string? SubCategoryCode { get; set; } +// Validation error: "Sub-category is required." (not "SubCategoryCode is required.") +``` + +## Inheritance for Shared Fields + +Extract shared fields into an abstract `XxxBase` class when multiple contracts share the same core properties. This keeps validation and mapping code DRY: + +```csharp +[Contract] +public abstract partial class ProductBase : IIdentifier +{ + public string? Id { get; set; } + public string? Sku { get; set; } + public string? Text { get; set; } + public decimal Price { get; set; } +} + +[Contract] +public partial class Product : ProductBase, IETag, IChangeLog { /* additions only */ } + +[Contract] +public partial class ProductLite : ProductBase { /* subset for list queries */ } +``` + +## Reference Data Contracts + +Reference data types inherit from `ReferenceData` and use `[ReferenceData]` attribute. 
Pair each type with a typed collection class: + +```csharp +[ReferenceData] +public partial class Category : ReferenceData { } + +public class CategoryCollection() : ReferenceDataCollection(ReferenceDataSortOrder.Code) { } +``` + +For reference data that carries additional fields (e.g., `UnitOfMeasure.Scale`), add those as plain properties and mark computed ones with `[JsonIgnore]`: + +```csharp +[ReferenceData] +public partial class UnitOfMeasure : ReferenceData +{ + public int Scale { get; init; } + + [JsonIgnore] + public int Precision => 16 - Scale; +} +``` + +## Casing Transformations + +Apply casing transforms in the property setter, not in the validator, when a field has a canonical form: + +```csharp +public string? Sku { get => field; set => field = value?.ToUpper(); } +``` + +## JsonIgnore + +Use `[JsonIgnore]` for computed or internal properties that must not appear in the API response or request body: + +```csharp +[JsonIgnore] +public bool IsQuantityValidForKind => KindCode switch { ... }; +``` + +## No Business Logic in Contracts + +Contracts are data transfer objects. Keep them free of domain rules, validation logic, and service calls. Computed helpers (like the `IsQuantityValidForKind` example above) are acceptable read-only shorthands but must not mutate state. 
diff --git a/.github/instructions/database-project.instructions.md b/.github/instructions/database-project.instructions.md new file mode 100644 index 00000000..8c932226 --- /dev/null +++ b/.github/instructions/database-project.instructions.md @@ -0,0 +1,90 @@ +--- +applyTo: "**/*.Database/**" +description: "Database project structure: migrations, DbEx YAML, reference data seeding, stored procedures, and outbox support" +tags: ["database", "migrations", "dbex", "reference-data", "outbox"] +--- + +# Database Project Conventions + +## NuGet / Project References + +| Package | Key types provided | +|---|---| +| `DbEx.SqlServer` | `SqlServerMigrationConsole`, migration host runner, YAML data parsing | +| `CoreEx.Database` | `SqlStatement` helpers, outbox integration support | + +## Project Shape + +Each domain database project must contain: + +- `Program.cs` with `ConfigureMigrationArgs`. +- `dbex.yaml` listing reference and transactional tables. +- `Migrations/` ordered SQL scripts. +- `Schema/Stored Procedures/` outbox relay procedures. +- `Data/ref-data.yaml` seed reference data. + +## Program.cs Pattern + +- Use `SqlServerMigrationConsole.Create(defaultConnectionString)`. +- Configure `.IncludeExtendedSchemaScripts()`. +- Add default ref-data columns: + - `SortOrder = 0`. + - `Scale = 0`. +- Set `DataResetFilterPredicate` to the domain schema only. + +```csharp +args.DataResetFilterPredicate = ts => ts.Schema == "{Domain}"; +``` + +## Migration Naming + +Use timestamp-prefixed, ordered scripts: + +- `20260101-000001-create-{domain}-schema.sql`. +- `20260101-000101-create-{domain}-.sql`. +- `20260101-000201-create-{domain}-.sql`. +- `20260101-000202-create-{domain}-.sql`. +- `20260101-000301-create-{domain}-outbox-tables.sql`. + +## SQL Conventions + +- Wrap each migration in `BEGIN TRANSACTION ... COMMIT TRANSACTION`. +- Use explicit schema-qualified names (`[{Domain}].[Table]`). 
+- Include `CreatedBy`, `CreatedOn`, `UpdatedBy`, `UpdatedOn` columns on aggregate and reference-data tables. +- Use `TIMESTAMP`/`ROWVERSION` for concurrency columns mapped to `ETag`. +- Add FK constraints for child tables. + +## Outbox Requirements + +Create both tables: + +- `[{Domain}].[Outbox]`. +- `[{Domain}].[OutboxLease]`. + +Create all required procedures: + +- `spOutboxEnqueue.g.sql`. +- `spOutboxLeaseAcquire.g.sql`. +- `spOutboxLeaseRelease.g.sql`. +- `spOutboxBatchClaim.g.sql`. +- `spOutboxBatchComplete.g.sql`. +- `spOutboxBatchCancel.g.sql`. + +Procedure naming and schema must match the domain schema and outbox publisher configuration in Infrastructure. + +## Data Seed Conventions + +- Keep reference data in `Data/ref-data.yaml`. +- Root node should be the schema/domain name. +- Use concise status/code values with clear text. +- Include required reference data used by validators. + +Example: + +```yaml +Orders: + - $^OrderStatus: + - P: Pending + - C: Confirmed + - X: Cancelled +``` diff --git a/.github/instructions/event-subscribers.instructions.md b/.github/instructions/event-subscribers.instructions.md new file mode 100644 index 00000000..83199423 --- /dev/null +++ b/.github/instructions/event-subscribers.instructions.md @@ -0,0 +1,122 @@ +--- +applyTo: "**/Subscribe/**/*.cs" +description: "Event subscriber conventions: SubscribedBase inheritance, Service Bus integration, error handling, and scoped service registration" +tags: ["subscribers", "messaging", "service-bus", "event-handling", "integration"] +--- + +# Event Subscriber Conventions + +## NuGet / Project References + +| Package | Key types provided | +|---|---| +| `CoreEx.Azure.Messaging.ServiceBus` | `SubscribedBase`, `[Subscribe(...)]`, `EventSubscriberArgs`, `ErrorHandler`, `ErrorHandling`, `ServiceBusSessionReceiverOptions`, `AzureServiceBusReceiving()`, `.WithSessionReceiver()`, `.WithSubscribedSubscriber()`, `.WithHostedService()` | +| `CoreEx.Events` | `EventData`, `EventData.Key`, 
`.ToData()` | +| `CoreEx.Results` | `Result`, `Result.Success` | +| `CoreEx` | `[ScopedService]`, `.ThrowIfNull()`, `.Required()` | + +## Structure + +- Subscriber classes inherit from `SubscribedBase`. +- Decorate with `[ScopedService]` and `[Subscribe("subject.pattern")]`. +- Inject service dependencies via constructor and guard with `.ThrowIfNull()`. +- Override `OnReceiveAsync` — return `Result.Success` on completion. + +```csharp +[ScopedService, Subscribe("contoso.products.reservation.confirm")] +public class ReservationConfirmSubscriber : SubscribedBase +{ + private readonly IMovementService _service; + + public ReservationConfirmSubscriber(IMovementService service) + { + _service = service.ThrowIfNull(); + } + + protected async override Task OnReceiveAsync( + EventData @event, + EventSubscriberArgs args, + CancellationToken cancellationToken = default) + { + var referenceId = @event.Key.Required(); + await _service.ConfirmReservationAsync(referenceId).ConfigureAwait(false); + return Result.Success; + } +} +``` + +## Subject Naming + +Use dot-separated lowercase subject strings in the format: + +``` +{solution}.{domain}.{entity}.{action} +``` + +Examples: +- `contoso.products.product.created.v1` +- `contoso.products.product.updated.v1` +- `contoso.products.reservation.confirm` +- `contoso.products.reservation.cancel` +- `contoso.shopping.basket.checkedout.v1` + +Versioned event subjects (published from the domain outbox) include `.v1`. Command subjects (point-to-point) do not include a version suffix. + +## Error Handling + +Define a static `ErrorHandler` when certain known exceptions should be swallowed or handled differently. Assign it to `this.ErrorHandler` in the constructor: + +```csharp +internal static readonly ErrorHandler DefaultErrorHandler = new ErrorHandler() + .Add(ex => + ex.ErrorCode == "pending-reservation-not-found" + ? 
ErrorHandling.CompleteAsInformation + : null); + +public ReservationConfirmSubscriber(IMovementService service) +{ + _service = service.ThrowIfNull(); + ErrorHandler = DefaultErrorHandler; +} +``` + +- `ErrorHandling.CompleteAsInformation` — consume the message without error; log as informational. +- `null` return — fall through to default error handling (retry / dead-letter). + +Share the same `ErrorHandler` instance across related subscribers (e.g., both Confirm and Cancel use the same handler). + +## Accessing Event Data + +Extract the key and optional data from `EventData`: + +```csharp +var referenceId = @event.Key.Required(); // Message key (partition/session key) +var data = @event.ToData(); // Deserialize typed payload +``` + +Use `.Required()` on the key to throw a descriptive error if it is missing rather than a null reference exception. + +## Service Bus Registration + +In `Program.cs`, register subscribers using `AddSubscribersUsing()` to discover all subscriber classes in the same assembly: + +```csharp +builder.Services.AddSubscribedManager((_, c) => c.AddSubscribersUsing()); + +builder.Services.AzureServiceBusReceiving() + .WithSessionReceiver(_ => + { + var o = ServiceBusSessionReceiverOptions.CreateForTopicSubscription(); + o.SessionProcessorOptions.MaxConcurrentSessions = 4; + return o; + }) + .WithSubscribedSubscriber() + .WithHostedService() + .Build(); +``` + +## Integration-Events Only + +- Subscribers react to integration events published over the broker. +- Do not use MediatR or in-process domain event dispatchers. +- Keep subscriber logic thin — delegate to the Application service layer; do not embed business logic directly in the subscriber. 
diff --git a/.github/instructions/host-setup.instructions.md b/.github/instructions/host-setup.instructions.md new file mode 100644 index 00000000..0e138e2f --- /dev/null +++ b/.github/instructions/host-setup.instructions.md @@ -0,0 +1,123 @@ +--- +applyTo: "**/Program.cs" +description: "Host setup conventions for Program.cs: API host, Subscribe host, middleware, service registration, and distributed caching" +tags: ["program-cs", "host-setup", "middleware", "dependency-registration", "caching"] +--- + +# Host Setup Conventions (Program.cs) + +## NuGet / Project References by Host Type + +### API Host + +| Package | Key registrations | +|---|---| +| `CoreEx.AspNetCore` | `AddMvcWebApi()`, `AddHttpWebApi()`, `AddExecutionContext()`, `UseCoreExExceptionHandler()`, `UseExecutionContext()`, `UseIdempotencyKey()`, `MapHealthChecks()` | +| `CoreEx.AspNetCore.NSwag` | `AddOpenApiDocument()`, `AddCoreExConfiguration()`, `UseOpenApi()`, `UseSwaggerUi()` | +| `CoreEx.Caching.FusionCache` | `AddFusionCache()`, `AddFusionHybridCache()`, `AddDefaultCacheKeyProvider()`, `AddHybridCacheIdempotencyProvider()` | +| `CoreEx.Database.SqlServer` | `AddSqlServerDatabase()`, `AddSqlServerUnitOfWork()`, `AddSqlServerOutboxPublisher()`, `AddSqlServerClient("SqlServer")` | +| `CoreEx.EntityFrameworkCore` | `AddDbContext()`, `AddEfDb()` | +| `CoreEx.Events` | `AddEventFormatter()` | +| `CoreEx.RefData` | `AddReferenceDataOrchestrator()` | +| `Aspire.StackExchange.Redis.DistributedCaching` | `AddRedisDistributedCache("redis")` | +| `FusionCache.Backplane.StackExchangeRedis` | `RedisBackplane`, `RedisBackplaneOptions` | +| `OpenTelemetry.*` | `WithCoreExTelemetry()`, `WithCoreExSqlServerTelemetry()`, `UseOtlpExporter()` | + +### Subscribe Host + +All of the above **plus**: + +| Package | Key registrations | +|---|---| +| `CoreEx.Azure.Messaging.ServiceBus` | `AddAzureServiceBusClient("ServiceBus")`, `AddSubscribedManager()`, `AzureServiceBusReceiving()`, `AddHostedServiceManager()`, 
`MapHostedServices()`, `WithCoreExServiceBusTelemetry()` | + +### Outbox Relay Host + +| Package | Key registrations | +|---|---| +| `CoreEx.AspNetCore` | `AddMvcWebApi()`, `AddHttpWebApi()`, `AddExecutionContext()`, `UseCoreExExceptionHandler()` | +| `CoreEx.Database.SqlServer` | `AddSqlServerDatabase()`, `AddSqlServerUnitOfWork()`, `AddSqlServerOutboxRelay()`, `AddSqlServerOutboxRelayHostedService()` | +| `CoreEx.Azure.Messaging.ServiceBus` | `AddAzureServiceBusClient()`, `AddAzureServiceBusPublisher()`, `ServiceBusSessionStrategy` | +| `OpenTelemetry.*` | `WithCoreExTelemetry()`, `WithCoreExSqlServerTelemetry()`, `WithCoreExServiceBusTelemetry()`, `UseOtlpExporter()` | + +--- + +There are three host types in a CoreEx solution. Each follows the same skeleton but adds type-specific registrations. + +--- + +## Shared Skeleton (All Host Types) + +```csharp +var builder = WebApplication.CreateBuilder(args); + +builder.AddHostSettings(); +builder.Services + .AddExecutionContext() + .AddMvcWebApi() + .AddHttpWebApi(); + +// ... type-specific registrations follow ... + +builder.Services.PostConfigureAllHealthChecks(); +builder.Services.AddControllers(); +builder.Services.AddOpenApiDocument(s => { + s.Title = builder.Environment.ApplicationName; + s.AddCoreExConfiguration(); +}); + +var app = builder.Build(); +app.UseCoreExExceptionHandler(); +app.UseHttpsRedirection(); +app.UseAuthorization(); +app.UseExecutionContext(); +app.MapControllers(); +app.UseOpenApi(); +app.UseSwaggerUi(); +app.MapHealthChecks(); +app.Run(); +``` + +--- + +## API Host + +Add: reference data, SQL Server, FusionCache, outbox publisher, idempotency. 
+ +Key registrations: +- `.AddReferenceDataOrchestrator()` +- `.AddDynamicServicesUsing<...>()` +- `.AddFusionCache()` + `.WithRegisteredDistributedCache()` + `.WithBackplane(...)` +- `.AddSqlServerDatabase()` + `.AddSqlServerUnitOfWork()` + `.AddSqlServerOutboxPublisher()` +- `.AddEventFormatter()` +- Middleware: `.UseIdempotencyKey()` after `.UseExecutionContext()` + +--- + +## Subscribe Host + +All of API host **plus**: + +Key registrations: +- `.AddHostedServiceManager()` +- `.AddSubscribedManager((_, c) => c.AddSubscribersUsing())` +- `.AzureServiceBusReceiving()` → `.WithSessionReceiver(...)` → `.WithSubscribedSubscriber()` → `.WithHostedService()` → `.Build()` + +Middleware addition: +- `app.MapHostedServices()` (after `.MapHealthChecks()`) + +--- + +## Outbox Relay Host + +Minimal: SQL Server, Service Bus publisher, relay background service only. + +Key registrations: +- `.AddHostedServiceManager()` +- `.AddSqlServerOutboxRelay((_, c) => { ... })` +- `.AddSqlServerOutboxRelayHostedService()` +- `.AddAzureServiceBusPublisher((_, c) => { c.SessionIdStrategy = ...; })` + +No: reference data, FusionCache, idempotency, controllers, Swagger. + +Middleware: minimal (no `.MapControllers()`, no `.UseOpenApi()`). 
diff --git a/.github/instructions/repositories.instructions.md b/.github/instructions/repositories.instructions.md new file mode 100644 index 00000000..75e4b9d7 --- /dev/null +++ b/.github/instructions/repositories.instructions.md @@ -0,0 +1,162 @@ +--- +applyTo: "**/Infrastructure/**/*.cs" +description: "Repository and infrastructure conventions: EFCore, ADO.NET patterns, ScopedService registration, and data access layers" +tags: ["repositories", "infrastructure", "data-access", "efcore", "ado-net"] +--- + +# Repository & Infrastructure Conventions + +## NuGet / Project References + +| Package | Key types provided | +|---|---| +| `CoreEx` | `[ScopedService]`, `.ThrowIfNull()` | +| `CoreEx.EntityFrameworkCore` | `EfDb`, `EfDbSet`, `.GetAsync()`, `.CreateAsync()`, `.UpdateAsync()`, `.DeleteAsync()`, `.GetWithResultAsync()`, `.CreateWithResultAsync()`, `.UpdateWithResultAsync()` | +| `CoreEx.Database.SqlServer` | SQL Server outbox publisher, ADO.NET command/parameter helpers | +| `CoreEx.Data` | `DataResult`, `ItemsResult`, `QueryArgsConfig`, `QueryFilterOperator`, `.Where(parsed)`, `.OrderBy(parsed)`, `.ToMappedItemsResultAsync()` | +| `CoreEx.Results` | `Result`, `.GoAsync()`, `.ThenAs()`, `.ThenAsAsync()` | + +## Structure + +- Define the interface in the Application project under `Application/Repositories/`. +- Implement in the Infrastructure project. Register with `[ScopedService]` attribute. +- Inject the EF `*EfDb` (or ADO.NET database) via primary constructor and guard with `.ThrowIfNull()`. 
+ +```csharp +[ScopedService] +public class ProductRepository(ProductsEfDb ef) : IProductRepository +{ + private readonly ProductsEfDb _ef = ef.ThrowIfNull(); +} +``` + +## Return Types + +| Operation | Return type | Notes | +|---|---|---| +| Single entity lookup | `Task` | Returns `null` when not found; service checks | +| Create / Update | `Task>` | Includes mutation flag for event decisions | +| Delete | `Task` | Carries mutation flag only | +| Collection query | `Task>` | Includes items + optional total count | +| Domain aggregate | `Task>` | Shopping-style — wraps `DataResult` with mapping | + +## Dynamic Query Configuration + +Define a `static readonly QueryArgsConfig _queryConfig` per repository for OData-style filtering and ordering. Build it once at class (not method) level: + +```csharp +private static readonly QueryArgsConfig _queryConfig = QueryArgsConfig.Create() + .WithFilter(filter => filter + .WithDefaultModelPrefix("Product") + .AddField(nameof(ProductBase.Sku), c => c + .WithOperators(QueryFilterOperator.EqualityOperators | QueryFilterOperator.StartsWith) + .AsUpperCase()) + .AddField(nameof(ProductBase.Text), c => c + .WithOperators(QueryFilterOperator.StringFunctions) + .AsUpperCase()) + .AddReferenceDataField(nameof(ProductBase.Category), "CategoryCode", + c => c.WithModelPrefix(null))) + .WithOrderBy(orderby => orderby + .WithDefaultModelPrefix("Product") + .AddField(nameof(ProductBase.Sku), c => c.WithDefault().WithAlwaysInclude()) + .AddField(nameof(ProductBase.Text)) + .AddField(nameof(ProductBase.Brand))); +``` + +Apply in the query method: + +```csharp +public async Task> QueryAsync(QueryArgs? query, PagingArgs? 
paging) +{ + var parsed = _queryConfig.Parse(query).ThrowOnError(); + + var products = _ef.Products.Model.Query(); + + return await products + .Where(parsed) + .OrderBy(parsed) + .ToMappedItemsResultAsync(x => new ProductLite + { + Id = x.Product.Id, + Sku = x.Product.Sku, + Text = x.Product.Text, + }, paging); +} +``` + +## EF Delegate Shortcuts + +Use the built-in EF delegate methods for single-entity CRUD — do not write raw `DbContext` queries for simple operations: + +```csharp +public Task GetAsync(string id) => _ef.Products.GetAsync(id); +public Task> CreateAsync(Product product) => _ef.Products.CreateAsync(product); +public Task> UpdateAsync(Product product) => _ef.Products.UpdateAsync(product); +public Task DeleteAsync(string id) => _ef.Products.DeleteAsync(id); +``` + +## Domain-Aggregate Repositories (Result Pattern) + +For Shopping-style aggregate repositories, chain `Result` operations using `.GoAsync` / `.ThenAs` / `.ThenAsAsync`. Map between persistence models and domain aggregates using explicit mappers: + +```csharp +public Task> GetAsync(string id) => Result + .GoAsync(() => _ef.Baskets.GetWithResultAsync(id)) + .ThenAs(model => BasketMapper.Map(model)); + +public Task> CreateAsync(Domain.Basket basket) => Result + .Go(() => + { + var model = new Persistence.Basket(); + BasketIntoMapper.MapInto(basket, model); + return SynchronizeItems(basket, model); + }) + .ThenAsAsync(model => _ef.Baskets.CreateWithResultAsync(model)) + .ThenAs(b => BasketMapper.Map(b)); +``` + +## Explicit Mapping — No AutoMapper + +Write explicit mapper classes or static methods. Do not introduce AutoMapper: + +```csharp +public static class BasketMapper +{ + public static Domain.Basket Map(Persistence.Basket model) + { + // explicit property assignment + } +} + +public static class BasketIntoMapper +{ + public static void MapInto(Domain.Basket src, Persistence.Basket dest) + { + dest.Id = src.Id; + dest.CustomerId = src.CustomerId; + // ... 
+ } +} +``` + +## ConfigureAwait + +Always call `.ConfigureAwait(false)` on every awaited call inside repository methods. + +## HTTP Client Adapters + +Infrastructure adapters that wrap downstream APIs should use a typed `HttpClient` registered under a named key. The adapter interface lives in Application; the implementation lives in Infrastructure: + +```csharp +[ScopedService] +public class ProductAdapter(ProductsHttpClient httpClient) : IProductAdapter +{ + private readonly ProductsHttpClient _httpClient = httpClient.ThrowIfNull(); + + public Task GetAsync(string id) + => _httpClient.GetAsync($"api/products/{id}"); + + public Task ReserveInventoryAsync(MovementRequest request) + => _httpClient.PostAsync("api/inventory/reserve", request); +} +``` diff --git a/.github/instructions/tests.instructions.md b/.github/instructions/tests.instructions.md new file mode 100644 index 00000000..871b453e --- /dev/null +++ b/.github/instructions/tests.instructions.md @@ -0,0 +1,186 @@ +--- +applyTo: "**/*.Test*/**/*.cs" +description: "Test conventions: test project types (Api/Unit/Subscribe/Relay), base classes, one-time setup patterns, and assertion helpers" +tags: ["testing", "unit-tests", "integration-tests", "test-helpers", "nunit"] +--- + +# Test Conventions + +## NuGet / Project References + +| Package | Key types provided | +|---|---| +| `CoreEx.UnitTesting` | `WithApiTester`, `WithGenericTester`, `Test.Http()`, `Test.Http()`, `Test.MigrateSqlServerDataAsync()`, `Test.ClearFusionCacheAsync()`, `Test.UseExpectedSqlServerOutboxPublisher()`, `Test.UseExpectedAzureServiceBusPublisher()`, `Test.ReplaceHttpClientFactory()`, `.ExpectIdentifier()`, `.ExpectETag()`, `.ExpectChangeLogCreated()`, `.ExpectJsonFromResource()`, `.ExpectSqlServerOutboxEvents()`, `.ExpectNoSqlServerOutboxEvents()`, `.AssertCreated()`, `.AssertOK()`, `.AssertBadRequest()`, `.AssertErrors()`, `.AssertJsonFromResource()`, `.AssertLocationHeader()`, `Test.Scoped()` | +| `UnitTestEx` | 
`MockHttpClientFactory`, `MockHttpClientRequest`, `.WithJsonResourceBody()`, `.WithAnyBody()`, `.Respond.With()`, `.Respond.WithJsonResource()`, `.Verify()` | +| `NUnit` | `[TestFixture]`, `[Test]`, `[OneTimeSetUp]` | +| `AwesomeAssertions` | `.Should()`, `.Be()` | + +## Project Types + +| Project suffix | Base class | Scope | +|---|---|---| +| `*.Test.Api` | `WithApiTester` | Full integration — real DB, cache, events, HTTP | +| `*.Test.Unit` | `WithGenericTester` | Component/unit — isolated, no infrastructure | +| `*.Test.Subscribe` | `WithApiTester` | Integration over subscriber host | +| `*.Test.Outbox.Relay` | `WithApiTester` | Integration over relay host | + +## One-Time Setup + +Every integration test class must have a `[OneTimeSetUp]` method that runs once before the suite. Order of operations is fixed: + +1. Migrate + seed the database. +2. Clear the hybrid cache. +3. Set up event capture publishers. +4. Set up HTTP client mocks (where applicable). + +```csharp +[OneTimeSetUp] +public async Task OneTimeSetUpAsync() +{ + await Test.MigrateSqlServerDataAsync(DbMigration.ConfigureMigrationArgs).ConfigureAwait(false); + await Test.ClearFusionCacheAsync().ConfigureAwait(false); + + Test.UseExpectedSqlServerOutboxPublisher(); + Test.UseExpectedAzureServiceBusPublisher(); // Shopping only + + var mcf = MockHttpClientFactory.Create(); + _mockHttpReserveRequest = mcf.CreateClient("ProductsApi") + .Request(HttpMethod.Post, "api/inventory/reserve"); + Test.ReplaceHttpClientFactory(mcf); +} +``` + +## Test Data (data.yaml) + +Test data lives in `Data/data.yaml` in the `*.Test.Common` project. The `TestData` marker class in that project is the assembly locator — do not rename or move it. + +IDs are written as integers in the YAML file and resolved to GUIDs at load time via `n.ToGuid()`. 
Use the same helper in test code to reference those IDs: + +```csharp +var product = Test.Http() + .Run(HttpMethod.Get, $"/api/products/{1.ToGuid()}") + .AssertOK(); +``` + +## Fluent Test Pattern + +Always use the `Test.Http()` / `Test.Http()` fluent chain: + +1. **Set expectations** (before calling `.Run`). +2. **Execute** with `.Run(method, path, body?)`. +3. **Assert** the response. + +```csharp +// Simple GET +Test.Http() + .Run(HttpMethod.Get, $"/api/products/{1.ToGuid()}") + .AssertOK() + .AssertJsonFromResource("ReadTests.Product_Get_Found.res.json", "etag", "changelog"); + +// POST with event assertion +var created = Test.Http() + .ExpectIdentifier() + .ExpectETag() + .ExpectChangeLogCreated() + .ExpectJsonFromResource("ProductMutateTests.Create_Success.res.json") + .ExpectSqlServerOutboxEvents(e => e + .AssertWithValue("contoso", "contoso.products.product.created.v1")) + .Run(HttpMethod.Post, "/api/products", product) + .AssertCreated() + .AssertLocationHeader(r => new Uri($"/api/products/{r!.Id}", UriKind.Relative)) + .Value!; + +// Validation error +Test.Http() + .Run(HttpMethod.Post, "/api/products", invalidProduct) + .AssertBadRequest() + .AssertErrors("Text is required.", "Price must be greater than or equal to zero."); + +// Verify no events published +Test.Http() + .ExpectNoSqlServerOutboxEvents() + .Run(HttpMethod.Post, $"/api/baskets/{basketId}/checkout") + .AssertBadRequest(); +``` + +## Resource-Based JSON Assertions + +Expected response bodies are stored as `.res.json` files in `Resources/`. Reference them by their dot-separated path within the Resources folder. 
Exclude volatile fields (etag, changelog timestamps, traceId) by passing them as additional params: + +```csharp +.AssertJsonFromResource("ReadTests.Product_Get_Found.res.json", "etag", "changelog"); +.AssertJsonFromResource("Basket_Checkout_Insufficient_Quantity.products.res.json", "traceid"); +``` + +## HTTP Client Mocking + +Define the mock request field at class level and configure its response inside each test method. Always call `.Verify()` after the test action to confirm the mock was actually invoked: + +```csharp +// Class level +private MockHttpClientRequest _mockHttpReserveRequest = null!; + +// OneTimeSetUp +_mockHttpReserveRequest = mcf.CreateClient("ProductsApi") + .Request(HttpMethod.Post, "api/inventory/reserve"); + +// In test — success path +_mockHttpReserveRequest + .WithJsonResourceBody("Basket_Checkout_Success.products.req.json") + .Respond.With(HttpStatusCode.OK); + +// In test — error path +_mockHttpReserveRequest.WithAnyBody() + .Respond.WithJsonResource( + "Basket_Checkout_Insufficient_Quantity.products.res.json", + HttpStatusCode.BadRequest, + System.Net.Mime.MediaTypeNames.Application.ProblemJson); + +// After action +_mockHttpReserveRequest.Verify(); +``` + +## Event Publisher Expectations + +Use `ExpectSqlServerOutboxEvents` and `ExpectAzureServiceBusEvents` before `.Run` to assert that the operation produces the expected events: + +```csharp +.ExpectSqlServerOutboxEvents(e => e + .AssertWithValue("contoso", "contoso.products.product.created.v1")) +``` + +Use `ExpectNoSqlServerOutboxEvents()` when the operation must not produce any events (e.g., a failed checkout). + +## Unit Tests + +Unit tests use `Test.Scoped(test => { ... 
})` to get an isolated execution context: + +```csharp +[Test] +public void Empty_Required() => Test.Scoped(test => +{ + var p = new Product(); + new ProductValidator().AssertErrors(p, + ("sku", "Sku is required."), + ("text", "Text is required."), + ("subCategory", "Sub-category is required."), + ("unitOfMeasure", "Unit-of-measure is required.")); +}); +``` + +## NUnit Attributes + +Use `[TestFixture]` on the class (inherited from base when using `WithApiTester`) and `[Test]` on individual test methods. Do not use `[TestCase]` for integration tests — use separate named methods for clarity. + +## Naming Tests + +Name test methods as `{Entity}_{Action}_{Outcome}`: + +``` +Product_Get_Found +Product_Get_NotFound +Product_Create_Success +Product_Create_Bad_Data +Basket_Checkout_Success +Basket_Checkout_Insufficient_Quantity +``` diff --git a/.github/instructions/validators.instructions.md b/.github/instructions/validators.instructions.md new file mode 100644 index 00000000..fe050158 --- /dev/null +++ b/.github/instructions/validators.instructions.md @@ -0,0 +1,134 @@ +--- +applyTo: "**/*Validator*.cs" +description: "Validator conventions: fluent validation API, rule definition, singleton pattern, and CoreEx validation framework usage" +tags: ["validators", "validation", "fluent-api", "rules", "error-handling"] +--- + +# Validator Conventions + +## NuGet / Project References + +| Package | Key types provided | +|---|---| +| `CoreEx.Validation` | `Validator`, `Validator.Create()`, `.Mandatory()`, `.MaximumLength()`, `.IsValid()`, `.PrecisionScale()`, `.GreaterThanOrEqualTo()`, `.LessThanOrEqualTo()`, `.Equal()`, `.NotFound()`, `.WhenValue()`, `.Error()`, `.DependsOn()`, `.Entity()`, `.Dictionary()`, `ValidationContext`, `.ValidateFurtherAsync()`, `.ValidateAndThrowAsync()`, `.ValidateWithResultAsync()`, `.AssertErrors()` (test helper) | +| `CoreEx.Localization` | `[Localization(...)]` attribute on contract properties | + +## Base Class + +Use `Validator` from 
`CoreEx.Validation`. Expose a static `Default` singleton instance: + +```csharp +public class ProductValidator : Validator +{ + public ProductValidator() + { + Property(p => p.Sku).Mandatory().MaximumLength(50); + Property(p => p.Text).Mandatory().MaximumLength(250); + Property(p => p.SubCategory).Mandatory().IsValid(); + Property(p => p.UnitOfMeasure).Mandatory().IsValid(); + Property(p => p.Price).PrecisionScale(null, 2).GreaterThanOrEqualTo(0, _ => "zero"); + } +} +``` + +Do **not** use FluentValidation unless the project already depends on it. + +## Static Default Instance + +The `Validator` base provides a `Default` singleton. Call `ValidateAndThrowAsync` or `ValidateWithResultAsync` without instantiating manually: + +```csharp +// Exception style (services) +await ProductValidator.Default.ValidateAndThrowAsync(product); + +// Result style (domain-aggregate services) +var result = await ProductValidator.Default.ValidateWithResultAsync(product); +``` + +## Common Rules + +| Rule | Method | +|---|---| +| Required | `.Mandatory()` | +| Max string length | `.MaximumLength(n)` | +| Reference data validity | `.IsValid()` | +| Decimal precision | `.PrecisionScale(precision, scale)` | +| Greater than or equal to | `.GreaterThanOrEqualTo(value)` | +| Less than or equal to | `.LessThanOrEqualTo(value)` | +| Equals | `.Equal(value)` | +| Not found (for key lookup) | `.NotFound()` | +| Conditional rule | `.WhenValue(predicate)` | +| Custom error text | `.Error("message")` | + +## Reference Data Fields + +Use `.IsValid()` on `ReferenceData`-typed properties to validate that the code is a known active value: + +```csharp +Property(p => p.SubCategory).Mandatory().IsValid(); +Property(p => p.UnitOfMeasure).Mandatory().IsValid(); +``` + +## Nested / Collection Validators + +For entities with nested objects, create a separate `Validator.Create()` for the nested type and reference it via `.Entity(validator)` or `.Dictionary(...)`: + +```csharp +private static readonly Validator 
_productValidator = Validator.Create() + .HasProperty(x => x.UnitOfMeasure, c => c.Mandatory().IsValid()) + .HasProperty(x => x.Quantity, c => c.GreaterThanOrEqualTo(0).DependsOn(x => x.UnitOfMeasure)); + +// In parent validator: +Property(x => x.Products).Mandatory().Dictionary(c => c + .WithKeyValidator("Product", k => k.Mandatory().MaximumLength(50)) + .WithValueValidator(v => v.Mandatory().Entity(_productValidator))); +``` + +## Async Validation (Database Checks) + +Override `OnValidateAsync` for validators that need to query the database. Check `context.HasErrors` first to skip expensive async work if earlier rules already failed: + +```csharp +protected async override Task OnValidateAsync( + ValidationContext context, + CancellationToken cancellationToken) +{ + if (context.HasErrors) + return; + + var ids = context.Value.Products!.Keys.ToArray(); + var products = await _repository.GetForReservationAsync(ids).ConfigureAwait(false); + + await context.ValidateFurtherAsync(c => c + .HasProperty(x => x.Products, c => c.Dictionary(c => c + .WithKeyValidator("Product", k => k + .NotFound().WhenValue(v => !products.ContainsKey(v)) + .Error("{0} is non-stocked.").WhenValue(v => products[v].IsNonStocked)) + )), cancellationToken).ConfigureAwait(false); +} +``` + +## Localization Labels + +Property names in error messages use the property name by default. Override with `[Localization("...")]` on the contract property or pass a custom label into the rule: + +```csharp +// Contract +[Localization("Sub-category")] +public partial string? SubCategoryCode { get; set; } + +// Produces: "Sub-category is required." 
(not "SubCategoryCode is required.") +``` + +## DependsOn for Conditional Precision + +Use `.DependsOn(x => x.OtherProp)` to skip a rule when a dependent property is already invalid: + +```csharp +Property(x => x.Quantity, c => c + .GreaterThanOrEqualTo(0) + .PrecisionScale( + ctx => ctx.Entity.UnitOfMeasure!.Precision, + ctx => ctx.Entity.UnitOfMeasure!.Scale) + .DependsOn(x => x.UnitOfMeasure)); +``` diff --git a/.github/prompts/init.prompt.md b/.github/prompts/init.prompt.md new file mode 100644 index 00000000..7ca2644f --- /dev/null +++ b/.github/prompts/init.prompt.md @@ -0,0 +1,33 @@ +--- +agent: agent +tools: ['execute/runInTerminal', 'read', 'search', 'todo'] +--- +Run the repository initialization checklist. + +Goals: +- Verify required local dependencies for the samples: + - Podman. + - Podman Desktop. + - Podman Compose. + - .NET 10 SDK. +- If any dependency is missing, attempt installation automatically using `winget`. +- Re-run verification and report final status. + +Execution steps: +1. Verify dependencies using terminal commands: + - Podman: `podman --version`. + - Podman Compose: `podman compose version`. + - .NET 10 SDK: `dotnet --list-sdks` and confirm at least one SDK starts with `10.`. + - Podman Desktop: + - First try `winget list --id RedHat.Podman-Desktop --exact --accept-source-agreements`. + - If needed, also check for `Program Files\Podman Desktop\Podman Desktop.exe`. +2. If `winget` is available, install missing dependencies: + - Podman: `winget install --id RedHat.Podman --exact --accept-package-agreements --accept-source-agreements`. + - Podman Desktop: `winget install --id RedHat.Podman-Desktop --exact --accept-package-agreements --accept-source-agreements`. + - .NET 10 SDK: `winget install --id Microsoft.DotNet.SDK.10 --exact --accept-package-agreements --accept-source-agreements`. +3. 
If Podman is present but Compose is missing, run a Podman upgrade: + - `winget upgrade --id RedHat.Podman --exact --accept-package-agreements --accept-source-agreements`. +4. Re-run all verification checks. +5. Summarize what was installed and what still requires manual intervention. + +If `winget` is unavailable, report that manual install is required and list the dependency names exactly. diff --git a/.github/prompts/scaffold-domain-from-templates.prompt.md b/.github/prompts/scaffold-domain-from-templates.prompt.md new file mode 100644 index 00000000..ea75eb4a --- /dev/null +++ b/.github/prompts/scaffold-domain-from-templates.prompt.md @@ -0,0 +1,141 @@ +--- +agent: agent +tools: ['create', 'read', 'search', 'todo'] +description: "Fast-path domain scaffolding: clone and materialize the canonical templates in .github/templates/domain/ with placeholder substitution. Use when you want exact template output with no creative generation — entity fields match the template shape exactly. For custom entity fields or reasoning about your domain model, use /generate-domain instead." +--- + +Scaffold a new CoreEx domain by cloning and materializing files from `/.github/templates/domain/**`. + +## Purpose + +Use this prompt when: +- **Speed is the priority** and the entity fields match the template shape (Id, ETag, ChangeLog, one status ref-data, optional child entity). +- You want **exact, deterministic output** — every file is copied verbatim from the templates with placeholder substitution, no reasoning or generation. +- You do not need the agent to inspect existing sample source code. + +Use the `/generate-domain` skill instead when: +- Your entity has **custom fields, types, or business rules** that go beyond what the templates express. +- You want the agent to **reason about your domain model** and apply conventions (validation rules, event naming, query config) appropriately. +- You are unsure which operations or patterns to include and want guided scaffolding. 
+ +## Inputs Required + +If not supplied, ask for: + +1. `Solution` (e.g. `Contoso`). +2. `Domain` (e.g. `Orders`). +3. `Entity` (e.g. `Order`). +4. `ChildEntity` (e.g. `OrderItem`). +5. `targetRoot` (default: `samples/src`). +6. `testsRoot` (default: `samples/tests`). + +## Naming Helper (Auto-Derive) + +Derive naming values from `Entity` unless the user explicitly overrides them: + +- `EntityPlural` = English plural form of `Entity`. + - Default rule: append `s`. + - If ends with `y` preceded by a consonant: replace `y` with `ies`. + - If ends with `s`, `x`, `z`, `ch`, `sh`: append `es`. + - Preserve casing (e.g. `Order` -> `Orders`, `Category` -> `Categories`). +- `entityKebab` = kebab-case of `Entity`. +- `entityPluralKebab` = kebab-case of `EntityPlural`. +- `EntityPluralVar` = `EntityPlural` unless overridden. + +Example: + +- `Entity = Order` -> `EntityPlural = Orders`, `entityKebab = order`, `entityPluralKebab = orders`. +- `Entity = Category` -> `EntityPlural = Categories`, `entityKebab = category`, `entityPluralKebab = categories`. + +## Placeholders to Replace + +For every template file, replace all placeholders: + +- `{Solution}` +- `{Domain}` +- `{Entity}` +- `{ChildEntity}` +- `{EntityPlural}` +- `{EntityPluralKebab}` where present +- `{entityKebab}` +- `{entityPluralKebab}` +- `{EntityPlural}` in class/type names +- `{EntityPlural}` / `{EntityPluralVar}` in repository/EfDb property names + +If `EntityPluralVar` is not supplied, default to `{EntityPlural}`. + +## Output Projects + +Create these projects under `{targetRoot}`: + +- `{Solution}.{Domain}.Contracts` +- `{Solution}.{Domain}.Application` +- `{Solution}.{Domain}.Infrastructure` +- `{Solution}.{Domain}.Api` +- `{Solution}.{Domain}.Database` + +Create these test projects under `{testsRoot}`: + +- `{Solution}.{Domain}.Test.Unit` +- `{Solution}.{Domain}.Test.Api` + +## Materialization Rules + +1. Copy each `.template` file into the corresponding project location. +2. 
Remove `.template` suffix from output files. +3. Rename `Domain.*.csproj.template` to `{Solution}.{Domain}.*.csproj`. +4. Rename `Entity*` files to use concrete entity names. +5. Keep folder structure identical to template tree. +6. Preserve line endings and indentation. + +## Required Post-Generation Adjustments + +After template materialization: + +1. In API controllers: +- Ensure routes use concrete kebab-case paths. +- Verify OpenApi tags use `{EntityPlural}`. + +2. In Database seed data: +- If status model is used, ensure `Pending`, `Confirmed`, `Cancelled` values are present unless the caller supplied alternatives. + +3. In Infrastructure repository: +- Ensure EfDb mapped model property uses concrete plural entity name. + +4. In Program files: +- Ensure namespaces match generated project names. + +5. In test projects: +- Ensure test namespaces and project names match `{Solution}.{Domain}.Test.Unit` and `{Solution}.{Domain}.Test.Api`. +- Ensure Unit tests follow `WithGenericTester` patterns. +- Ensure Api tests follow `WithApiTester<{Solution}.{Domain}.Api.Program>` patterns. +- Ensure assertions use AwesomeAssertions (not FluentAssertions). + +6. In solution structure: +- Add all generated domain and test projects to the Visual Studio solution. +- Group all generated domain and test projects under a solution folder named `{Domain}`. 
+ +## Validation + +Run `dotnet build` for all generated projects to check for compilation errors: + +- `{targetRoot}/{Solution}.{Domain}.Contracts` +- `{targetRoot}/{Solution}.{Domain}.Application` +- `{targetRoot}/{Solution}.{Domain}.Infrastructure` +- `{targetRoot}/{Solution}.{Domain}.Api` +- `{targetRoot}/{Solution}.{Domain}.Database` +- `{testsRoot}/{Solution}.{Domain}.Test.Unit` +- `{testsRoot}/{Solution}.{Domain}.Test.Api` + +Run tests and ensure they pass: + +- `dotnet test {testsRoot}/{Solution}.{Domain}.Test.Unit` +- `dotnet test {testsRoot}/{Solution}.{Domain}.Test.Api` + +If errors are found, fix them before completing. + +If tests fail, fix the generated code/tests and rerun until both Unit and Api test projects pass. + +## Completion Gate + +Use `/.github/templates/domain/DomainScaffold.checklist.md` as the final acceptance checklist. Do not finish until all applicable items are satisfied. diff --git a/.github/prompts/setup.prompt.md b/.github/prompts/setup.prompt.md new file mode 100644 index 00000000..7c23fd2f --- /dev/null +++ b/.github/prompts/setup.prompt.md @@ -0,0 +1,33 @@ +--- +agent: agent +description: Get my development workspace ready +tools: ['browser', 'execute/runInTerminal', 'read', 'search', 'todo'] +--- + +Goals: +- Start the docker-compose dependencies for Aspire only if not already running. +- Start the Aspire host without the debugger so all sample services start. +- Run the Contoso E2E runner to validate behavior. + +## checklist + +- [ ] Start the docker-compose dependencies for Aspire if not already running (which is at root of repo): + - `podman compose -f docker-compose.yml up -d` + - Wait for all containers to report healthy status by polling `podman ps` output. + - If any container fails to start or becomes unhealthy, capture key log lines with `podman logs` and report failure with remediation suggestions. 
+ +- [ ] Start Aspire without debugger in a dedicated terminal: + - `dotnet run --project samples/aspire/Contoso.Aspire` + - Keep this terminal running and do not await for any user input + - Wait for readiness by polling output until: + - startup/readiness messages indicate services are running, and + - no fatal startup exception is present. + - If readiness is not reached within a reasonable timeout, report failure with key log lines. + +- [ ] Run the Contoso E2E runner to validate behavior: + - `dotnet run --project samples/tests/Contoso.E2E.Runner` + - Wait for all scenarios to complete. + - If any scenario fails, capture the failure output and report with remediation suggestions. + +Failure handling: +- If any command fails, capture the key error lines and include a concise remediation suggestion. \ No newline at end of file diff --git a/.github/skills/acquire-codebase-knowledge/SKILL.md b/.github/skills/acquire-codebase-knowledge/SKILL.md new file mode 100644 index 00000000..5ac5289f --- /dev/null +++ b/.github/skills/acquire-codebase-knowledge/SKILL.md @@ -0,0 +1,175 @@ +--- +name: acquire-codebase-knowledge +description: 'Use this skill when the user explicitly asks to map, document, or onboard into an existing codebase. Trigger for prompts like "map this codebase", "document this architecture", "onboard me to this repo", or "create codebase docs". Do not trigger for routine feature implementation, bug fixes, or narrow code edits unless the user asks for repository-level discovery.' +license: MIT +compatibility: 'Cross-platform. Requires Python 3.8+ and git. Run scripts/scan.py from the target project root.' +metadata: + version: "1.3" + enhancements: + - Multi-language manifest detection (25+ languages supported) + - CI/CD pipeline detection (10+ platforms) + - Container & orchestration detection + - Code metrics by language + - Security & compliance config detection + - Performance testing markers +argument-hint: 'Optional: specific area to focus on, e.g. 
"architecture only", "testing and concerns"' +tags: ["codebase", "documentation", "onboarding", "discovery", "architecture"] +--- + +# Acquire Codebase Knowledge + +Produces seven populated documents in `docs/codebase/` covering everything needed to work effectively on the project. Only document what is verifiable from files or terminal output — never infer or assume. + +## Output Contract (Required) + +Before finishing, all of the following must be true: + +1. Exactly these files exist in `docs/codebase/`: `STACK.md`, `STRUCTURE.md`, `ARCHITECTURE.md`, `CONVENTIONS.md`, `INTEGRATIONS.md`, `TESTING.md`, `CONCERNS.md`. +2. Every claim is traceable to source files, config, or terminal output. +3. Unknowns are marked as `[TODO]`; intent-dependent decisions are marked `[ASK USER]`. +4. Every document includes a short "evidence" list with concrete file paths. +5. Final response includes numbered `[ASK USER]` questions and intent-vs-reality divergences. + +## Workflow + +Copy and track this checklist: + +``` +- [ ] Phase 1: Run scan, read intent documents +- [ ] Phase 2: Investigate each documentation area +- [ ] Phase 3: Populate all seven docs in docs/codebase/ +- [ ] Phase 4: Validate docs, present findings, resolve all [ASK USER] items +``` + +## Focus Area Mode + +If the user supplies a focus area (for example: "architecture only" or "testing and concerns"): + +1. Always run Phase 1 in full. +2. Fully complete focus-area documents first. +3. For non-focus documents not yet analyzed, keep required sections present and mark unknowns as `[TODO]`. +4. Still run the Phase 4 validation loop on all seven documents before final output. + +### Phase 1: Scan and Read Intent + +1. Run the scan script from the target project root: + ```bash + python3 "$SKILL_ROOT/scripts/scan.py" --output docs/codebase/.codebase-scan.txt + ``` + Where `$SKILL_ROOT` is the absolute path to the skill folder. Works on Windows, macOS, and Linux. 
+ + **Quick start:** If you have the path inline: + ```bash + python3 /absolute/path/to/skills/acquire-codebase-knowledge/scripts/scan.py --output docs/codebase/.codebase-scan.txt + ``` + +2. Search for `PRD`, `TRD`, `README`, `ROADMAP`, `SPEC`, `DESIGN` files and read them. +3. Summarise the stated project intent before reading any source code. + +### Phase 2: Investigate + +Use the scan output to answer questions for each of the seven templates. Load [`references/inquiry-checkpoints.md`](references/inquiry-checkpoints.md) for the full per-template question list. + +If the stack is ambiguous (multiple manifest files, unfamiliar file types, no `package.json`), load [`references/stack-detection.md`](references/stack-detection.md). + +### Phase 3: Populate Templates + +Copy each template from `assets/templates/` into `docs/codebase/`. Fill in this order: + +1. [STACK.md](assets/templates/STACK.md) — language, runtime, frameworks, all dependencies +2. [STRUCTURE.md](assets/templates/STRUCTURE.md) — directory layout, entry points, key files +3. [ARCHITECTURE.md](assets/templates/ARCHITECTURE.md) — layers, patterns, data flow +4. [CONVENTIONS.md](assets/templates/CONVENTIONS.md) — naming, formatting, error handling, imports +5. [INTEGRATIONS.md](assets/templates/INTEGRATIONS.md) — external APIs, databases, auth, monitoring +6. [TESTING.md](assets/templates/TESTING.md) — frameworks, file organization, mocking strategy +7. [CONCERNS.md](assets/templates/CONCERNS.md) — tech debt, bugs, security risks, perf bottlenecks + +Use `[TODO]` for anything that cannot be determined from code. Use `[ASK USER]` where the right answer requires team intent. + +### Phase 4: Validate, Repair, Verify + +Run this mandatory validation loop before finalizing: + +1. Validate each doc against `references/inquiry-checkpoints.md`. +2. For each non-trivial claim, confirm at least one evidence reference exists. +3. If any required section is missing or unsupported: + - Fix the document. 
+ - Re-run validation. +4. Repeat until all seven docs pass. + +Then present a summary of all seven documents, list every `[ASK USER]` item as a numbered question, and highlight any Intent vs. Reality divergences from Phase 1. + +Validation pass criteria: + +- No unsupported claims. +- No empty required sections. +- Unknowns use `[TODO]` rather than assumptions. +- Team-intent gaps are explicitly marked `[ASK USER]`. + +--- + +## Gotchas + +**Monorepos:** Root `package.json` may have no source — check for `workspaces`, `packages/`, or `apps/` directories. Each workspace may have independent dependencies and conventions. Map each sub-package separately. + +**Outdated README:** README often describes intended architecture, not the current one. Cross-reference with actual file structure before treating any README claim as fact. + +**TypeScript path aliases:** `tsconfig.json` `paths` config means imports like `@/foo` don't map directly to the filesystem. Map aliases to real paths before documenting structure. + +**Generated/compiled output:** Never document patterns from `dist/`, `build/`, `generated/`, `.next/`, `out/`, or `__pycache__/`. These are artefacts — document source conventions only. + +**`.env.example` reveals required config:** Secrets are never committed. Read `.env.example`, `.env.template`, or `.env.sample` to discover required environment variables. + +**`devDependencies` ≠ production stack:** Only `dependencies` (or equivalent, e.g. `[tool.poetry.dependencies]`) runs in production. Document linters, formatters, and test frameworks separately as dev tooling. + +**Test TODOs ≠ production debt:** TODOs inside `test/`, `tests/`, `__tests__/`, or `spec/` are coverage gaps, not production technical debt. Separate them in `CONCERNS.md`. + +**High-churn files = fragile areas:** Files appearing most in recent git history have the highest modification rate and likely hidden complexity. Always note them in `CONCERNS.md`. 
+
+---
+
+## Anti-Patterns
+
+| ❌ Don't | ✅ Do instead |
+|---------|--------------|
+| "Uses Clean Architecture with Domain/Data layers." (when no such directories exist) | State only what directory structure actually shows. |
+| "This is a Next.js project." (without checking `package.json`) | Check `dependencies` first. State what's actually there. |
+| Guess the database from a variable name like `dbUrl` | Check manifest for `pg`, `mysql2`, `mongoose`, `prisma`, etc. |
+| Document `dist/` or `build/` naming patterns as conventions | Source files only. |
+
+---
+
+## Enhanced Scan Output Sections
+
+The `scan.py` script now produces the following sections in addition to the original output:
+
+- **CODE METRICS** — Total files, lines of code by language, largest files (complexity signals)
+- **CI/CD PIPELINES** — Detected GitHub Actions, GitLab CI, Jenkins, CircleCI, etc.
+- **CONTAINERS & ORCHESTRATION** — Docker, Docker Compose, Kubernetes, Vagrant configs
+- **SECURITY & COMPLIANCE** — Snyk, Dependabot, SECURITY.md, SBOM, security policies
+- **PERFORMANCE & TESTING** — Benchmark configs, profiling markers, load testing tools
+
+Use these sections during Phase 2 to inform investigation questions and identify tool-specific patterns. 
+
+---
+
+## Bundled Assets
+
+| Asset | When to load |
+|-------|-------------|
+| [`scripts/scan.py`](scripts/scan.py) | Phase 1 — run first, before reading any code (Python 3.8+ required) |
+| [`references/inquiry-checkpoints.md`](references/inquiry-checkpoints.md) | Phase 2 — load for per-template investigation questions |
+| [`references/stack-detection.md`](references/stack-detection.md) | Phase 2 — only if stack is ambiguous |
+| [`assets/templates/STACK.md`](assets/templates/STACK.md) | Phase 3 step 1 |
+| [`assets/templates/STRUCTURE.md`](assets/templates/STRUCTURE.md) | Phase 3 step 2 |
+| [`assets/templates/ARCHITECTURE.md`](assets/templates/ARCHITECTURE.md) | Phase 3 step 3 |
+| [`assets/templates/CONVENTIONS.md`](assets/templates/CONVENTIONS.md) | Phase 3 step 4 |
+| [`assets/templates/INTEGRATIONS.md`](assets/templates/INTEGRATIONS.md) | Phase 3 step 5 |
+| [`assets/templates/TESTING.md`](assets/templates/TESTING.md) | Phase 3 step 6 |
+| [`assets/templates/CONCERNS.md`](assets/templates/CONCERNS.md) | Phase 3 step 7 |
+
+
+Template usage mode:
+
+- Default mode: complete only the "Core Sections (Required)" in each template.
+- Extended mode: add optional sections only when the repo complexity justifies them. 
diff --git a/.github/skills/acquire-codebase-knowledge/assets/templates/ARCHITECTURE.md b/.github/skills/acquire-codebase-knowledge/assets/templates/ARCHITECTURE.md new file mode 100644 index 00000000..26f575e2 --- /dev/null +++ b/.github/skills/acquire-codebase-knowledge/assets/templates/ARCHITECTURE.md @@ -0,0 +1,49 @@ +# Architecture + +## Core Sections (Required) + +### 1) Architectural Style + +- Primary style: [layered/feature/event-driven/other] +- Why this classification: [short evidence-backed rationale] +- Primary constraints: [2-3 constraints that shape design] + +### 2) System Flow + +```text +[entry] -> [processing] -> [domain logic] -> [data/integration] -> [response/output] +``` + +Describe the flow in 4-6 steps using file-backed evidence. + +### 3) Layer/Module Responsibilities + +| Layer or module | Owns | Must not own | Evidence | +|-----------------|------|--------------|----------| +| [name] | [responsibility] | [non-responsibility] | [file] | + +### 4) Reused Patterns + +| Pattern | Where found | Why it exists | +|---------|-------------|---------------| +| [singleton/repository/adapter/etc] | [path] | [reason] | + +### 5) Known Architectural Risks + +- [Risk 1 + impact] +- [Risk 2 + impact] + +### 6) Evidence + +- [path/to/entrypoint] +- [path/to/main-layer-files] +- [path/to/data-or-integration-layer] + +## Extended Sections (Optional) + +Add only when needed: + +- Startup or initialization order details +- Async/event topology diagrams +- Anti-pattern catalog with refactoring paths +- Failure-mode analysis and resilience posture diff --git a/.github/skills/acquire-codebase-knowledge/assets/templates/CONCERNS.md b/.github/skills/acquire-codebase-knowledge/assets/templates/CONCERNS.md new file mode 100644 index 00000000..d41e13ab --- /dev/null +++ b/.github/skills/acquire-codebase-knowledge/assets/templates/CONCERNS.md @@ -0,0 +1,56 @@ +# Codebase Concerns + +## Core Sections (Required) + +### 1) Top Risks (Prioritized) + +| Severity | Concern 
| Evidence | Impact | Suggested action | +|----------|---------|----------|--------|------------------| +| [high/med/low] | [issue] | [file or scan output] | [impact] | [next action] | + +### 2) Technical Debt + +List the most important debt items only. + +| Debt item | Why it exists | Where | Risk if ignored | Suggested fix | +|-----------|---------------|-------|-----------------|---------------| +| [item] | [reason] | [path] | [risk] | [fix] | + +### 3) Security Concerns + +| Risk | OWASP category (if applicable) | Evidence | Current mitigation | Gap | +|------|--------------------------------|----------|--------------------|-----| +| [risk] | [A01/A03/etc or N/A] | [path] | [what exists] | [what is missing] | + +### 4) Performance and Scaling Concerns + +| Concern | Evidence | Current symptom | Scaling risk | Suggested improvement | +|---------|----------|-----------------|-------------|-----------------------| +| [issue] | [path/metric] | [symptom] | [risk] | [action] | + +### 5) Fragile/High-Churn Areas + +| Area | Why fragile | Churn signal | Safe change strategy | +|------|-------------|-------------|----------------------| +| [path] | [reason] | [recent churn evidence] | [approach] | + +### 6) `[ASK USER]` Questions + +Add unresolved intent-dependent questions as a numbered list. + +1. 
[ASK USER] [question] + +### 7) Evidence + +- [scan output section reference] +- [path/to/code-file] +- [path/to/config-or-history-evidence] + +## Extended Sections (Optional) + +Add only when needed: + +- Full bug inventory +- Component-level remediation roadmap +- Cost/effort estimates by concern +- Dependency-risk and ownership mapping diff --git a/.github/skills/acquire-codebase-knowledge/assets/templates/CONVENTIONS.md b/.github/skills/acquire-codebase-knowledge/assets/templates/CONVENTIONS.md new file mode 100644 index 00000000..5a29453c --- /dev/null +++ b/.github/skills/acquire-codebase-knowledge/assets/templates/CONVENTIONS.md @@ -0,0 +1,52 @@ +# Coding Conventions + +## Core Sections (Required) + +### 1) Naming Rules + +| Item | Rule | Example | Evidence | +|------|------|---------|----------| +| Files | [RULE] | [EXAMPLE] | [FILE] | +| Functions/methods | [RULE] | [EXAMPLE] | [FILE] | +| Types/interfaces | [RULE] | [EXAMPLE] | [FILE] | +| Constants/env vars | [RULE] | [EXAMPLE] | [FILE] | + +### 2) Formatting and Linting + +- Formatter: [TOOL + CONFIG FILE] +- Linter: [TOOL + CONFIG FILE] +- Most relevant enforced rules: [RULE_1], [RULE_2], [RULE_3] +- Run commands: [COMMANDS] + +### 3) Import and Module Conventions + +- Import grouping/order: [RULE] +- Alias vs relative import policy: [RULE] +- Public exports/barrel policy: [RULE] + +### 4) Error and Logging Conventions + +- Error strategy by layer: [SHORT SUMMARY] +- Logging style and required context fields: [SUMMARY] +- Sensitive-data redaction rules: [SUMMARY] + +### 5) Testing Conventions + +- Test file naming/location rule: [RULE] +- Mocking strategy norm: [RULE] +- Coverage expectation: [RULE or TODO] + +### 6) Evidence + +- [path/to/lint-config] +- [path/to/format-config] +- [path/to/representative-source-file] + +## Extended Sections (Optional) + +Add only for large or inconsistent codebases: + +- Layer-specific error handling matrix +- Language-specific strictness options +- Repo-specific 
commit/branching conventions +- Known convention violations to clean up diff --git a/.github/skills/acquire-codebase-knowledge/assets/templates/INTEGRATIONS.md b/.github/skills/acquire-codebase-knowledge/assets/templates/INTEGRATIONS.md new file mode 100644 index 00000000..f62039ff --- /dev/null +++ b/.github/skills/acquire-codebase-knowledge/assets/templates/INTEGRATIONS.md @@ -0,0 +1,48 @@ +# External Integrations + +## Core Sections (Required) + +### 1) Integration Inventory + +| System | Type (API/DB/Queue/etc) | Purpose | Auth model | Criticality | Evidence | +|--------|---------------------------|---------|------------|-------------|----------| +| [name] | [type] | [purpose] | [auth] | [high/med/low] | [file] | + +### 2) Data Stores + +| Store | Role | Access layer | Key risk | Evidence | +|-------|------|--------------|----------|----------| +| [db/cache/etc] | [role] | [module] | [risk] | [file] | + +### 3) Secrets and Credentials Handling + +- Credential sources: [env/secrets manager/config] +- Hardcoding checks: [result] +- Rotation or lifecycle notes: [known/unknown] + +### 4) Reliability and Failure Behavior + +- Retry/backoff behavior: [implemented/none/partial] +- Timeout policy: [where configured] +- Circuit-breaker or fallback behavior: [if any] + +### 5) Observability for Integrations + +- Logging around external calls: [yes/no + where] +- Metrics/tracing coverage: [yes/no + where] +- Missing visibility gaps: [list] + +### 6) Evidence + +- [path/to/integration-wrapper] +- [path/to/config-or-env-template] +- [path/to/monitoring-or-logging-config] + +## Extended Sections (Optional) + +Add only when needed: + +- Endpoint-by-endpoint catalog +- Auth flow sequence diagrams +- SLA/SLO per integration +- Region/failover topology notes diff --git a/.github/skills/acquire-codebase-knowledge/assets/templates/STACK.md b/.github/skills/acquire-codebase-knowledge/assets/templates/STACK.md new file mode 100644 index 00000000..2520677c --- /dev/null +++ 
b/.github/skills/acquire-codebase-knowledge/assets/templates/STACK.md @@ -0,0 +1,56 @@ +# Technology Stack + +## Core Sections (Required) + +### 1) Runtime Summary + +| Area | Value | Evidence | +|------|-------|----------| +| Primary language | [VALUE] | [FILE_PATH] | +| Runtime + version | [VALUE] | [FILE_PATH] | +| Package manager | [VALUE] | [FILE_PATH] | +| Module/build system | [VALUE] | [FILE_PATH] | + +### 2) Production Frameworks and Dependencies + +List only high-impact production dependencies (frameworks, data, transport, auth). + +| Dependency | Version | Role in system | Evidence | +|------------|---------|----------------|----------| +| [NAME] | [VERSION] | [ROLE] | [FILE_PATH] | + +### 3) Development Toolchain + +| Tool | Purpose | Evidence | +|------|---------|----------| +| [TOOL] | [LINT/FORMAT/TEST/BUILD] | [FILE_PATH] | + +### 4) Key Commands + +```bash +[install command] +[build command] +[test command] +[lint command] +``` + +### 5) Environment and Config + +- Config sources: [LIST FILES] +- Required env vars: [VAR_1], [VAR_2], [TODO] +- Deployment/runtime constraints: [SHORT NOTE] + +### 6) Evidence + +- [path/to/manifest] +- [path/to/runtime-config] +- [path/to/build-or-ci-config] + +## Extended Sections (Optional) + +Add only when needed for complex repos: + +- Full dependency taxonomy by category +- Detailed compiler/runtime flags +- Environment matrix (dev/stage/prod) +- Process manager and container runtime details diff --git a/.github/skills/acquire-codebase-knowledge/assets/templates/STRUCTURE.md b/.github/skills/acquire-codebase-knowledge/assets/templates/STRUCTURE.md new file mode 100644 index 00000000..89e9c28f --- /dev/null +++ b/.github/skills/acquire-codebase-knowledge/assets/templates/STRUCTURE.md @@ -0,0 +1,44 @@ +# Codebase Structure + +## Core Sections (Required) + +### 1) Top-Level Map + +List only meaningful top-level directories and files. 
+ +| Path | Purpose | Evidence | +|------|---------|----------| +| [path/] | [purpose] | [source] | + +### 2) Entry Points + +- Main runtime entry: [FILE] +- Secondary entry points (worker/cli/jobs): [FILES or NONE] +- How entry is selected (script/config): [NOTE] + +### 3) Module Boundaries + +| Boundary | What belongs here | What must not be here | +|----------|-------------------|------------------------| +| [module/layer] | [responsibility] | [forbidden logic] | + +### 4) Naming and Organization Rules + +- File naming pattern: [kebab/camel/Pascal + examples] +- Directory organization pattern: [feature/layer/domain] +- Import aliasing or path conventions: [RULE] + +### 5) Evidence + +- [path/to/root-tree-source] +- [path/to/entry-config] +- [path/to/key-module] + +## Extended Sections (Optional) + +Add only when repository complexity requires it: + +- Subdirectory deep maps by feature/layer +- Middleware/boot order details +- Generated-vs-source layout boundaries +- Monorepo workspace-level structure maps diff --git a/.github/skills/acquire-codebase-knowledge/assets/templates/TESTING.md b/.github/skills/acquire-codebase-knowledge/assets/templates/TESTING.md new file mode 100644 index 00000000..8e0e7028 --- /dev/null +++ b/.github/skills/acquire-codebase-knowledge/assets/templates/TESTING.md @@ -0,0 +1,57 @@ +# Testing Patterns + +## Core Sections (Required) + +### 1) Test Stack and Commands + +- Primary test framework: [NAME + VERSION] +- Assertion/mocking tools: [TOOLS] +- Commands: + +```bash +[run all tests] +[run unit tests] +[run integration/e2e tests] +[run coverage] +``` + +### 2) Test Layout + +- Test file placement pattern: [co-located/tests folder/etc] +- Naming convention: [pattern] +- Setup files and where they run: [paths] + +### 3) Test Scope Matrix + +| Scope | Covered? 
| Typical target | Notes | +|-------|----------|----------------|-------| +| Unit | [yes/no] | [modules/services] | [notes] | +| Integration | [yes/no] | [API/data boundaries] | [notes] | +| E2E | [yes/no] | [user flows] | [notes] | + +### 4) Mocking and Isolation Strategy + +- Main mocking approach: [module/class/network] +- Isolation guarantees: [what is reset and when] +- Common failure mode in tests: [short note] + +### 5) Coverage and Quality Signals + +- Coverage tool + threshold: [value or TODO] +- Current reported coverage: [value or TODO] +- Known gaps/flaky areas: [list] + +### 6) Evidence + +- [path/to/test-config] +- [path/to/representative-test-file] +- [path/to/ci-or-coverage-config] + +## Extended Sections (Optional) + +Add only when needed: + +- Framework-specific suite patterns +- Detailed mock recipes per dependency type +- Historical flaky test catalog +- Test performance bottlenecks and optimization ideas diff --git a/.github/skills/acquire-codebase-knowledge/references/inquiry-checkpoints.md b/.github/skills/acquire-codebase-knowledge/references/inquiry-checkpoints.md new file mode 100644 index 00000000..02430e76 --- /dev/null +++ b/.github/skills/acquire-codebase-knowledge/references/inquiry-checkpoints.md @@ -0,0 +1,70 @@ +# Inquiry Checkpoints + +Per-template investigation questions for Phase 2 of the acquire-codebase-knowledge workflow. For each template area, look for answers in the scan output first, then read source files to fill gaps. + +--- + +## 1. STACK.md — Tech Stack + +- What is the primary language and exact version? (check `.nvmrc`, `go.mod`, `pyproject.toml`, Docker `FROM` line) +- What package manager is used? (`npm`, `yarn`, `pnpm`, `go mod`, `pip`, `uv`) +- What are the core runtime frameworks? (web server, ORM, DI container) +- What do `dependencies` (production) vs `devDependencies` (dev tooling) contain? +- Is there a Docker image and what base image does it use? 
+- What are the key scripts in `package.json` / `Makefile` / `pyproject.toml`? + +## 2. STRUCTURE.md — Directory Layout + +- Where does source code live? (usually `src/`, `lib/`, or project root for Go) +- What are the entry points? (check `main` in `package.json`, `scripts.start`, `cmd/main.go`, `app.py`) +- What is the stated purpose of each top-level directory? +- Are there non-obvious directories (e.g., `eng/`, `platform/`, `infra/`)? +- Are there hidden config directories (`.github/`, `.vscode/`, `.husky/`)? +- What naming conventions do directories follow? (camelCase, kebab-case, domain-based vs layer-based) + +## 3. ARCHITECTURE.md — Patterns + +- Is the code organized by layer (controllers → services → repos) or by feature? +- What is the primary data flow? Trace one request or command from entry to data store. +- Are there singletons, dependency injection patterns, or explicit initialization order requirements? +- Are there background workers, queues, or event-driven components? +- What design patterns appear repeatedly? (Factory, Repository, Decorator, Strategy) + +## 4. CONVENTIONS.md — Coding Standards + +- What is the file naming convention? (check 10+ files — camelCase, kebab-case, PascalCase) +- What is the function and variable naming convention? +- Are private methods/fields prefixed (e.g., `_methodName`, `#field`)? +- What linter and formatter are configured? (check `.eslintrc`, `.prettierrc`, `golangci.yml`) +- What are the TypeScript strictness settings? (`strict`, `noImplicitAny`, etc.) +- How are errors handled at each layer? (throw vs. return structured error) +- What logging library is used and what is the log message format? +- How are imports organized? (barrel exports, path aliases, grouping rules) + +## 5. INTEGRATIONS.md — External Services + +- What external APIs are called? (search for `axios.`, `fetch(`, `http.Get(`, base URLs in constants) +- How are credentials stored and accessed? 
(`.env`, secrets manager, env vars) +- What databases are connected? (check manifest for `pg`, `mongoose`, `prisma`, `typeorm`, `sqlalchemy`) +- Is there an API gateway, service mesh, or proxy between the app and external services? +- What monitoring or observability tools are used? (APM, Prometheus, logging pipeline) +- Are there message queues or event buses? (Kafka, RabbitMQ, SQS, Pub/Sub) + +## 6. TESTING.md — Test Setup + +- What test runner is configured? (check `scripts.test` in `package.json`, `pytest.ini`, `go test`) +- Where are test files located? (alongside source, in `tests/`, in `__tests__/`) +- What assertion library is used? (Jest expect, Chai, pytest assert) +- How are external dependencies mocked? (jest.mock, dependency injection, fixtures) +- Are there integration tests that hit real services vs. unit tests with mocks? +- Is there a coverage threshold enforced? (check `jest.config.js`, `.nycrc`, `pyproject.toml`) + +## 7. CONCERNS.md — Known Issues + +- How many TODOs/FIXMEs/HACKs are in production code? (see scan output) +- Which files have the highest git churn in the last 90 days? (see scan output) +- Are there any files over 500 lines that mix multiple responsibilities? +- Do any services make sequential calls that could be parallelized? +- Are there hardcoded values (URLs, IDs, magic numbers) that should be config? +- What security risks exist? (missing input validation, raw error messages exposed to clients, missing auth checks) +- Are there performance patterns that don't scale? 
(N+1 queries, in-memory caches in multi-instance setups) diff --git a/.github/skills/acquire-codebase-knowledge/references/stack-detection.md b/.github/skills/acquire-codebase-knowledge/references/stack-detection.md new file mode 100644 index 00000000..01ccfd7d --- /dev/null +++ b/.github/skills/acquire-codebase-knowledge/references/stack-detection.md @@ -0,0 +1,131 @@ +# Stack Detection Reference + +Load this file when the tech stack is ambiguous — e.g., multiple manifest files present, unfamiliar file extensions, or no obvious `package.json` / `go.mod`. + +--- + +## Manifest File → Ecosystem + +| File | Ecosystem | Key fields to read | +|------|-----------|--------------------| +| `package.json` | Node.js / JavaScript / TypeScript | `dependencies`, `devDependencies`, `scripts`, `main`, `type`, `engines` | +| `go.mod` | Go | Module path, Go version, `require` block | +| `requirements.txt` | Python (pip) | Package list with pinned versions | +| `Pipfile` | Python (pipenv) | `[packages]`, `[dev-packages]`, `[requires]` python version | +| `pyproject.toml` | Python (poetry / uv / hatch) | `[tool.poetry.dependencies]`, `[project]`, `[build-system]` | +| `setup.py` / `setup.cfg` | Python (setuptools, legacy) | `install_requires`, `python_requires` | +| `Cargo.toml` | Rust | `[dependencies]`, `[[bin]]`, `[lib]` | +| `pom.xml` | Java / Kotlin (Maven) | ``, ``, ``, `` | +| `build.gradle` / `build.gradle.kts` | Java / Kotlin (Gradle) | `dependencies {}`, `sourceCompatibility` | +| `composer.json` | PHP | `require`, `require-dev` | +| `Gemfile` | Ruby | `gem` declarations, `ruby` version constraint | +| `mix.exs` | Elixir | `deps/0`, `elixir: "~> X.Y"` | +| `pubspec.yaml` | Dart / Flutter | `dependencies`, `dev_dependencies`, `environment.sdk` | +| `*.csproj` | .NET / C# | ``, `` | +| `*.sln` | .NET solution | References multiple `.csproj` projects | +| `deno.json` / `deno.jsonc` | Deno (TypeScript runtime) | `imports`, `tasks` | +| `bun.lockb` | Bun (JavaScript runtime) | 
Binary lockfile — check `package.json` for deps | + +--- + +## Language Runtime Version Detection + +| Language | Where to find the version | +|----------|--------------------------| +| Node.js | `.nvmrc`, `.node-version`, `engines.node` in `package.json`, Docker `FROM node:X` | +| Python | `.python-version`, `pyproject.toml [requires-python]`, Docker `FROM python:X` | +| Go | First line of `go.mod` (`go 1.21`) | +| Java | `` in `pom.xml`, `sourceCompatibility` in `build.gradle`, Docker `FROM eclipse-temurin:X` | +| Ruby | `.ruby-version`, `Gemfile` `ruby 'X.Y.Z'` | +| Rust | `rust-toolchain.toml`, `rust-toolchain` file | +| .NET | `` in `.csproj` (e.g., `net8.0`) | + +--- + +## Framework Detection (Node.js / TypeScript) + +| Dependency in `package.json` | Framework | +|-----------------------------|-----------| +| `express` | Express.js (minimal HTTP server) | +| `fastify` | Fastify (high-performance HTTP server) | +| `next` | Next.js (SSR/SSG React — check for `pages/` or `app/` directory) | +| `nuxt` | Nuxt.js (SSR/SSG Vue) | +| `@nestjs/core` | NestJS (opinionated Node.js framework with DI) | +| `koa` | Koa (middleware-focused, no built-in router) | +| `@hapi/hapi` | Hapi | +| `@trpc/server` | tRPC (type-safe API without REST/GraphQL schemas) | +| `routing-controllers` | routing-controllers (decorator-based Express wrapper) | +| `typeorm` | TypeORM (SQL ORM with decorators) | +| `prisma` | Prisma (type-safe ORM, check `prisma/schema.prisma`) | +| `mongoose` | Mongoose (MongoDB ODM) | +| `sequelize` | Sequelize (SQL ORM) | +| `drizzle-orm` | Drizzle (lightweight SQL ORM) | +| `react` without `next` | Vanilla React SPA (check for `react-router-dom`) | +| `vue` without `nuxt` | Vanilla Vue SPA | + +--- + +## Framework Detection (Python) + +| Package | Framework | +|---------|-----------| +| `fastapi` | FastAPI (async REST, auto OpenAPI docs) | +| `flask` | Flask (minimal WSGI web framework) | +| `django` | Django (batteries-included, check `settings.py`) | +| 
`starlette` | Starlette (ASGI, often used as FastAPI base) | +| `aiohttp` | aiohttp (async HTTP client and server) | +| `sqlalchemy` | SQLAlchemy (SQL ORM; check for `alembic` migrations) | +| `alembic` | Alembic (SQLAlchemy migration tool) | +| `pydantic` | Pydantic (data validation; core to FastAPI) | +| `celery` | Celery (distributed task queue) | + +--- + +## Monorepo Detection + +Check these signals in order: + +1. `pnpm-workspace.yaml` — pnpm workspaces +2. `lerna.json` — Lerna monorepo +3. `nx.json` — Nx monorepo (also check `workspace.json`) +4. `turbo.json` — Turborepo +5. `rush.json` — Rush (Microsoft monorepo manager) +6. `moon.yml` — Moon +7. `package.json` with `"workspaces": [...]` — npm/yarn workspaces +8. Presence of `packages/`, `apps/`, `libs/`, or `services/` directories with their own `package.json` + +If monorepo is detected: each workspace may have **independent** dependencies and conventions. Map each sub-package separately in `STACK.md` and note the monorepo structure in `STRUCTURE.md`. + +--- + +## TypeScript Path Alias Detection + +If `tsconfig.json` has a `paths` key, imports with non-relative prefixes are aliases. Map them before documenting structure. + +```json +// tsconfig.json example +"paths": { + "@/*": ["./src/*"], + "@components/*": ["./src/components/*"], + "@utils/*": ["./src/utils/*"] +} +``` + +Imports like `import { foo } from '@/utils/bar'` resolve to `src/utils/bar`. Document as `src/utils/bar`, not `@/utils/bar`. 
+ +--- + +## Docker Base Image → Runtime + +If no manifest file is present but a `Dockerfile` exists, the `FROM` line reveals the runtime: + +| FROM line pattern | Runtime | +|------------------|---------| +| `FROM node:X` | Node.js X | +| `FROM python:X` | Python X | +| `FROM golang:X` | Go X | +| `FROM eclipse-temurin:X` | Java X (Eclipse Temurin JDK) | +| `FROM mcr.microsoft.com/dotnet/aspnet:X` | .NET X | +| `FROM ruby:X` | Ruby X | +| `FROM rust:X` | Rust X | +| `FROM alpine` (alone) | Check what's installed via `RUN apk add` | diff --git a/.github/skills/acquire-codebase-knowledge/scripts/scan.py b/.github/skills/acquire-codebase-knowledge/scripts/scan.py new file mode 100644 index 00000000..15e17a28 --- /dev/null +++ b/.github/skills/acquire-codebase-knowledge/scripts/scan.py @@ -0,0 +1,712 @@ +#!/usr/bin/env python3 +""" +scan.py — Collect project discovery information for the acquire-codebase-knowledge skill. +Run from the project root directory. + +Usage: python3 scan.py [OPTIONS] + +Options: + --output FILE Write output to FILE instead of stdout + --help Show this message and exit + +Exit codes: + 0 Success + 1 Usage error +""" + +import os +import sys +import argparse +import subprocess +import json +from pathlib import Path +from typing import List, Set +import re + +TREE_LIMIT = 200 +TREE_MAX_DEPTH = 3 +TODO_LIMIT = 60 +MANIFEST_PREVIEW_LINES = 80 +RECENT_COMMITS_LIMIT = 20 +CHURN_LIMIT = 20 + +EXCLUDE_DIRS = { + "node_modules", ".git", "dist", "build", "out", ".next", ".nuxt", + "__pycache__", ".venv", "venv", ".tox", "target", "vendor", + "coverage", ".nyc_output", "generated", ".cache", ".turbo", + ".yarn", ".pnp", "bin", "obj" +} + +MANIFESTS = [ + # JavaScript/Node.js + "package.json", "package-lock.json", "yarn.lock", "pnpm-lock.yaml", "bun.lockb", + "deno.json", "deno.jsonc", + # Python + "requirements.txt", "Pipfile", "Pipfile.lock", "pyproject.toml", "setup.py", "setup.cfg", + "poetry.lock", "pdm.lock", "uv.lock", + # Go + "go.mod", "go.sum", 
+ # Rust + "Cargo.toml", "Cargo.lock", + # Java/Kotlin + "pom.xml", "build.gradle", "build.gradle.kts", "settings.gradle", "settings.gradle.kts", + "gradle.properties", + # PHP/Composer + "composer.json", "composer.lock", + # Ruby + "Gemfile", "Gemfile.lock", "*.gemspec", + # Elixir + "mix.exs", "mix.lock", + # Dart/Flutter + "pubspec.yaml", "pubspec.lock", + # .NET/C# + "*.csproj", "*.sln", "*.slnx", "global.json", "packages.config", + # Swift + "Package.swift", "Package.resolved", + # Scala + "build.sbt", "scala-cli.yml", + # Haskell + "*.cabal", "stack.yaml", "cabal.project", "cabal.project.local", + # OCaml + "dune-project", "opam", "opam.lock", + # Nim + "*.nimble", "nim.cfg", + # Crystal + "shard.yml", "shard.lock", + # R + "DESCRIPTION", "renv.lock", + # Julia + "Project.toml", "Manifest.toml", + # Build systems + "CMakeLists.txt", "Makefile", "GNUmakefile", + "SConstruct", "build.xml", + "BUILD", "BUILD.bazel", "WORKSPACE", "bazel.lock", + "justfile", ".justfile", "Taskfile.yml", + "tox.ini", "Vagrantfile" +] + +ENTRY_CANDIDATES = [ + # JavaScript/Node.js/TypeScript + "src/index.ts", "src/index.js", "src/index.mjs", + "src/main.ts", "src/main.js", "src/main.py", + "src/app.ts", "src/app.js", + "src/server.ts", "src/server.js", + "index.ts", "index.js", "app.ts", "app.js", + "lib/index.ts", "lib/index.js", + # Go + "main.go", "cmd/main.go", "cmd/*/main.go", + # Python + "main.py", "app.py", "server.py", "run.py", "cli.py", + "src/main.py", "src/__main__.py", + # .NET/C# + "Program.cs", "src/Program.cs", "Main.cs", + # Java + "Main.java", "Application.java", "App.java", + "src/main/java/Main.java", + # Kotlin + "Main.kt", "Application.kt", "App.kt", + # Rust + "src/main.rs", "src/lib.rs", + # Swift + "main.swift", "Package.swift", "Sources/main.swift", + # Ruby + "app.rb", "main.rb", "lib/app.rb", + # PHP + "index.php", "app.php", "public/index.php", + # Go + "cmd/*/main.go", + # Scala + "src/main/scala/Main.scala", + # Haskell + "Main.hs", "app/Main.hs", + # 
Clojure + "src/core.clj", "-main.clj", + # Elixir + "lib/application.ex", "mix.exs", +] + +LINT_FILES = [ + ".eslintrc", ".eslintrc.json", ".eslintrc.js", ".eslintrc.cjs", ".eslintrc.yml", ".eslintrc.yaml", + "eslint.config.js", "eslint.config.mjs", "eslint.config.cjs", + ".prettierrc", ".prettierrc.json", ".prettierrc.js", ".prettierrc.yml", + "prettier.config.js", "prettier.config.mjs", + ".editorconfig", + "tsconfig.json", "tsconfig.base.json", "tsconfig.build.json", + ".golangci.yml", ".golangci.yaml", + "setup.cfg", ".flake8", ".pylintrc", "mypy.ini", + ".rubocop.yml", "phpcs.xml", "phpstan.neon", + "biome.json", "biome.jsonc" +] + +ENV_TEMPLATES = [".env.example", ".env.template", ".env.sample", ".env.defaults", ".env.local.example"] + +SOURCE_EXTS = [ + "ts", "tsx", "js", "jsx", "mjs", "cjs", + "py", "go", "java", "kt", "rb", "php", + "rs", "cs", "cpp", "c", "h", "ex", "exs", + "swift", "scala", "clj", "cljs", "lua", + "vim", "vim", "hs", "ml", "ml", "nim", "cr", + "r", "jl", "groovy", "gradle", "xml", "json" +] + +MONOREPO_FILES = ["pnpm-workspace.yaml", "lerna.json", "nx.json", "rush.json", "turbo.json", "moon.yml"] +MONOREPO_DIRS = ["packages", "apps", "libs", "services", "modules"] + +CI_CD_CONFIGS = { + ".github/workflows": "GitHub Actions", + ".gitlab-ci.yml": "GitLab CI", + "Jenkinsfile": "Jenkins", + ".circleci/config.yml": "CircleCI", + ".travis.yml": "Travis CI", + "azure-pipelines.yml": "Azure Pipelines", + "appveyor.yml": "AppVeyor", + ".drone.yml": "Drone CI", + ".woodpecker.yml": "Woodpecker CI", + "bitbucket-pipelines.yml": "Bitbucket Pipelines" +} + +CONTAINER_FILES = [ + "Dockerfile", "docker-compose.yml", "docker-compose.yaml", + ".dockerignore", "Dockerfile.*", + "k8s", "kustomization.yaml", "Chart.yaml", + "Vagrantfile", "podman-compose.yml" +] + +SECURITY_CONFIGS = [ + ".snyk", "security.txt", "SECURITY.md", + ".dependabot.yml", ".whitesource", + "sbom.json", "sbom.spdx", ".bandit.yaml" +] + +PERFORMANCE_MARKERS = [ + "benchmark", 
"bench", "perf.data", ".prof", + "k6.js", "locustfile.py", "jmeter.jmx" +] + + +def parse_args(): + """Parse command-line arguments.""" + parser = argparse.ArgumentParser( + description="Scan the current directory (project root) and output discovery information " + "for the acquire-codebase-knowledge skill.", + add_help=True + ) + parser.add_argument( + "--output", + type=str, + help="Write output to FILE instead of stdout" + ) + return parser.parse_args() + + +def should_exclude(path: Path) -> bool: + """Check if a path should be excluded from scanning.""" + return any(part in EXCLUDE_DIRS for part in path.parts) + + +def get_directory_tree(max_depth: int = TREE_MAX_DEPTH) -> List[str]: + """Get directory tree up to max_depth.""" + files = [] + + def walk(path: Path, depth: int): + if depth > max_depth or should_exclude(path): + return + try: + for item in sorted(path.iterdir()): + if should_exclude(item): + continue + rel_path = item.relative_to(Path.cwd()) + files.append(str(rel_path)) + if item.is_dir(): + walk(item, depth + 1) + except (PermissionError, OSError): + pass + + walk(Path.cwd(), 0) + return files[:TREE_LIMIT] + + +def find_manifest_files() -> List[str]: + """Find manifest files matching patterns.""" + found = [] + for pattern in MANIFESTS: + if "*" in pattern: + # Handle glob patterns + for path in Path.cwd().glob(pattern): + if path.is_file() and not should_exclude(path): + found.append(path.name) + else: + path = Path.cwd() / pattern + if path.is_file(): + found.append(pattern) + return sorted(set(found)) + + +def read_file_preview(filepath: Path, max_lines: int = MANIFEST_PREVIEW_LINES) -> str: + """Read file with line limit.""" + try: + with open(filepath, 'r', encoding='utf-8', errors='replace') as f: + lines = f.readlines() + + if not lines: + return "None found." + + preview = ''.join(lines[:max_lines]) + if len(lines) > max_lines: + preview += f"\n[TRUNCATED] Showing first {max_lines} of {len(lines)} lines." 
+ return preview + except Exception as e: + return f"[Error reading file: {e}]" + + +def find_entry_points() -> List[str]: + """Find entry point candidates, expanding glob patterns (e.g. cmd/*/main.go).""" + found = [] + for candidate in ENTRY_CANDIDATES: + matches = [str(p.relative_to(Path.cwd())) for p in Path.cwd().glob(candidate)] if "*" in candidate else ([candidate] if Path(candidate).exists() else [])  # Path.exists() never matches glob entries + found.extend(m for m in matches if m not in found) + return found + + +def find_lint_config() -> List[str]: + """Find linting and formatting config files.""" + found = [] + for filename in LINT_FILES: + if Path(filename).exists(): + found.append(filename) + return found + + +def find_env_templates() -> List[tuple]: + """Find environment variable templates.""" + found = [] + for filename in ENV_TEMPLATES: + path = Path(filename) + if path.exists(): + found.append((filename, path)) + return found + + +def search_todos() -> List[str]: + """Search for TODO/FIXME/HACK comments.""" + todos = [] + patterns = ["TODO", "FIXME", "HACK"] + # Test and fixture directories are pruned inline below while walking + + try: + for root, dirs, files in os.walk(Path.cwd()): + # Remove excluded directories from dirs to prevent os.walk from descending + dirs[:] = [d for d in dirs if d not in EXCLUDE_DIRS and d not in {"test", "tests", "__tests__", "spec", "__mocks__", "fixtures"}] + + for file in files: + # Check file extension + ext = Path(file).suffix.lstrip('.') + if ext not in SOURCE_EXTS: + continue + + filepath = Path(root) / file + try: + with open(filepath, 'r', encoding='utf-8', errors='replace') as f: + for line_num, line in enumerate(f, 1): + # One entry per line, even when multiple markers are present + if any(marker in line for marker in patterns): + rel_path = filepath.relative_to(Path.cwd()) + todos.append(f"{rel_path}:{line_num}: {line.strip()}") + except Exception: + pass + except Exception: + pass + + return todos[:TODO_LIMIT] + + +def get_git_commits() -> List[str]: + """Get recent git commits.""" + try: + result = subprocess.run( + ["git", "log", "--oneline", "-n", str(RECENT_COMMITS_LIMIT)], + capture_output=True, + text=True, + cwd=Path.cwd() + ) + if result.returncode == 0: +
return result.stdout.strip().split('\n') if result.stdout.strip() else [] + return [] + except Exception: + return [] + + +def get_git_churn() -> List[str]: + """Get high-churn files from last 90 days.""" + try: + result = subprocess.run( + ["git", "log", "--since=90 days ago", "--name-only", "--pretty=format:"], + capture_output=True, + text=True, + cwd=Path.cwd() + ) + if result.returncode == 0: + files = [f.strip() for f in result.stdout.split('\n') if f.strip()] + # Count occurrences + from collections import Counter + counts = Counter(files) + churn = sorted(counts.items(), key=lambda x: x[1], reverse=True) + return [f"{count:4d} {filename}" for filename, count in churn[:CHURN_LIMIT]]  # report the actual file path, not a placeholder + return [] + except Exception: + return [] + + +def is_git_repo() -> bool: + """Check if current directory is a git repository.""" + try: + result = subprocess.run( + ["git", "rev-parse", "--git-dir"], + capture_output=True, + cwd=Path.cwd(), + timeout=2 + ) + return result.returncode == 0  # non-zero exit means not inside a git work tree + except Exception: + return False + + +def detect_monorepo() -> List[str]: + """Detect monorepo signals.""" + signals = [] + + for filename in MONOREPO_FILES: + if Path(filename).exists(): + signals.append(f"Monorepo tool detected: {filename}") + + for dirname in MONOREPO_DIRS: + if Path(dirname).is_dir(): + signals.append(f"Sub-package directory found: {dirname}/") + + # Check package.json workspaces + if Path("package.json").exists(): + try: + with open("package.json", 'r') as f: + content = f.read() + if '"workspaces"' in content: + signals.append("package.json has 'workspaces' field (npm/yarn workspaces monorepo)") + except Exception: + pass + + return signals + + +def detect_ci_cd_pipelines() -> List[str]: + """Detect CI/CD pipeline configurations.""" + pipelines = [] + + for config_path, pipeline_name in CI_CD_CONFIGS.items(): + path = Path(config_path) + if path.is_file(): + pipelines.append(f"CI/CD: {pipeline_name}") + elif path.is_dir(): + # Check for workflow files in directory + try: + if
list(path.glob("*.yml")) or list(path.glob("*.yaml")): + pipelines.append(f"CI/CD: {pipeline_name}") + except Exception: + pass + + return pipelines + + +def detect_containers() -> List[str]: + """Detect containerization and orchestration configs.""" + containers = [] + + for config in CONTAINER_FILES: + path = Path(config) + if path.is_file(): + if "Dockerfile" in config: + containers.append("Container: Docker found") + elif "docker-compose" in config: + containers.append("Orchestration: Docker Compose found") + elif config.endswith(".yaml") or config.endswith(".yml"): + containers.append(f"Container/Orchestration: {config}") + elif path.is_dir(): + if config in ["k8s", "kubernetes"]: + containers.append("Orchestration: Kubernetes configs found") + try: + if list(path.glob("*.yml")) or list(path.glob("*.yaml")): + containers.append(f"Container/Orchestration: {config}/ directory found") + except Exception: + pass + + return containers + + +def detect_security_configs() -> List[str]: + """Detect security and compliance configurations.""" + security = [] + + for config in SECURITY_CONFIGS: + if Path(config).exists(): + config_name = config.replace(".yml", "").replace(".yaml", "").lstrip(".") + security.append(f"Security: {config_name}") + + return security + + +def detect_performance_markers() -> List[str]: + """Detect performance testing and profiling markers.""" + performance = [] + + for marker in PERFORMANCE_MARKERS: + if Path(marker).exists(): + performance.append(f"Performance: {marker} found") + else: + # Check for directories + try: + if Path(marker).is_dir(): + performance.append(f"Performance: {marker}/ directory found") + except Exception: + pass + + return performance + + +def collect_code_metrics() -> dict: + """Collect code metrics: file counts by extension, total LOC.""" + metrics = { + "total_files": 0, + "by_extension": {}, + "by_language": {}, + "total_lines": 0, + "largest_files": [] + } + + # Language mapping + lang_map = { + "ts": "TypeScript", 
"tsx": "TypeScript/React", "js": "JavaScript", + "jsx": "JavaScript/React", "py": "Python", "go": "Go", + "java": "Java", "kt": "Kotlin", "rs": "Rust", + "cs": "C#", "rb": "Ruby", "php": "PHP", + "swift": "Swift", "scala": "Scala", "ex": "Elixir", + "cpp": "C++", "c": "C", "h": "C Header", + "clj": "Clojure", "lua": "Lua", "hs": "Haskell" + } + + file_sizes = [] + + try: + for root, dirs, files in os.walk(Path.cwd()): + dirs[:] = [d for d in dirs if d not in EXCLUDE_DIRS] + + for file in files: + filepath = Path(root) / file + ext = filepath.suffix.lstrip('.') + + if not ext or ext in {"pyc", "o", "a", "so"}: + continue + + try: + size = filepath.stat().st_size + file_sizes.append((filepath.relative_to(Path.cwd()), size)) + + metrics["total_files"] += 1 + metrics["by_extension"][ext] = metrics["by_extension"].get(ext, 0) + 1 + + lang = lang_map.get(ext, "Other") + metrics["by_language"][lang] = metrics["by_language"].get(lang, 0) + 1 + + # Count lines for text files + if ext in SOURCE_EXTS and size < 1_000_000: # Skip huge files + try: + with open(filepath, 'r', encoding='utf-8', errors='ignore') as f: + metrics["total_lines"] += len(f.readlines()) + except Exception: + pass + except Exception: + pass + + # Top 10 largest files + file_sizes.sort(key=lambda x: x[1], reverse=True) + metrics["largest_files"] = [ + f"{str(f)}: {s/1024:.1f}KB" for f, s in file_sizes[:10] + ] + + except Exception: + pass + + return metrics + + +def print_section(title: str, content: List[str], output_file=None) -> None: + """Print a section with title and content.""" + lines = [f"\n=== {title} ==="] + + if isinstance(content, list): + lines.extend(content if content else ["None found."]) + elif isinstance(content, str): + lines.append(content) + + text = '\n'.join(lines) + '\n' + + if output_file: + output_file.write(text) + else: + print(text, end='') + + +def main(): + """Main entry point.""" + args = parse_args() + + output_file = None + if args.output: + output_dir = 
Path(args.output).parent + output_dir.mkdir(parents=True, exist_ok=True) + output_file = open(args.output, 'w', encoding='utf-8') + print(f"Writing output to: {args.output}", file=sys.stderr) + + try: + # Directory tree + print_section( + f"DIRECTORY TREE (max depth {TREE_MAX_DEPTH}, source files only)", + get_directory_tree(), + output_file + ) + + # Stack detection + manifests = find_manifest_files() + if manifests: + manifest_content = [""] + for manifest in manifests: + manifest_path = Path(manifest) + manifest_content.append(f"--- {manifest} ---") + if manifest == "bun.lockb": + manifest_content.append("[Binary lockfile — see package.json for dependency details.]") + else: + manifest_content.append(read_file_preview(manifest_path)) + print_section("STACK DETECTION (manifest files)", manifest_content, output_file) + else: + print_section("STACK DETECTION (manifest files)", ["No recognized manifest files found in project root."], output_file) + + # Entry points + entries = find_entry_points() + if entries: + entry_content = [f"Found: {e}" for e in entries] + print_section("ENTRY POINTS", entry_content, output_file) + else: + print_section("ENTRY POINTS", ["No common entry points found. Check 'main' or 'scripts.start' in manifest files above."], output_file) + + # Linting config + lint = find_lint_config() + if lint: + lint_content = [f"Found: {l}" for l in lint] + print_section("LINTING AND FORMATTING CONFIG", lint_content, output_file) + else: + print_section("LINTING AND FORMATTING CONFIG", ["No linting or formatting config files found in project root."], output_file) + + # Environment templates + envs = find_env_templates() + if envs: + env_content = [] + for filename, filepath in envs: + env_content.append(f"--- {filename} ---")  # header names the template file being previewed + env_content.append(read_file_preview(filepath)) + print_section("ENVIRONMENT VARIABLE TEMPLATES", env_content, output_file) + else: + print_section("ENVIRONMENT VARIABLE TEMPLATES", ["No .env.example or .env.template found.
Identify required environment variables by searching the code and config for environment variable reads."], output_file) + + # TODOs + todos = search_todos() + if todos: + print_section("TODO / FIXME / HACK (production code only, test dirs excluded)", todos, output_file) + else: + print_section("TODO / FIXME / HACK (production code only, test dirs excluded)", ["None found."], output_file) + + # Git info + if is_git_repo(): + commits = get_git_commits() + if commits: + print_section("GIT RECENT COMMITS (last 20)", commits, output_file) + else: + print_section("GIT RECENT COMMITS (last 20)", ["No commits found."], output_file) + + churn = get_git_churn() + if churn: + print_section("HIGH-CHURN FILES (last 90 days, top 20)", churn, output_file) + else: + print_section("HIGH-CHURN FILES (last 90 days, top 20)", ["None found."], output_file) + else: + print_section("GIT RECENT COMMITS (last 20)", ["Not a git repository or no commits yet."], output_file) + print_section("HIGH-CHURN FILES (last 90 days, top 20)", ["Not a git repository."], output_file) + + # Monorepo detection + monorepo = detect_monorepo() + if monorepo: + print_section("MONOREPO SIGNALS", monorepo, output_file) + else: + print_section("MONOREPO SIGNALS", ["No monorepo signals detected."], output_file) + + # Code metrics + metrics = collect_code_metrics() + metrics_output = [ + f"Total files scanned: {metrics['total_files']}", + f"Total lines of code: {metrics['total_lines']}", + "" + ] + if metrics["by_language"]: + metrics_output.append("Files by language:") + for lang, count in sorted(metrics["by_language"].items(), key=lambda x: x[1], reverse=True): + metrics_output.append(f" {lang}: {count}") + if metrics["largest_files"]: + metrics_output.append("") + metrics_output.append("Top 10 largest files:") + metrics_output.extend(metrics["largest_files"]) + print_section("CODE METRICS", metrics_output, output_file) + + # CI/CD Detection + ci_cd = detect_ci_cd_pipelines() + if ci_cd: + print_section("CI/CD 
PIPELINES", ci_cd, output_file) + else: + print_section("CI/CD PIPELINES", ["No CI/CD pipelines detected."], output_file) + + # Container Detection + containers = detect_containers() + if containers: + print_section("CONTAINERS & ORCHESTRATION", containers, output_file) + else: + print_section("CONTAINERS & ORCHESTRATION", ["No containerization configs detected."], output_file) + + # Security Configs + security = detect_security_configs() + if security: + print_section("SECURITY & COMPLIANCE", security, output_file) + else: + print_section("SECURITY & COMPLIANCE", ["No security configs detected."], output_file) + + # Performance Markers + performance = detect_performance_markers() + if performance: + print_section("PERFORMANCE & TESTING", performance, output_file) + else: + print_section("PERFORMANCE & TESTING", ["No performance testing configs detected."], output_file) + + # Final message + final_msg = "\n=== SCAN COMPLETE ===\n" + if output_file: + output_file.write(final_msg) + else: + print(final_msg, end='') + + return 0 + + except Exception as e: + print(f"Error: {e}", file=sys.stderr) + return 1 + + finally: + if output_file: + output_file.close() + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/.github/skills/add-capability/SKILL.md b/.github/skills/add-capability/SKILL.md new file mode 100644 index 00000000..390a6503 --- /dev/null +++ b/.github/skills/add-capability/SKILL.md @@ -0,0 +1,50 @@ +--- +name: add-capability +description: "Retrofit an existing CoreEx domain or service with additional capabilities. Use when: adding Outbox.Relay, Subscribe, Azure Service Bus integration, subscriber scaffolding, or aligning messaging and integration wiring for an existing domain." +argument-hint: "Optional: solution, domain, and requested capability — e.g. 
'Contoso Products add relay and subscribers'" +tags: ["retrofit", "messaging", "service-bus", "outbox-relay", "subscribers", "integration"] +--- + +# Add Capability + +Retrofitting an existing domain with messaging and integration support. Choose only the missing pieces. + +## When to Use + +- Add `Outbox.Relay` to publish integration events reliably. +- Add `Subscribe` to consume integration events from other services. +- Add or align Azure Service Bus wiring. +- Add initial subscriber classes and registration. + +## When Not to Use + +- Creating a new domain from scratch — use `/generate-domain`. +- Bootstrapping a new solution — use the starter bootstrap workflow. +- Non-CoreEx brownfield migrations. + +## MVP Assumptions + +- Existing CoreEx-style domain shape (Contracts, Application, Infrastructure, Api, Database). +- SQL Server for outbox support. +- Azure Service Bus for publish/subscribe. + +If different backends are needed, ask before making changes. + +## Workflow + +1. **Load context**: Read host-setup, event-subscribers, application-services, database-project instructions + sample hosts. +2. **Inspect domain state**: Detect existing hosts, database support, messaging packages, event subjects. +3. **Clarify**: Ask only what cannot be inferred (which domain, which capability, topics/payloads if needed). +4. **Choose mode**: A (relay), B (subscribe), C (both), or D (subscribers only). +5. **Apply changes**: Targeted edits only — reuse patterns, don't regenerate. +6. **Validate**: Run checklist, confirm clean build. + +For detailed step-by-step workflow, see [`references/workflow.md`](references/workflow.md). 
+ +## Key References + +- [Host Setup Conventions](/.github/instructions/host-setup.instructions.md) +- [Event Subscriber Conventions](/.github/instructions/event-subscribers.instructions.md) +- [Application Service Conventions](/.github/instructions/application-services.instructions.md) +- [Database Project Conventions](/.github/instructions/database-project.instructions.md) +- Sample hosts: `samples/src/Contoso.Products.Api/Program.cs`, `samples/src/Contoso.Products.Subscribe/Program.cs`, `samples/src/Contoso.Products.Outbox.Relay/Program.cs` diff --git a/.github/skills/add-capability/references/messaging-retrofit-checklist.md b/.github/skills/add-capability/references/messaging-retrofit-checklist.md new file mode 100644 index 00000000..bb11fbc6 --- /dev/null +++ b/.github/skills/add-capability/references/messaging-retrofit-checklist.md @@ -0,0 +1,46 @@ +# Messaging Retrofit Checklist + +Use this checklist as the completion gate for `/add-capability` messaging and integration retrofits. + +## Discovery + +- [ ] Identified the target domain and its existing project/host shape. +- [ ] Determined whether API, Database, Outbox.Relay, and Subscribe projects already exist. +- [ ] Determined whether SQL Server/outbox and Azure Service Bus are already present, missing, or intentionally not used. +- [ ] Confirmed any user choices that could not be inferred safely. + +## Project and Package Alignment + +- [ ] Added only the missing projects required by the requested retrofit. +- [ ] Added only the missing package and project references required by the affected hosts. +- [ ] Preserved the existing layered references and naming conventions. + +## Relay Retrofit + +- [ ] Relay host was added or aligned when requested. +- [ ] Relay `Program.cs` uses the expected CoreEx host setup, SQL Server relay wiring, Service Bus publisher wiring, health checks, and telemetry. 
+- [ ] API host has event formatter and outbox publisher wiring when the domain is expected to publish integration events. +- [ ] Database project contains required outbox tables and stored procedures when relay support is added. + +## Subscribe Retrofit + +- [ ] Subscribe host was added or aligned when requested. +- [ ] Subscribe `Program.cs` uses hosted service manager, subscribed manager, Service Bus receiver, hosted service mapping, health checks, and telemetry. +- [ ] Subscriber classes inherit from `SubscribedBase`. +- [ ] Subscriber classes use `[ScopedService]` and `[Subscribe("...")]`. +- [ ] Subscriber logic delegates to Application services rather than embedding business logic. +- [ ] Shared subscriber error handling is added where needed. + +## Host and Convention Alignment + +- [ ] Middleware order follows repo conventions. +- [ ] Dynamic service registration is used where expected. +- [ ] OpenTelemetry-compatible wiring is preserved or aligned for the affected hosts. +- [ ] Health endpoints and hosted service mapping are present where applicable. + +## Validation + +- [ ] Affected projects build or pass diagnostics. +- [ ] Any related tests were added or updated where practical. +- [ ] The final summary distinguishes completed retrofits from any blocked or intentionally deferred items. +- [ ] Any remaining user decisions are listed explicitly as follow-up items. diff --git a/.github/skills/add-capability/references/messaging-retrofit-checkpoints.md b/.github/skills/add-capability/references/messaging-retrofit-checkpoints.md new file mode 100644 index 00000000..e97110c9 --- /dev/null +++ b/.github/skills/add-capability/references/messaging-retrofit-checkpoints.md @@ -0,0 +1,84 @@ +# Messaging Retrofit Checkpoints + +Use these checkpoints when inspecting an existing domain before adding messaging and integration capabilities. + +## 1. 
Domain Shape Detection + +Look for these project patterns first: + +- `{Solution}.{Domain}.Api` +- `{Solution}.{Domain}.Application` +- `{Solution}.{Domain}.Infrastructure` +- `{Solution}.{Domain}.Database` +- `{Solution}.{Domain}.Outbox.Relay` +- `{Solution}.{Domain}.Subscribe` + +If the domain does not follow a recognizable CoreEx-style layered shape, treat the retrofit as ambiguous and ask before proceeding. + +## 2. Host Detection Signals + +| Capability or host | Evidence to inspect | Positive signal | +|---|---|---| +| API host | `Program.cs`, controllers, `*.Api.csproj` | `AddMvcWebApi`, `AddHttpWebApi`, controllers, OpenAPI setup | +| Relay host | `*.Outbox.Relay\\Program.cs`, relay csproj | `AddSqlServerOutboxRelay`, `AddSqlServerOutboxRelayHostedService`, `AddAzureServiceBusPublisher` | +| Subscribe host | `*.Subscribe\\Program.cs`, `Subscribe\\**\\*.cs` | `AddSubscribedManager`, `AzureServiceBusReceiving`, `MapHostedServices`, subscriber classes | +| Outbox publisher in API | API `Program.cs`, infrastructure repository/publisher files | `AddEventFormatter`, `AddSqlServerOutboxPublisher` | +| Service Bus support | affected host `Program.cs`, csproj references | `AddAzureServiceBusClient("ServiceBus")`, `CoreEx.Azure.Messaging.ServiceBus` | +| Telemetry alignment | `Program.cs` | `WithCoreExTelemetry`, `WithCoreExSqlServerTelemetry`, `WithCoreExServiceBusTelemetry`, `UseOtlpExporter` | + +## 3. Database and Outbox Detection + +When adding a relay or reliable publication support, inspect for: + +- `*.Database` project. +- outbox migrations. +- outbox stored procedures: + - `spOutboxEnqueue.g.sql` + - `spOutboxLeaseAcquire.g.sql` + - `spOutboxLeaseRelease.g.sql` + - `spOutboxBatchClaim.g.sql` + - `spOutboxBatchComplete.g.sql` + - `spOutboxBatchCancel.g.sql` +- database `Program.cs` and `dbex.yaml`. + +If relay is requested and these assets are missing, plan to add them or stop and ask if the domain is intentionally non-SQL/outbox-based. + +## 4. 
Subscriber Detection + +Inspect subscriber code for: + +- `[ScopedService]` +- `[Subscribe("...")]` +- inheritance from `SubscribedBase` +- `OnReceiveAsync` +- optional shared `ErrorHandler` +- delegation to Application services rather than embedded business logic + +## 5. Recommended MVP Retrofit Modes + +| Current state | Requested need | Recommended retrofit | +|---|---|---| +| API + Database, no relay | reliable integration-event publishing | Add `Outbox.Relay`, align API outbox publisher wiring | +| API + Database, no subscribe | consume external events | Add `Subscribe` host and initial subscribers | +| API + Database, no relay, no subscribe | publish and consume | Add both relay and subscribe | +| Subscribe host exists | new subjects or handlers | Add subscriber classes and registration only | +| API exists, no recognizable database/outbox shape | relay | Ask before proceeding; MVP assumes SQL Server/outbox path | + +## 6. Ambiguity Triggers + +Ask before changing anything when: + +- multiple similarly named domains could match the request. +- there is already partial relay or subscribe wiring that does not match the sample conventions. +- the domain appears to use non-SQL Server persistence for write workflows. +- the domain appears to use a broker other than Azure Service Bus. +- event subjects, payload contracts, or application service entry points are unclear. + +## 7. Default Initial Assumptions + +Unless the user says otherwise, the MVP retrofit assistant should assume: + +- SQL Server for outbox-backed write workflows. +- Azure Service Bus for publish/subscribe integration. +- OpenTelemetry-compatible host telemetry wiring should be preserved or aligned. +- relay and subscribe hosts should mirror the sample architecture, not invent a new host style. 
diff --git a/.github/skills/add-capability/references/workflow.md b/.github/skills/add-capability/references/workflow.md new file mode 100644 index 00000000..11e2bb21 --- /dev/null +++ b/.github/skills/add-capability/references/workflow.md @@ -0,0 +1,93 @@ +# Add Capability Workflow + +## Step 1: Load Context + +Before making changes, load: + +1. Instruction files in `/.github/instructions/`: + - `host-setup.instructions.md` + - `event-subscribers.instructions.md` + - `application-services.instructions.md` + - `database-project.instructions.md` + +2. Sample host wiring from: + - `samples/src/Contoso.Products.Api/Program.cs` + - `samples/src/Contoso.Products.Subscribe/Program.cs` + - `samples/src/Contoso.Products.Outbox.Relay/Program.cs` + +3. Domain templates under `/.github/templates/domain/**` + +## Step 2: Inspect Domain State + +Determine current shape before proposing changes. + +Inspect for: +- Domain boundary and project names +- Existing hosts: `*.Api`, `*.Outbox.Relay`, `*.Subscribe` +- Database support: `*.Database` project, outbox tables/procedures, SQL Server references +- Messaging support: `CoreEx.Events`, `CoreEx.Azure.Messaging.ServiceBus`, `AddEventFormatter`, `AddSqlServerOutboxPublisher`, `AddSubscribedManager`, `AzureServiceBusReceiving` +- Existing telemetry and health wiring +- Integration-event semantics: subjects, subscriber classes, related service methods + +Use conservative detection. Ask if ambiguous. + +## Step 3: Clarify User Intent + +Ask only what cannot be inferred: +- Which domain to retrofit? +- Which capability: relay, subscribe, subscriber classes, or combined? +- Use SQL Server and Azure Service Bus as defaults? +- If adding subscribers: what subjects and payload contracts? +- Infrastructure/host wiring only, or also application-facing handlers? + +## Step 4: Choose Retrofit Mode + +### Mode A — Add Outbox.Relay +Use when domain already writes data and should publish integration events reliably. 
+ +Expected work: +- Create `*.Outbox.Relay` project if missing +- Add packages and project references +- Add relay `Program.cs` wiring per host-setup conventions +- Ensure database has outbox tables and procedures +- Ensure API host has event formatter + outbox publisher wiring + +### Mode B — Add Subscribe +Use when domain must consume integration events/commands from other services. + +Expected work: +- Create `*.Subscribe` project if missing +- Add Service Bus client and receiver wiring +- Add hosted service manager and mapping +- Add subscriber classes and registration +- Reuse reference data, cache, infrastructure, telemetry patterns + +### Mode C — Add Both Relay and Subscribe +Service publishes its own events AND consumes events from others. + +### Mode D — Add Subscribers to Existing Subscribe Host +Host exists but subscriber classes, registration, or error handling incomplete. + +## Step 5: Apply Incremental Changes + +Prefer targeted edits over regeneration. + +Rules: +1. Reuse existing project naming and layering +2. Do not duplicate wiring that exists +3. Keep subscriber logic thin; delegate to Application services +4. Preserve host middleware order and telemetry conventions +5. Reuse domain templates only for missing pieces +6. If domain shape inconsistent, stop and explain blockers + +## Step 6: Validate + +Run messaging-retrofit-checklist.md completion gate. 
+ +Minimum criteria: +- `Program.cs` files follow host setup conventions +- Required package/project references present +- Relay outbox database assets exist when relay added +- Subscribers registered with `SubscribedBase` patterns +- Files fit existing naming/layering conventions +- Clean build/diagnostics diff --git a/.github/skills/aspire/SKILL.md b/.github/skills/aspire/SKILL.md new file mode 100644 index 00000000..5e8b6394 --- /dev/null +++ b/.github/skills/aspire/SKILL.md @@ -0,0 +1,108 @@ +--- +name: aspire +description: "Orchestrates Aspire distributed applications using the Aspire CLI for running, debugging, and managing distributed apps. USE FOR: aspire start, aspire stop, start aspire app, aspire describe, list aspire integrations, debug aspire issues, view aspire logs, add aspire resource, aspire dashboard, update aspire apphost. DO NOT USE FOR: non-Aspire .NET apps (use dotnet CLI), container-only deployments (use docker/podman), Azure deployment after local testing (use azure-deploy skill). INVOKES: Aspire CLI commands (aspire start, aspire describe, aspire otel logs, aspire docs search, aspire add), bash. FOR SINGLE OPERATIONS: Use Aspire CLI commands directly for quick resource status or doc lookups." +argument-hint: "Optional: resource name, command (start/stop/logs), or debug context" +tags: ["aspire", "orchestration", "distributed-apps", "cli", "debugging"] +--- + +# Aspire Skill + +This repository uses Aspire to orchestrate its distributed application. Resources are defined in the AppHost project (`apphost.cs` or `apphost.ts`). 
+ +## CLI command reference + +| Task | Command | +|---|---| +| Start the app | `aspire start` | +| Start isolated (worktrees) | `aspire start --isolated` | +| Restart the app | `aspire start` (stops previous automatically) | +| Wait for resource healthy | `aspire wait <resource>` | +| Stop the app | `aspire stop` | +| List resources | `aspire describe` or `aspire resources` | +| Run resource command | `aspire resource <command>` | +| Start/stop/restart resource | `aspire resource start\|stop\|restart` | +| Rebuild a .NET project resource | `aspire resource rebuild` | +| View console logs | `aspire logs [resource]` | +| View structured logs | `aspire otel logs [resource]` | +| View traces | `aspire otel traces [resource]` | +| Logs for a trace | `aspire otel logs --trace-id <trace-id>` | +| Add an integration | `aspire add` | +| List running AppHosts | `aspire ps` | +| Update AppHost packages | `aspire update` | +| Search docs | `aspire docs search <query>` | +| Get doc page | `aspire docs get <page>` | +| List doc pages | `aspire docs list` | +| Environment diagnostics | `aspire doctor` | +| List resource MCP tools | `aspire mcp tools` | +| Call resource MCP tool | `aspire mcp call <tool> --input <json>` | + +Most commands support `--format Json` for machine-readable output. Use `--apphost <path>` to target a specific AppHost. + +## Key workflows + +### Running in agent environments + +Use `aspire start` to run the AppHost in the background. 
When working in a git worktree, use `--isolated` to avoid port conflicts and to prevent sharing user secrets or other local state with other running instances: + +```bash +aspire start --isolated +``` + +Use `aspire wait ` to block until a resource is healthy before interacting with it: + +```bash +aspire start --isolated +aspire wait myapi +``` + +### Applying code changes + +Choose the right action based on what changed: + +| What changed | Action | Why | +|---|---|---| +| AppHost project (`apphost.cs`/`apphost.ts`) | `aspire start` | Resource graph changed; full restart required | +| Compiled .NET project resource | `aspire resource rebuild` | Rebuilds and restarts only that resource | +| Interpreted resource (JavaScript, Python) | Typically nothing — most run with file watchers | Restart the resource if no watch mode is configured | + +**Never restart the entire AppHost just because a single resource changed.** Use `aspire resource rebuild` for .NET project resources — it coordinates stop, build, and restart for just that resource. Use `aspire describe --format Json` to check which commands a resource supports. + +### Debugging issues + +Before making code changes, inspect the app state: + +1. `aspire describe` — check resource status +2. `aspire otel logs ` — view structured logs +3. `aspire logs ` — view console output +4. `aspire otel traces ` — view distributed traces + +### Adding integrations + +Use `aspire docs search` to find integration documentation, then `aspire docs get` to read the full guide. Use `aspire add` to add the integration package to the AppHost. + +After adding an integration, restart the app with `aspire start` for the new resource to take effect. + +### Using resource MCP tools + +Some resources expose MCP tools (e.g. `WithPostgresMcp()` adds SQL query tools). 
Discover and call them via CLI: + +```bash +aspire mcp tools # list available tools +aspire mcp tools --format Json # includes input schemas +aspire mcp call --input '{"key":"value"}' # invoke a tool +``` + +## Important rules + +- **Always start the app first** (`aspire start`) before making changes to verify the starting state. +- **To restart, just run `aspire start` again** — it automatically stops the previous instance. NEVER use `aspire stop` then `aspire run`. NEVER use `aspire run` at all. +- **Only restart the AppHost when AppHost code changes.** For .NET project resources, use `aspire resource rebuild` instead. +- Use `--isolated` when working in a worktree. +- **Avoid persistent containers** early in development to prevent state management issues. +- **Never install the Aspire workload** — it is obsolete. +- **For Aspire API reference and documentation, prefer `aspire docs search ` and `aspire docs get `** over searching NuGet package caches or XML doc files. The CLI provides up-to-date content from aspire.dev. +- Prefer `aspire.dev` and `learn.microsoft.com/microsoft/aspire` for official documentation. + +## Playwright CLI + +If configured, use Playwright CLI for functional testing of resources. Get endpoints via `aspire describe`. Run `playwright-cli --help` for available commands. \ No newline at end of file diff --git a/.github/skills/generate-domain/SKILL.md b/.github/skills/generate-domain/SKILL.md new file mode 100644 index 00000000..a1e42a7d --- /dev/null +++ b/.github/skills/generate-domain/SKILL.md @@ -0,0 +1,42 @@ +--- +name: generate-domain +description: "Generate a new CoreEx domain or microservice. Use when: scaffolding a new domain, creating a new microservice, adding a new bounded context, generating sample domain code like shopping or product, creating contracts/application/infrastructure/API/database layers from scratch following CoreEx conventions." +argument-hint: "Optional: solution prefix, domain name, and root entity — e.g. 
'Contoso Orders Order'" +tags: ["scaffolding", "microservice", "bounded-context", "code-generation", "layering"] +--- + +# Generate Domain + +Scaffolds all layers of a new CoreEx domain — Contracts, Application, Infrastructure, API, Database, and baseline Unit/Api tests — aligned to the Contoso sample architecture. + +## When to Use + +- Scaffolding a new microservice or bounded context from scratch. +- Generating domain code that follows CoreEx conventions (ETag, ChangeLog, Outbox, FusionCache, NSwag). +- Producing code that mirrors the Shopping or Product sample domains. + +## Inputs Required + +Before generating, confirm with user: + +| Input | Example | +|-------|---------| +| Solution prefix | `Contoso` | +| Domain name | `Orders` | +| Root entity | `Order` | +| Fields, ref-data codes, operations, event subjects | Confirm before generating | + +## Workflow Overview + +For complete step-by-step workflow covering all 8 phases (Contracts, Application, Infrastructure, API, Database, Tests, Quality Gates, Naming), see [`references/workflow.md`](references/workflow.md). + +## Key References + +- All instruction files: `/.github/instructions/*.instructions.md` +- Templates: `/.github/templates/domain/**` +- Checklist: `DomainScaffold.checklist.md` +- Sample domains: `samples/src/Contoso.Products/`, `samples/src/Contoso.Shopping/` diff --git a/.github/skills/generate-domain/references/workflow.md b/.github/skills/generate-domain/references/workflow.md new file mode 100644 index 00000000..e0696315 --- /dev/null +++ b/.github/skills/generate-domain/references/workflow.md @@ -0,0 +1,119 @@ +# Generate Domain Detailed Workflow + +## Phase 1: Load Context + +Before generating any files: + +1. 
Read all `.github/instructions/*.instructions.md` — especially api-controllers, application-services, contracts, database-project, repositories, tests, validators, host-setup +2. Load all templates in `/.github/templates/domain/**` +3. Load `DomainScaffold.checklist.md` to track completion gates + +## Phase 2: Gather and Confirm Inputs + +Ask user for any values not supplied. Confirm all before creating files. + +| Input | Example | +|-------|---------| +| Solution prefix | `Contoso` | +| Domain name | `Orders` | +| Root entity name | `Order` | +| Root entity fields | Names, types, ref-data codes, read-only flags | +| Child entity (optional) | `OrderItem` with fields | +| Operations | Create / Read / Update / Patch / Delete | +| Event subjects | Confirm: `{solution}.{domain}.{entity}.{action}.v1` | + +## Phase 3: Generate Contracts Layer + +`{Solution}.{Domain}.Contracts` + +In order: +1. `GlobalUsing.cs` — usings: `CoreEx.Entities`, `CoreEx.Localization`, `CoreEx.RefData`, `System.ComponentModel`, `System.Text.Json.Serialization` +2. `{Entity}Base.cs` — `[Contract] partial class` with `IIdentifier`. Use `[ReadOnly(true)]` for server fields. Use `[ReferenceData]` for ref-data codes. Add `[Localization("...")]` for poor property names. +3. `{Entity}.cs` — extends `{Entity}Base`, implements `IETag, IChangeLog`. Mark `ETag` and `ChangeLog` as `[ReadOnly(true)]`. +4. `{Entity}Lite.cs` (optional) — trimmed projection for query responses +5. Reference data types — inherit `ReferenceData`, use `[ReferenceData]`, pair with `{Type}Collection` +6. `{Solution}.{Domain}.Contracts.csproj` + +## Phase 4: Generate Application Layer + +`{Solution}.{Domain}.Application` + +In order: +1. `GlobalUsing.cs` +2. `Interfaces/I{Entity}Service.cs` + `I{Entity}ReadService.cs` (if CQRS) +3. `Repositories/I{Entity}Repository.cs` — return types: `Task<{Entity}?>` (Get), `Task>` (Create/Update), `Task` (Delete), `Task>` (Query) +4. 
`Validators/{Entity}Validator.cs` — `Validator<{Entity}, {Entity}Validator>` with `.Mandatory()`, `.MaximumLength()`, `.IsValid()`, `.PrecisionScale()` +5. `{Entity}Service.cs` — `[ScopedService]`. Guard inputs. Validate. Wrap mutations in `_unitOfWork.ExecuteAsync(...)`. Emit events inside `WhereMutated(...)` +6. `{Entity}ReadService.cs` (if CQRS) — read-only, no UoW or events +7. `{Solution}.{Domain}.Application.csproj` + +## Phase 5: Generate Infrastructure Layer + +`{Solution}.{Domain}.Infrastructure` + +In order: +1. `{Domain}EfDb.cs` — EF database class with `EfDbSet<{Entity}>` +2. `{Domain}DbContext.cs` — DbContext with entity model config +3. `Repositories/{Entity}Repository.cs` — `[ScopedService]`. Static `QueryArgsConfig`. Apply via `.Where(parsed).OrderBy(parsed).ToMappedItemsResultAsync(...)` +4. `{Domain}OutboxPublisher.cs` +5. `{Solution}.{Domain}.Infrastructure.csproj` + +## Phase 6: Generate API Host + +`{Solution}.{Domain}.Api` + +In order: +1. `Controllers/{Entity}Controller.cs` — mutations (POST, PUT, PATCH, DELETE). `[ApiController, Route("/api/{entities}"), OpenApiTag(...)]`. `[IdempotencyKey]` on POST +2. `Controllers/{Entity}ReadController.cs` — reads (GET single, GET query). `[Query(supportsOrderBy: true), Paging(supportsCount: true)]` +3. `Program.cs` — AddExecutionContext, AddReferenceDataOrchestrator (if needed), AddMvcWebApi, AddHttpWebApi, AddDynamicServicesUsing, FusionCache, SQL Server + EF + Outbox, OpenAPI, telemetry, middleware order +4. `appsettings.json` — connection string placeholders: `SqlServer`, `redis` +5. `GlobalUsing.cs` +6. `{Solution}.{Domain}.Api.csproj` + +## Phase 7: Generate Database + +`{Solution}.{Domain}.Database` + +In order: +1. `Program.cs` — `SqlServerMigrationConsole` with `DataResetFilterPredicate` scoped to `{Domain}` schema +2. `dbex.yaml` — outbox enabled; full table list +3. `Migrations/*.sql` — schema, ref-data, aggregate, child, and outbox tables +4. 
`Schema/Stored Procedures/*.g.sql` — six outbox stored procedures +5. `Data/ref-data.yaml` — seed data +6. `{Solution}.{Domain}.Database.csproj` + +## Phase 8: Generate Test Projects + +`{Solution}.{Domain}.Test.*` + +In order: +1. Create `{Solution}.{Domain}.Test.Unit` — validator/service-focused tests with `WithGenericTester` +2. Create `{Solution}.{Domain}.Test.Api` — `WithApiTester<{Solution}.{Domain}.Api.Program>` +3. Ensure both use AwesomeAssertions (not FluentAssertions) +4. Add both to solution under `{Domain}` solution folder +5. Group all domain projects together under `{Domain}` folder + +## Quality Gates + +Check before finishing: +- Every injected dependency guarded with `.ThrowIfNull()` +- Every `await` uses `.ConfigureAwait(false)` +- All mutations wrapped in `_unitOfWork.ExecuteAsync(...)` +- Events added inside `WhereMutated(...)` only +- POST endpoints carry `[IdempotencyKey]` +- Both Unit and Api test projects scaffolded +- `dotnet test` passes for Unit and Api projects +- `DomainScaffold.checklist.md` fully checked + +## Naming Conventions + +| Artefact | Pattern | +|----------|---------| +| Namespace root | `{Solution}.{Domain}.{Layer}` | +| Event subjects | `{solution}.{domain}.{entity}.{action}.v1` (lowercase) | +| Write controller | `{Entity}Controller` | +| Read controller | `{Entity}ReadController` | +| Write service | `{Entity}Service` / `I{Entity}Service` | +| Read service | `{Entity}ReadService` / `I{Entity}ReadService` | +| Repository | `{Entity}Repository` / `I{Entity}Repository` | +| Ref-data collection | `{Type}Collection` | diff --git a/.github/templates/domain/Api/Controllers/EntityController.cs.template b/.github/templates/domain/Api/Controllers/EntityController.cs.template new file mode 100644 index 00000000..331cbdc4 --- /dev/null +++ b/.github/templates/domain/Api/Controllers/EntityController.cs.template @@ -0,0 +1,38 @@ +namespace {Solution}.{Domain}.Api.Controllers; + +[ApiController, Route("/api/{entityPluralKebab}"), 
OpenApiTag("{EntityPlural}")] +public class {Entity}Controller(WebApi webApi, I{Entity}Service service) : ControllerBase +{ + private readonly WebApi _webApi = webApi.ThrowIfNull(); + private readonly I{Entity}Service _service = service.ThrowIfNull(); + + [HttpPost] + [Accepts<{Entity}>] + [ProducesResponseType<{Entity}>(StatusCodes.Status201Created)] + [IdempotencyKey] + public Task PostAsync() => _webApi.PostAsync<{Entity}, {Entity}>(Request, (ro, _) => + { + ro.WithLocationUri(e => new Uri($"/api/{entityPluralKebab}/{e.Id}", UriKind.Relative)); + return _service.CreateAsync(ro.Value); + }); + + [HttpPut("{id}")] + [Accepts<{Entity}>] + [ProducesResponseType(typeof({Entity}), StatusCodes.Status200OK)] + [ProducesNotFoundProblem()] + public Task PutAsync(string id) => _webApi.PutAsync<{Entity}, {Entity}>(Request, (ro, _) + => _service.UpdateAsync(ro.Value.Adjust(e => e.Id = id.Required()))); + + [HttpPatch("{id}")] + [Accepts<{Entity}>(HttpNames.MergePatchJsonMediaTypeName)] + [ProducesResponseType(typeof({Entity}), StatusCodes.Status200OK)] + [ProducesNotFoundProblem()] + public Task PatchAsync(string id) => _webApi.PatchAsync<{Entity}>(Request, + get: (ro, _) => _service.GetAsync(id.Required()), + put: (ro, _) => _service.UpdateAsync(ro.Value.Adjust(e => e.Id = id))); + + [HttpDelete("{id}")] + [ProducesResponseType(StatusCodes.Status204NoContent)] + public Task DeleteAsync(string id) => _webApi.DeleteAsync(Request, (_, _) + => _service.DeleteAsync(id.Required())); +} diff --git a/.github/templates/domain/Api/Controllers/EntityReadController.cs.template b/.github/templates/domain/Api/Controllers/EntityReadController.cs.template new file mode 100644 index 00000000..73c37325 --- /dev/null +++ b/.github/templates/domain/Api/Controllers/EntityReadController.cs.template @@ -0,0 +1,20 @@ +namespace {Solution}.{Domain}.Api.Controllers; + +[ApiController, Route("/api/{entityPluralKebab}"), OpenApiTag("{EntityPlural}")] +public class {Entity}ReadController(WebApi webApi, 
I{Entity}ReadService service) : ControllerBase +{ + private readonly WebApi _webApi = webApi.ThrowIfNull(); + private readonly I{Entity}ReadService _service = service.ThrowIfNull(); + + [HttpGet("{id}"), HttpHead("{id}")] + [ProducesResponseType(typeof({Entity}), StatusCodes.Status200OK)] + [ProducesNotFoundProblem()] + public Task GetAsync(string id) => _webApi.GetAsync(Request, (_, _) + => _service.GetAsync(id.Required())); + + [HttpGet] + [ProducesResponseType(typeof({Entity}Lite[]), StatusCodes.Status200OK)] + [Query(supportsOrderBy: true), Paging(supportsCount: true)] + public Task QueryAsync() => _webApi.GetAsync(Request, (ro, _) + => _service.QueryAsync(ro.QueryArgs, ro.PagingArgs)); +} diff --git a/.github/templates/domain/Api/Controllers/ReferenceDataController.cs.template b/.github/templates/domain/Api/Controllers/ReferenceDataController.cs.template new file mode 100644 index 00000000..ddebd64a --- /dev/null +++ b/.github/templates/domain/Api/Controllers/ReferenceDataController.cs.template @@ -0,0 +1,12 @@ +namespace {Solution}.{Domain}.Api.Controllers; + +[ApiController, Route("/api/refdata")] +public class ReferenceDataController(WebApi webApi) : ControllerBase +{ + private readonly WebApi _webApi = webApi.ThrowIfNull(); + + [HttpGet("{entityKebab}-statuses"), HttpHead("{entityKebab}-statuses")] + [ProducesResponseType(typeof({Entity}Status[]), StatusCodes.Status200OK)] + public Task Get{Entity}StatusesAsync([FromQuery] IEnumerable? codes = default, string? 
text = default) + => _webApi.GetAsync(Request, (ro, ct) => ReferenceDataOrchestrator.Current.GetWithFilterAsync<{Entity}Status>(codes, text, ro.IsIncludeInactive, ct)); +} diff --git a/.github/templates/domain/Api/Domain.Api.csproj.template b/.github/templates/domain/Api/Domain.Api.csproj.template new file mode 100644 index 00000000..cdeab65e --- /dev/null +++ b/.github/templates/domain/Api/Domain.Api.csproj.template @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + diff --git a/.github/templates/domain/Api/GlobalUsing.cs.template b/.github/templates/domain/Api/GlobalUsing.cs.template new file mode 100644 index 00000000..cbf89e82 --- /dev/null +++ b/.github/templates/domain/Api/GlobalUsing.cs.template @@ -0,0 +1,14 @@ +global using {Solution}.{Domain}.Application; +global using {Solution}.{Domain}.Application.Interfaces; +global using {Solution}.{Domain}.Contracts; +global using CoreEx; +global using CoreEx.AspNetCore.Mvc; +global using CoreEx.Entities; +global using CoreEx.Http; +global using CoreEx.Json; +global using CoreEx.RefData; +global using CoreEx.Validation; +global using Microsoft.AspNetCore.Mvc; +global using NSwag.Annotations; +global using System.Net; +global using System.Text.Json; diff --git a/.github/templates/domain/Api/Program.cs.template b/.github/templates/domain/Api/Program.cs.template new file mode 100644 index 00000000..f5150584 --- /dev/null +++ b/.github/templates/domain/Api/Program.cs.template @@ -0,0 +1,78 @@ +using {Solution}.{Domain}.Infrastructure.Repositories; +using Microsoft.Extensions.Options; +using OpenTelemetry; +using OpenTelemetry.Trace; +using StackExchange.Redis; +using ZiggyCreatures.Caching.Fusion; +using ZiggyCreatures.Caching.Fusion.Backplane.StackExchangeRedis; + +namespace {Solution}.{Domain}.Api; + +public class Program +{ + private static void Main(string[] args) + { + var builder = WebApplication.CreateBuilder(args); + + builder.AddHostSettings(); + + builder.Services + .AddExecutionContext() + 
.AddReferenceDataOrchestrator() + .AddMvcWebApi() + .AddHttpWebApi(); + + builder.Services.AddDynamicServicesUsing(); + + builder.Services.AddMemoryCache(); + builder.AddRedisDistributedCache("redis"); + + builder.Services.AddFusionCache() + .WithRegisteredMemoryCache() + .WithRegisteredDistributedCache() + .WithBackplane(sp => new RedisBackplane(new RedisBackplaneOptions { Configuration = sp.GetRequiredService>().Value.ToString() })) + .WithSystemTextJsonSerializer(JsonDefaults.SerializerOptions); + + builder.Services + .AddFusionHybridCache() + .AddDefaultCacheKeyProvider() + .AddHybridCacheIdempotencyProvider(); + + builder.AddSqlServerClient("SqlServer"); + builder.Services + .AddSqlServerDatabase() + .AddSqlServerUnitOfWork() + .AddEventFormatter() + .AddSqlServerOutboxPublisher<{Domain}OutboxPublisher>() + .AddDbContext<{Domain}DbContext>() + .AddEfDb<{Domain}EfDb>(); + + builder.Services.PostConfigureAllHealthChecks(); + builder.Services.AddControllers(); + + builder.Services.AddOpenApiDocument(s => + { + s.Title = builder.Environment.ApplicationName; + s.AddCoreExConfiguration(); + }); + + builder.WithCoreExTelemetry() + .WithCoreExSqlServerTelemetry() + .UseOtlpExporter(); + + var app = builder.Build(); + + app.UseCoreExExceptionHandler(); + app.UseHttpsRedirection(); + app.UseAuthorization(); + app.UseExecutionContext(); + app.UseIdempotencyKey(); + app.MapControllers(); + + app.UseOpenApi(); + app.UseSwaggerUi(); + app.MapHealthChecks(); + + app.Run(); + } +} diff --git a/.github/templates/domain/Api/appsettings.json.template b/.github/templates/domain/Api/appsettings.json.template new file mode 100644 index 00000000..e676968a --- /dev/null +++ b/.github/templates/domain/Api/appsettings.json.template @@ -0,0 +1,20 @@ +{ + "CoreEx": { + "Host": { + "SolutionName": "{Solution}", + "DomainName": "{Domain}" + }, + "Events": { + "Destination": "contoso" + } + }, + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning", + 
"Microsoft.EntityFrameworkCore": "Warning", + "Microsoft.EntityFrameworkCore.Update": "None" + } + }, + "AllowedHosts": "*" +} diff --git a/.github/templates/domain/Application/Domain.Application.csproj.template b/.github/templates/domain/Application/Domain.Application.csproj.template new file mode 100644 index 00000000..0241b47e --- /dev/null +++ b/.github/templates/domain/Application/Domain.Application.csproj.template @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/.github/templates/domain/Application/EntityReadService.cs.template b/.github/templates/domain/Application/EntityReadService.cs.template new file mode 100644 index 00000000..29977161 --- /dev/null +++ b/.github/templates/domain/Application/EntityReadService.cs.template @@ -0,0 +1,12 @@ +namespace {Solution}.{Domain}.Application; + +[ScopedService] +public class {Entity}ReadService(I{Entity}Repository repository) : I{Entity}ReadService +{ + private readonly I{Entity}Repository _repository = repository.ThrowIfNull(); + + public Task<{Entity}?> GetAsync(string id) => _repository.GetAsync(id); + + public Task> QueryAsync(QueryArgs? query, PagingArgs? 
paging) + => _repository.QueryAsync(query, paging); +} diff --git a/.github/templates/domain/Application/EntityService.cs.template b/.github/templates/domain/Application/EntityService.cs.template new file mode 100644 index 00000000..7914da45 --- /dev/null +++ b/.github/templates/domain/Application/EntityService.cs.template @@ -0,0 +1,56 @@ +namespace {Solution}.{Domain}.Application; + +[ScopedService] +public class {Entity}Service(IUnitOfWork unitOfWork, I{Entity}Repository repository) : I{Entity}Service +{ + private readonly IUnitOfWork _unitOfWork = unitOfWork.ThrowIfNull(); + private readonly I{Entity}Repository _repository = repository.ThrowIfNull(); + + public Task<{Entity}?> GetAsync(string id) => _repository.GetAsync(id); + + public async Task<{Entity}> CreateAsync({Entity} entity) + { + entity.ThrowIfNull(); + + await {Entity}Validator.Default.ValidateAndThrowAsync(entity).ConfigureAwait(false); + + entity.Id = Runtime.NewId(); + entity.StatusCode ??= "P"; + + return await _unitOfWork.ExecuteAsync(async () => + { + var dr = await _repository.CreateAsync(entity).ConfigureAwait(false); + return dr.WhereMutated(v => _unitOfWork.Events.Add(EventData.CreateEventWith(v, EventAction.Created))); + }).ConfigureAwait(false); + } + + public async Task<{Entity}> UpdateAsync({Entity} entity) + { + entity.ThrowIfNull(); + entity.Id.ThrowIfNullOrEmpty(); + + await {Entity}Validator.Default.ValidateAndThrowAsync(entity).ConfigureAwait(false); + + var current = await _repository.GetAsync(entity.Id).ConfigureAwait(false); + NotFoundException.ThrowIfDefault(current); + + return await _unitOfWork.ExecuteAsync(async () => + { + var dr = await _repository.UpdateAsync(entity).ConfigureAwait(false); + return dr.WhereMutated(v => _unitOfWork.Events.Add(EventData.CreateEventWith(v, EventAction.Updated))); + }).ConfigureAwait(false); + } + + public async Task DeleteAsync(string id) + { + var entity = await _repository.GetAsync(id).ConfigureAwait(false); + if (entity is null) + 
return; + + await _unitOfWork.ExecuteAsync(async () => + { + var dr = await _repository.DeleteAsync(id).ConfigureAwait(false); + dr.WhereMutated(() => _unitOfWork.Events.Add(EventData.CreateEventWith<{Entity}>(default, EventAction.Deleted).WithKey(id))); + }).ConfigureAwait(false); + } +} diff --git a/.github/templates/domain/Application/GlobalUsing.cs.template b/.github/templates/domain/Application/GlobalUsing.cs.template new file mode 100644 index 00000000..e3f020b1 --- /dev/null +++ b/.github/templates/domain/Application/GlobalUsing.cs.template @@ -0,0 +1,14 @@ +global using {Solution}.{Domain}.Application.Interfaces; +global using {Solution}.{Domain}.Application.Repositories; +global using {Solution}.{Domain}.Application.Validators; +global using {Solution}.{Domain}.Contracts; +global using CoreEx; +global using CoreEx.Data; +global using CoreEx.DependencyInjection; +global using CoreEx.Events; +global using CoreEx.Localization; +global using CoreEx.RefData; +global using CoreEx.RefData.Abstractions; +global using CoreEx.Results; +global using CoreEx.Validation; +global using System.Text.Json; diff --git a/.github/templates/domain/Application/Interfaces/IEntityReadService.cs.template b/.github/templates/domain/Application/Interfaces/IEntityReadService.cs.template new file mode 100644 index 00000000..f828ce70 --- /dev/null +++ b/.github/templates/domain/Application/Interfaces/IEntityReadService.cs.template @@ -0,0 +1,8 @@ +namespace {Solution}.{Domain}.Application.Interfaces; + +public interface I{Entity}ReadService +{ + Task GetAsync(string id); + + Task> QueryAsync(QueryArgs? query, PagingArgs? 
paging); +} diff --git a/.github/templates/domain/Application/Interfaces/IEntityService.cs.template b/.github/templates/domain/Application/Interfaces/IEntityService.cs.template new file mode 100644 index 00000000..5df58402 --- /dev/null +++ b/.github/templates/domain/Application/Interfaces/IEntityService.cs.template @@ -0,0 +1,12 @@ +namespace {Solution}.{Domain}.Application.Interfaces; + +public interface I{Entity}Service +{ + Task<Contracts.{Entity}?> GetAsync(string id); + + Task<Contracts.{Entity}> CreateAsync(Contracts.{Entity} entity); + + Task<Contracts.{Entity}> UpdateAsync(Contracts.{Entity} entity); + + Task DeleteAsync(string id); +} diff --git a/.github/templates/domain/Application/ReferenceDataService.cs.template b/.github/templates/domain/Application/ReferenceDataService.cs.template new file mode 100644 index 00000000..e55a00a6 --- /dev/null +++ b/.github/templates/domain/Application/ReferenceDataService.cs.template @@ -0,0 +1,18 @@ +namespace {Solution}.{Domain}.Application; + +[ScopedService] +public class ReferenceDataService(IReferenceDataRepository repository) : IReferenceDataProvider +{ + private readonly IReferenceDataRepository _repository = repository.ThrowIfNull(); + + public IEnumerable<(Type, Type)> Types => + [ + (typeof({Entity}Status), typeof({Entity}StatusCollection)), + ]; + + public async Task GetAsync(Type type, CancellationToken cancellationToken = default) => type switch + { + _ when type == typeof({Entity}Status) => await _repository.GetAll{Entity}StatusesAsync().ConfigureAwait(false), + _ => throw new InvalidOperationException($"Type {type.FullName} is not a known {nameof(IReferenceData)}.") + }; +} diff --git a/.github/templates/domain/Application/Repositories/IEntityRepository.cs.template b/.github/templates/domain/Application/Repositories/IEntityRepository.cs.template new file mode 100644 index 00000000..2041a994 --- /dev/null +++ b/.github/templates/domain/Application/Repositories/IEntityRepository.cs.template @@ -0,0 +1,14 @@ +namespace {Solution}.{Domain}.Application.Repositories; + 
+public interface I{Entity}Repository +{ + Task GetAsync(string id); + + Task> CreateAsync(Contracts.{Entity} entity); + + Task> UpdateAsync(Contracts.{Entity} entity); + + Task DeleteAsync(string id); + + Task> QueryAsync(QueryArgs? query, PagingArgs? paging); +} diff --git a/.github/templates/domain/Application/Repositories/IReferenceDataRepository.cs.template b/.github/templates/domain/Application/Repositories/IReferenceDataRepository.cs.template new file mode 100644 index 00000000..981f8e4a --- /dev/null +++ b/.github/templates/domain/Application/Repositories/IReferenceDataRepository.cs.template @@ -0,0 +1,6 @@ +namespace {Solution}.{Domain}.Application.Repositories; + +public interface IReferenceDataRepository +{ + Task<{Entity}StatusCollection> GetAll{Entity}StatusesAsync(); +} diff --git a/.github/templates/domain/Application/Validators/EntityValidator.cs.template b/.github/templates/domain/Application/Validators/EntityValidator.cs.template new file mode 100644 index 00000000..a038705c --- /dev/null +++ b/.github/templates/domain/Application/Validators/EntityValidator.cs.template @@ -0,0 +1,16 @@ +namespace {Solution}.{Domain}.Application.Validators; + +public class {Entity}Validator : Validator<{Entity}, {Entity}Validator> +{ + private static readonly Validator<{ChildEntity}> _itemValidator = Validator.Create<{ChildEntity}>() + .HasProperty(x => x.ProductId, c => c.Mandatory().MaximumLength(100)) + .HasProperty(x => x.Quantity, c => c.GreaterThan(0).PrecisionScale(null, 4)) + .HasProperty(x => x.UnitPrice, c => c.GreaterThanOrEqualTo(0, _ => "zero").PrecisionScale(null, 4)); + + public {Entity}Validator() + { + Property(o => o.CustomerId).Mandatory().MaximumLength(100); + Property(o => o.Status).Mandatory().IsValid(); + Property(o => o.Items).Collection(c => c.MinimumCount(1).Entity(_itemValidator)); + } +} diff --git a/.github/templates/domain/Contracts/ChildEntity.cs.template b/.github/templates/domain/Contracts/ChildEntity.cs.template new file mode 
100644 index 00000000..a7612950 --- /dev/null +++ b/.github/templates/domain/Contracts/ChildEntity.cs.template @@ -0,0 +1,14 @@ +namespace {Solution}.{Domain}.Contracts; + +[Contract] +public partial class {ChildEntity} : IIdentifier +{ + [ReadOnly(true)] + public string? Id { get; set; } + + public string? ProductId { get; set; } + + public decimal Quantity { get; set; } + + public decimal UnitPrice { get; set; } +} diff --git a/.github/templates/domain/Contracts/Domain.Contracts.csproj.template b/.github/templates/domain/Contracts/Domain.Contracts.csproj.template new file mode 100644 index 00000000..d83e1292 --- /dev/null +++ b/.github/templates/domain/Contracts/Domain.Contracts.csproj.template @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/.github/templates/domain/Contracts/Entity.cs.template b/.github/templates/domain/Contracts/Entity.cs.template new file mode 100644 index 00000000..ece359f1 --- /dev/null +++ b/.github/templates/domain/Contracts/Entity.cs.template @@ -0,0 +1,11 @@ +namespace {Solution}.{Domain}.Contracts; + +[Contract] +public partial class {Entity} : {Entity}Base, IETag, IChangeLog +{ + [ReadOnly(true)] + public ChangeLog? ChangeLog { get; set; } + + [ReadOnly(true)] + public string? ETag { get; set; } +} diff --git a/.github/templates/domain/Contracts/EntityBase.cs.template b/.github/templates/domain/Contracts/EntityBase.cs.template new file mode 100644 index 00000000..638eb79d --- /dev/null +++ b/.github/templates/domain/Contracts/EntityBase.cs.template @@ -0,0 +1,16 @@ +namespace {Solution}.{Domain}.Contracts; + +[Contract] +public abstract partial class {Entity}Base : IIdentifier +{ + [ReadOnly(true)] + public string? Id { get; set; } + + public string? CustomerId { get; set; } + + [ReferenceData<{Entity}Status>] + [Localization("{Entity} status")] + public partial string? StatusCode { get; set; } + + public List<{ChildEntity}>? 
Items { get; set; } +} diff --git a/.github/templates/domain/Contracts/EntityLite.cs.template b/.github/templates/domain/Contracts/EntityLite.cs.template new file mode 100644 index 00000000..1f5fc4fd --- /dev/null +++ b/.github/templates/domain/Contracts/EntityLite.cs.template @@ -0,0 +1,17 @@ +namespace {Solution}.{Domain}.Contracts; + +[Contract] +public partial class {Entity}Lite : IIdentifier +{ + [ReadOnly(true)] + public string? Id { get; set; } + + public string? CustomerId { get; set; } + + [ReferenceData<{Entity}Status>] + [Localization("{Entity} status")] + public partial string? StatusCode { get; set; } + + [ReadOnly(true)] + public ChangeLog? ChangeLog { get; set; } +} diff --git a/.github/templates/domain/Contracts/EntityStatus.cs.template b/.github/templates/domain/Contracts/EntityStatus.cs.template new file mode 100644 index 00000000..c93f189c --- /dev/null +++ b/.github/templates/domain/Contracts/EntityStatus.cs.template @@ -0,0 +1,6 @@ +namespace {Solution}.{Domain}.Contracts; + +[ReferenceData] +public partial class {Entity}Status : ReferenceData<{Entity}Status> { } + +public class {Entity}StatusCollection() : ReferenceDataCollection<{Entity}Status>(ReferenceDataSortOrder.Code) { } diff --git a/.github/templates/domain/Contracts/GlobalUsing.cs.template b/.github/templates/domain/Contracts/GlobalUsing.cs.template new file mode 100644 index 00000000..b3b07bf1 --- /dev/null +++ b/.github/templates/domain/Contracts/GlobalUsing.cs.template @@ -0,0 +1,5 @@ +global using CoreEx.Entities; +global using CoreEx.Localization; +global using CoreEx.RefData; +global using System.ComponentModel; +global using System.Text.Json.Serialization; diff --git a/.github/templates/domain/Database/Data/ref-data.yaml.template b/.github/templates/domain/Database/Data/ref-data.yaml.template new file mode 100644 index 00000000..1fb02f42 --- /dev/null +++ b/.github/templates/domain/Database/Data/ref-data.yaml.template @@ -0,0 +1,5 @@ +{Domain}: + - $^{Entity}Status: + - P: 
Pending + - C: Confirmed + - X: Cancelled diff --git a/.github/templates/domain/Database/Domain.Database.csproj.template b/.github/templates/domain/Database/Domain.Database.csproj.template new file mode 100644 index 00000000..82f1cdf2 --- /dev/null +++ b/.github/templates/domain/Database/Domain.Database.csproj.template @@ -0,0 +1,21 @@ + + + + Exe + + + + + + + + + + + + + + + + + diff --git a/.github/templates/domain/Database/Migrations/000001-create-schema.sql.template b/.github/templates/domain/Database/Migrations/000001-create-schema.sql.template new file mode 100644 index 00000000..318ca703 --- /dev/null +++ b/.github/templates/domain/Database/Migrations/000001-create-schema.sql.template @@ -0,0 +1 @@ +CREATE SCHEMA [{Domain}] diff --git a/.github/templates/domain/Database/Migrations/000101-create-entitystatus.sql.template b/.github/templates/domain/Database/Migrations/000101-create-entitystatus.sql.template new file mode 100644 index 00000000..c989a971 --- /dev/null +++ b/.github/templates/domain/Database/Migrations/000101-create-entitystatus.sql.template @@ -0,0 +1,18 @@ +-- Migration Script + +BEGIN TRANSACTION + +CREATE TABLE [{Domain}].[{Entity}Status] ( + [{Entity}StatusId] NVARCHAR(50) NOT NULL PRIMARY KEY, + [Code] NVARCHAR(50) NOT NULL UNIQUE, + [Text] NVARCHAR(250) NULL, + [IsActive] BIT NULL, + [SortOrder] INT NULL, + [RowVersion] TIMESTAMP NOT NULL, + [CreatedBy] NVARCHAR(250) NULL, + [CreatedOn] DATETIMEOFFSET NULL, + [UpdatedBy] NVARCHAR(250) NULL, + [UpdatedOn] DATETIMEOFFSET NULL +); + +COMMIT TRANSACTION diff --git a/.github/templates/domain/Database/Migrations/000201-create-entity.sql.template b/.github/templates/domain/Database/Migrations/000201-create-entity.sql.template new file mode 100644 index 00000000..8d5d4c81 --- /dev/null +++ b/.github/templates/domain/Database/Migrations/000201-create-entity.sql.template @@ -0,0 +1,16 @@ +-- Migration Script + +BEGIN TRANSACTION + +CREATE TABLE [{Domain}].[{Entity}] ( + [{Entity}Id] NVARCHAR(50) 
NOT NULL PRIMARY KEY, + [CustomerId] NVARCHAR(100) NOT NULL, + [StatusCode] NVARCHAR(50) NOT NULL, + [CreatedBy] NVARCHAR(250) NULL, + [CreatedOn] DATETIMEOFFSET NULL, + [UpdatedBy] NVARCHAR(250) NULL, + [UpdatedOn] DATETIMEOFFSET NULL, + [RowVersion] TIMESTAMP NOT NULL +); + +COMMIT TRANSACTION diff --git a/.github/templates/domain/Database/Migrations/000202-create-childentity.sql.template b/.github/templates/domain/Database/Migrations/000202-create-childentity.sql.template new file mode 100644 index 00000000..e887cf3b --- /dev/null +++ b/.github/templates/domain/Database/Migrations/000202-create-childentity.sql.template @@ -0,0 +1,17 @@ +-- Migration Script + +BEGIN TRANSACTION + +CREATE TABLE [{Domain}].[{ChildEntity}] ( + [{ChildEntity}Id] NVARCHAR(50) NOT NULL PRIMARY KEY, + [{Entity}Id] NVARCHAR(50) NOT NULL FOREIGN KEY REFERENCES [{Domain}].[{Entity}]([{Entity}Id]), + [ProductId] NVARCHAR(100) NOT NULL, + [Quantity] DECIMAL(18, 4) NOT NULL DEFAULT 0, + [UnitPrice] DECIMAL(18, 4) NOT NULL DEFAULT 0, + [CreatedBy] NVARCHAR(250) NULL, + [CreatedOn] DATETIMEOFFSET NULL, + [UpdatedBy] NVARCHAR(250) NULL, + [UpdatedOn] DATETIMEOFFSET NULL +); + +COMMIT TRANSACTION diff --git a/.github/templates/domain/Database/Migrations/000301-create-outbox-tables.sql.template b/.github/templates/domain/Database/Migrations/000301-create-outbox-tables.sql.template new file mode 100644 index 00000000..f120a547 --- /dev/null +++ b/.github/templates/domain/Database/Migrations/000301-create-outbox-tables.sql.template @@ -0,0 +1,29 @@ +BEGIN TRANSACTION + +CREATE TABLE [{Domain}].[Outbox] ( + [OutboxId] BIGINT IDENTITY (1, 1) NOT NULL PRIMARY KEY, + [TenantId] NVARCHAR(255) NOT NULL, + [PartitionId] INT NOT NULL, + [Status] TINYINT NOT NULL DEFAULT 0, + [EnqueuedUtc] DATETIME2 NOT NULL, + [AvailableUtc] DATETIME2 NOT NULL, + [DequeuedUtc] DATETIME2 NULL, + [Attempts] INT NOT NULL DEFAULT 0, + [Destination] NVARCHAR(255) NULL, + [Event] NVARCHAR(MAX) NOT NULL, + [LeaseId] 
UNIQUEIDENTIFIER NULL, + [LeaseUntilUtc] DATETIME2 NULL +); + +CREATE INDEX [IX_{Domain}_Outbox_Claim] ON [{Domain}].[Outbox] ([TenantId], [PartitionId], [Status], [OutboxId], [AvailableUtc], [LeaseUntilUtc]); + +CREATE TABLE [{Domain}].[OutboxLease] ( + [TenantId] NVARCHAR(255) NOT NULL, + [PartitionId] INT NOT NULL, + [LeaseId] UNIQUEIDENTIFIER NULL, + [LeaseUntilUtc] DATETIME2 NULL, + + CONSTRAINT PK_{Domain}_OutboxLease PRIMARY KEY (TenantId, PartitionId) +); + +COMMIT TRANSACTION diff --git a/.github/templates/domain/Database/Program.cs.template b/.github/templates/domain/Database/Program.cs.template new file mode 100644 index 00000000..170168b9 --- /dev/null +++ b/.github/templates/domain/Database/Program.cs.template @@ -0,0 +1,26 @@ +using CoreEx.Database; +using DbEx.Migration; +using DbEx.SqlServer.Console; + +namespace {Solution}.{Domain}.Database; + +public class Program +{ + public static Task Main(string[] args) => SqlServerMigrationConsole + .Create("Data Source=127.0.0.1,1433;Initial Catalog=Contoso;User id=sa;Password=yourStrong(!)Password;TrustServerCertificate=true") + .Configure(c => ConfigureMigrationArgs(c.Args)) + .RunAsync(args); + + public static MigrationArgs ConfigureMigrationArgs(MigrationArgs args) + { + args.AddAssembly().AddAssembly() + .IncludeExtendedSchemaScripts() + .DataParserArgs + .RefDataColumnDefault("SortOrder", _ => 0) + .RefDataColumnDefault("Scale", _ => 0); + + args.DataResetFilterPredicate = ts => ts.Schema == "{Domain}"; + + return args; + } +} diff --git a/.github/templates/domain/Database/Schema/Stored Procedures/spOutboxBatchCancel.g.sql.template b/.github/templates/domain/Database/Schema/Stored Procedures/spOutboxBatchCancel.g.sql.template new file mode 100644 index 00000000..6c41280b --- /dev/null +++ b/.github/templates/domain/Database/Schema/Stored Procedures/spOutboxBatchCancel.g.sql.template @@ -0,0 +1,46 @@ +CREATE OR ALTER PROCEDURE [{Domain}].[spOutboxBatchCancel] + @LeaseId UNIQUEIDENTIFIER, + 
@BackoffSeconds INT +AS +BEGIN + SET NOCOUNT ON; + SET XACT_ABORT ON; + SET LOCK_TIMEOUT 5000; + SET TRANSACTION ISOLATION LEVEL READ COMMITTED; + + DECLARE @Now DATETIME2 = SYSUTCDATETIME(); + + BEGIN TRY + BEGIN TRAN; + + UPDATE o + SET o.[Status] = 0, + o.[Attempts] = o.[Attempts] + 1, + o.[AvailableUtc] = DATEADD(SECOND, @BackoffSeconds, @Now), + o.[LeaseId] = NULL, + o.[LeaseUntilUtc] = NULL + FROM [{Domain}].[Outbox] AS o WITH (UPDLOCK, ROWLOCK) + WHERE o.[LeaseId] = @LeaseId + AND o.[Status] = 1; + + IF (@@ROWCOUNT = 0) + BEGIN + COMMIT; + RETURN -1; + END + + COMMIT; + + BEGIN TRY + EXEC [{Domain}].[spOutboxLeaseRelease] @LeaseId; + END TRY + BEGIN CATCH + END CATCH + + RETURN 0; + END TRY + BEGIN CATCH + IF (XACT_STATE() <> 0) ROLLBACK; + THROW; + END CATCH +END diff --git a/.github/templates/domain/Database/Schema/Stored Procedures/spOutboxBatchClaim.g.sql.template b/.github/templates/domain/Database/Schema/Stored Procedures/spOutboxBatchClaim.g.sql.template new file mode 100644 index 00000000..b02f953b --- /dev/null +++ b/.github/templates/domain/Database/Schema/Stored Procedures/spOutboxBatchClaim.g.sql.template @@ -0,0 +1,96 @@ +CREATE OR ALTER PROCEDURE [{Domain}].[spOutboxBatchClaim] + @TenantId NVARCHAR(255) = NULL, + @PartitionId INT, + @BatchSize INT, + @LeaseId UNIQUEIDENTIFIER, + @LeaseSeconds INT +AS +BEGIN + SET NOCOUNT ON; + SET XACT_ABORT ON; + + DECLARE @Now DATETIME2 = SYSUTCDATETIME(); + DECLARE @LeaseUntilUtc DATETIME2; + DECLARE @EffectiveTenantId NVARCHAR(255) = COALESCE(@TenantId, '(none)'); + + SET TRANSACTION ISOLATION LEVEL READ COMMITTED; + SET LOCK_TIMEOUT 5000; + + DECLARE @RC INT; + EXEC @RC = [{Domain}].[spOutboxLeaseAcquire] @EffectiveTenantId, @PartitionId, @LeaseId, @LeaseSeconds, @LeaseUntilUtc OUTPUT; + IF (@RC < 0) RETURN -3; + + BEGIN TRY + BEGIN TRAN; + + DECLARE @HeadId BIGINT; + DECLARE @BlockerId BIGINT; + + SELECT @HeadId = MIN(o.OutboxId) + FROM [{Domain}].[Outbox] o WITH (UPDLOCK) + WHERE o.[TenantId] = 
@EffectiveTenantId + AND o.[PartitionId] = @PartitionId + AND o.[Status] IN (0, 1) + OPTION (RECOMPILE); + + IF @HeadId IS NULL + BEGIN + COMMIT; + EXEC [{Domain}].[spOutboxLeaseRelease] @LeaseId; + RETURN -2; + END + + SELECT @BlockerId = MIN(o.OutboxId) + FROM [{Domain}].[Outbox] o WITH (READPAST, UPDLOCK) + WHERE o.[TenantId] = @EffectiveTenantId + AND o.[PartitionId] = @PartitionId + AND o.[OutboxId] >= @HeadId + AND ((o.Status = 1 AND o.[LeaseUntilUtc] IS NOT NULL AND o.[LeaseUntilUtc] > @Now) + OR (o.Status = 0 AND o.[AvailableUtc] > @Now)) + OPTION (RECOMPILE); + + ;WITH claim AS + ( + SELECT TOP (@BatchSize) + o.[OutboxId], o.[TenantId], o.[Status], o.[PartitionId], o.[Destination], o.[Event], + o.[Attempts], o.[EnqueuedUtc], o.[AvailableUtc], o.[LeaseId], o.[LeaseUntilUtc] + FROM [{Domain}].[Outbox] o WITH (READPAST, UPDLOCK, ROWLOCK) + WHERE o.[TenantId] = @EffectiveTenantId + AND o.[PartitionId] = @PartitionId + AND o.[OutboxId] >= @HeadId + AND (@BlockerId IS NULL OR o.[OutboxId] < @BlockerId) + AND ((o.[Status] = 0 AND o.[AvailableUtc] <= @Now) + OR (o.[Status] = 1 AND (o.[LeaseUntilUtc] IS NULL OR o.[LeaseUntilUtc] <= @Now))) + ORDER BY o.OutboxId + ) + UPDATE claim + SET [Status] = 1, + [LeaseId] = @LeaseId, + [LeaseUntilUtc] = @LeaseUntilUtc + OUTPUT + inserted.[OutboxId], + inserted.[TenantId], + inserted.[Status], + inserted.[PartitionId], + inserted.[Destination], + inserted.[Event], + inserted.[Attempts], + inserted.[EnqueuedUtc], + inserted.[AvailableUtc], + inserted.[LeaseUntilUtc]; + + IF (@@ROWCOUNT = 0) + BEGIN + COMMIT; + EXEC [{Domain}].[spOutboxLeaseRelease] @LeaseId; + RETURN -1; + END + + COMMIT; + RETURN 0; + END TRY + BEGIN CATCH + IF (XACT_STATE() <> 0) ROLLBACK; + THROW; + END CATCH +END diff --git a/.github/templates/domain/Database/Schema/Stored Procedures/spOutboxBatchComplete.g.sql.template b/.github/templates/domain/Database/Schema/Stored Procedures/spOutboxBatchComplete.g.sql.template new file mode 100644 index 
00000000..aca350ab --- /dev/null +++ b/.github/templates/domain/Database/Schema/Stored Procedures/spOutboxBatchComplete.g.sql.template @@ -0,0 +1,50 @@ +CREATE OR ALTER PROCEDURE [{Domain}].[spOutboxBatchComplete] + @LeaseId UNIQUEIDENTIFIER, + @DequeuedUtc DATETIME2 NULL +AS +BEGIN + SET NOCOUNT ON; + SET XACT_ABORT ON; + SET LOCK_TIMEOUT 5000; + SET TRANSACTION ISOLATION LEVEL READ COMMITTED; + + DECLARE @Now DATETIME2 = SYSUTCDATETIME(); + DECLARE @Completed TABLE (TenantId NVARCHAR(255), PartitionId INT); + + BEGIN TRY + BEGIN TRAN; + + UPDATE o + SET o.[Status] = 2, + o.[LeaseId] = NULL, + o.[LeaseUntilUtc] = NULL, + o.[DequeuedUtc] = COALESCE(@DequeuedUtc, @Now) + OUTPUT + deleted.[TenantId], + deleted.[PartitionId] + INTO @Completed + FROM [{Domain}].[Outbox] AS o WITH (UPDLOCK, ROWLOCK) + WHERE o.[LeaseId] = @LeaseId + AND o.[Status] = 1; + + IF (@@ROWCOUNT = 0) + BEGIN + COMMIT; + RETURN -1; + END + + COMMIT; + + BEGIN TRY + EXEC [{Domain}].[spOutboxLeaseRelease] @LeaseId; + END TRY + BEGIN CATCH + END CATCH + + RETURN 0; + END TRY + BEGIN CATCH + IF (XACT_STATE() <> 0) ROLLBACK; + THROW; + END CATCH +END diff --git a/.github/templates/domain/Database/Schema/Stored Procedures/spOutboxEnqueue.g.sql.template b/.github/templates/domain/Database/Schema/Stored Procedures/spOutboxEnqueue.g.sql.template new file mode 100644 index 00000000..881dd973 --- /dev/null +++ b/.github/templates/domain/Database/Schema/Stored Procedures/spOutboxEnqueue.g.sql.template @@ -0,0 +1,18 @@ +CREATE OR ALTER PROCEDURE [{Domain}].[spOutboxEnqueue] + @TenantId AS NVARCHAR(255) = NULL, + @PartitionId AS INT, + @Destination AS NVARCHAR(255), + @Event AS NVARCHAR(MAX), + @EnqueuedUtc AS DATETIME2 = NULL, + @AvailableUtc AS DATETIME2 = NULL +AS +BEGIN + SET NOCOUNT ON; + SET XACT_ABORT ON; + + DECLARE @Now DATETIME2 = SYSUTCDATETIME(); + DECLARE @EffectiveTenantId NVARCHAR(255) = COALESCE(@TenantId, '(none)'); + + INSERT INTO [{Domain}].[Outbox] ([TenantId], [PartitionId], [Destination], 
[Event], [EnqueuedUtc], [AvailableUtc]) + VALUES (@EffectiveTenantId, @PartitionId, @Destination, @Event, COALESCE(@EnqueuedUtc, @Now), COALESCE(@AvailableUtc, COALESCE(@EnqueuedUtc, @Now))); +END diff --git a/.github/templates/domain/Database/Schema/Stored Procedures/spOutboxLeaseAcquire.g.sql.template b/.github/templates/domain/Database/Schema/Stored Procedures/spOutboxLeaseAcquire.g.sql.template new file mode 100644 index 00000000..d29d14ba --- /dev/null +++ b/.github/templates/domain/Database/Schema/Stored Procedures/spOutboxLeaseAcquire.g.sql.template @@ -0,0 +1,49 @@ +CREATE OR ALTER PROCEDURE [{Domain}].[spOutboxLeaseAcquire] + @TenantId NVARCHAR(255) = NULL, + @PartitionId INT, + @LeaseId UNIQUEIDENTIFIER, + @LeaseSeconds INT, + @LeaseUntilUtc DATETIME2 OUTPUT +AS +BEGIN + SET NOCOUNT ON; + SET XACT_ABORT ON; + SET LOCK_TIMEOUT 5000; + SET TRANSACTION ISOLATION LEVEL READ COMMITTED; + + DECLARE @Now DATETIME2 = SYSUTCDATETIME(); + DECLARE @Until DATETIME2 = DATEADD(SECOND, @LeaseSeconds, @Now); + DECLARE @EffectiveTenantId NVARCHAR(255) = COALESCE(@TenantId, '(none)'); + + BEGIN TRY + BEGIN TRAN; + + IF NOT EXISTS (SELECT 1 FROM [{Domain}].[OutboxLease] WITH (UPDLOCK, HOLDLOCK) WHERE [TenantId] = @EffectiveTenantId AND [PartitionId] = @PartitionId) + INSERT INTO [{Domain}].[OutboxLease] ([TenantId], [PartitionId]) VALUES (@EffectiveTenantId, @PartitionId); + + UPDATE ol + SET ol.[LeaseId] = @LeaseId, + ol.[LeaseUntilUtc] = @Until + FROM [{Domain}].[OutboxLease] AS ol WITH (UPDLOCK, ROWLOCK) + WHERE ol.[PartitionId] = @PartitionId + AND ol.[TenantId] = @EffectiveTenantId + AND (ol.[LeaseUntilUtc] IS NULL OR ol.[LeaseUntilUtc] <= @Now) + OPTION (RECOMPILE); + + DECLARE @Rows INT = @@ROWCOUNT; + COMMIT; + + IF @Rows = 1 + BEGIN + SET @LeaseUntilUtc = @Until; + RETURN 0; + END + + SET @LeaseUntilUtc = NULL; + RETURN -1; + END TRY + BEGIN CATCH + IF (XACT_STATE() <> 0) ROLLBACK; + THROW; + END CATCH +END diff --git 
a/.github/templates/domain/Database/Schema/Stored Procedures/spOutboxLeaseRelease.g.sql.template b/.github/templates/domain/Database/Schema/Stored Procedures/spOutboxLeaseRelease.g.sql.template new file mode 100644 index 00000000..10fa2995 --- /dev/null +++ b/.github/templates/domain/Database/Schema/Stored Procedures/spOutboxLeaseRelease.g.sql.template @@ -0,0 +1,29 @@ +CREATE OR ALTER PROCEDURE [{Domain}].[spOutboxLeaseRelease] + @LeaseId UNIQUEIDENTIFIER +AS +BEGIN + SET NOCOUNT ON; + SET XACT_ABORT ON; + SET LOCK_TIMEOUT 5000; + SET TRANSACTION ISOLATION LEVEL READ COMMITTED; + + BEGIN TRY + BEGIN TRAN; + + UPDATE ol + SET ol.[LeaseId] = NULL, + ol.[LeaseUntilUtc] = NULL + FROM [{Domain}].[OutboxLease] AS ol WITH (UPDLOCK, ROWLOCK) + WHERE ol.[LeaseId] = @LeaseId; + + DECLARE @Rows INT = @@ROWCOUNT; + COMMIT; + + IF @Rows = 1 RETURN 0; + RETURN -1; + END TRY + BEGIN CATCH + IF (XACT_STATE() <> 0) ROLLBACK; + THROW; + END CATCH +END diff --git a/.github/templates/domain/Database/dbex.yaml.template b/.github/templates/domain/Database/dbex.yaml.template new file mode 100644 index 00000000..7268ba82 --- /dev/null +++ b/.github/templates/domain/Database/dbex.yaml.template @@ -0,0 +1,8 @@ +outbox: true +tables: +# Reference-data +- name: {Entity}Status + +# Transactional-data +- name: {Entity} +- name: {ChildEntity} diff --git a/.github/templates/domain/DomainScaffold.checklist.md b/.github/templates/domain/DomainScaffold.checklist.md new file mode 100644 index 00000000..4f292c83 --- /dev/null +++ b/.github/templates/domain/DomainScaffold.checklist.md @@ -0,0 +1,74 @@ +# Domain Scaffold Checklist + +Use this checklist after scaffolding a new domain from templates/prompts. + +## Inputs Confirmed + +- [ ] Solution prefix confirmed. +- [ ] Domain name confirmed. +- [ ] Root entity name confirmed. +- [ ] Child entity name confirmed (or explicitly omitted). +- [ ] CRUD operations confirmed. +- [ ] Reference data/status values confirmed. +- [ ] Event subjects confirmed. 
+ +## Projects Created + +- [ ] All domain projects are grouped under a Visual Studio solution folder named {Domain} (for example, Orders). +- [ ] All new domain projects are added to the solution file. +- [ ] {Solution}.{Domain}.Contracts. +- [ ] {Solution}.{Domain}.Application. +- [ ] {Solution}.{Domain}.Infrastructure. +- [ ] {Solution}.{Domain}.Api. +- [ ] {Solution}.{Domain}.Database. +- [ ] {Solution}.{Domain}.Test.Unit. +- [ ] {Solution}.{Domain}.Test.Api. + +## Contracts Layer + +- [ ] [Contract] classes are partial. +- [ ] Id, ETag, ChangeLog are [ReadOnly(true)]. +- [ ] ReferenceData code properties are partial. +- [ ] Reference data classes and collections exist. + +## Application Layer + +- [ ] Interfaces for service/read-service/repository created. +- [ ] Validator created and invoked in mutate methods. +- [ ] All mutate methods wrapped in _unitOfWork.ExecuteAsync. +- [ ] Outbox events added in WhereMutated callbacks. +- [ ] All awaited calls use ConfigureAwait(false). + +## Infrastructure Layer + +- [ ] Persistence models created. +- [ ] Mapper(s) created and wired. +- [ ] EfDb + DbContext created and configured. +- [ ] Repository implementation includes QueryArgsConfig. +- [ ] Outbox publisher points to [{Domain}].[spOutboxEnqueue]. + +## API Layer + +- [ ] Mutation and read controllers split. +- [ ] POST endpoints use [IdempotencyKey]. +- [ ] GET/HEAD dual route used for get-by-id. +- [ ] PATCH implemented with get + put delegates. +- [ ] Program.cs includes cache, SQL, outbox, OpenAPI, telemetry, health checks. + +## Database Layer + +- [ ] dbex.yaml includes all required tables. +- [ ] Schema + table migrations created. +- [ ] Outbox tables migration created. +- [ ] All six outbox stored procedures created. +- [ ] Reference data seed file created. +- [ ] Program.cs DataResetFilterPredicate scoped to schema. + +## Final Validation + +- [ ] Diagnostics check returns no errors. +- [ ] Project compiles. 
+- [ ] Unit tests run and pass for {Solution}.{Domain}.Test.Unit. +- [ ] Api tests run and pass for {Solution}.{Domain}.Test.Api. +- [ ] Added to solution file and organized under the {Domain} solution folder (including test projects). +- [ ] README/docs updated where required. diff --git a/.github/templates/domain/Infrastructure/Domain.Infrastructure.csproj.template b/.github/templates/domain/Infrastructure/Domain.Infrastructure.csproj.template new file mode 100644 index 00000000..05aa7fe4 --- /dev/null +++ b/.github/templates/domain/Infrastructure/Domain.Infrastructure.csproj.template @@ -0,0 +1,12 @@ + + + + + + + + + + + + diff --git a/.github/templates/domain/Infrastructure/GlobalUsing.cs.template b/.github/templates/domain/Infrastructure/GlobalUsing.cs.template new file mode 100644 index 00000000..b9f42c29 --- /dev/null +++ b/.github/templates/domain/Infrastructure/GlobalUsing.cs.template @@ -0,0 +1,19 @@ +global using {Solution}.{Domain}.Application.Repositories; +global using {Solution}.{Domain}.Infrastructure.Mapping; +global using CoreEx; +global using CoreEx.Data; +global using CoreEx.Data.Models; +global using CoreEx.Data.Querying; +global using CoreEx.Database; +global using CoreEx.Database.SqlServer; +global using CoreEx.Database.SqlServer.Outbox; +global using CoreEx.DependencyInjection; +global using CoreEx.Entities; +global using CoreEx.EntityFrameworkCore; +global using CoreEx.EntityFrameworkCore.Converters; +global using CoreEx.Events; +global using CoreEx.Events.Publishing; +global using CoreEx.Mapping; +global using Microsoft.EntityFrameworkCore; +global using Microsoft.Extensions.Logging; +global using System.Text.Json; diff --git a/.github/templates/domain/Infrastructure/Mapping/EntityMapper.cs.template b/.github/templates/domain/Infrastructure/Mapping/EntityMapper.cs.template new file mode 100644 index 00000000..067e968b --- /dev/null +++ b/.github/templates/domain/Infrastructure/Mapping/EntityMapper.cs.template @@ -0,0 +1,33 @@ +namespace 
{Solution}.{Domain}.Infrastructure.Mapping; + +internal class {Entity}Mapper : BiDirectionMapper +{ + protected override Persistence.{Entity} OnMap(Contracts.{Entity} source) => new() + { + Id = source.Id, + CustomerId = source.CustomerId, + StatusCode = source.Status?.Code, + Items = source.Items?.Select(i => new Persistence.{ChildEntity} + { + Id = i.Id, + {Entity}Id = source.Id, + ProductId = i.ProductId, + Quantity = i.Quantity, + UnitPrice = i.UnitPrice + }).ToList() ?? [] + }; + + protected override Contracts.{Entity} OnMap(Persistence.{Entity} source) => new() + { + Id = source.Id, + CustomerId = source.CustomerId, + StatusCode = source.StatusCode, + Items = source.Items?.Select(i => new Contracts.{ChildEntity} + { + Id = i.Id, + ProductId = i.ProductId, + Quantity = i.Quantity, + UnitPrice = i.UnitPrice + }).ToList() ?? [] + }; +} diff --git a/.github/templates/domain/Infrastructure/Mapping/EntityStatusMapper.cs.template b/.github/templates/domain/Infrastructure/Mapping/EntityStatusMapper.cs.template new file mode 100644 index 00000000..98500d89 --- /dev/null +++ b/.github/templates/domain/Infrastructure/Mapping/EntityStatusMapper.cs.template @@ -0,0 +1,16 @@ +namespace {Solution}.{Domain}.Infrastructure.Mapping; + +internal class {Entity}StatusMapper : BiDirectionMapper +{ + protected override Persistence.{Entity}Status OnMap(Contracts.{Entity}Status source) => throw new NotImplementedException(); + + protected override Contracts.{Entity}Status OnMap(Persistence.{Entity}Status source) => new() + { + Id = source.Id!, + Code = source.Code, + Text = source.Text, + SortOrder = source.SortOrder, + IsInactive = !source.IsActive, + ETag = source.ETag + }; +} diff --git a/.github/templates/domain/Infrastructure/Persistence/ChildEntity.cs.template b/.github/templates/domain/Infrastructure/Persistence/ChildEntity.cs.template new file mode 100644 index 00000000..96f2b705 --- /dev/null +++ b/.github/templates/domain/Infrastructure/Persistence/ChildEntity.cs.template 
@@ -0,0 +1,12 @@ +namespace {Solution}.{Domain}.Infrastructure.Persistence; + +public partial class {ChildEntity} : ModelBase +{ + public string? {Entity}Id { get; set; } + + public string? ProductId { get; set; } + + public decimal Quantity { get; set; } + + public decimal UnitPrice { get; set; } +} diff --git a/.github/templates/domain/Infrastructure/Persistence/Entity.cs.template b/.github/templates/domain/Infrastructure/Persistence/Entity.cs.template new file mode 100644 index 00000000..2004a1f8 --- /dev/null +++ b/.github/templates/domain/Infrastructure/Persistence/Entity.cs.template @@ -0,0 +1,10 @@ +namespace {Solution}.{Domain}.Infrastructure.Persistence; + +public partial class {Entity} : ModelBase +{ + public string? CustomerId { get; set; } + + public string? StatusCode { get; set; } + + public virtual ICollection<{ChildEntity}> Items { get; set; } = []; +} diff --git a/.github/templates/domain/Infrastructure/Persistence/EntityStatus.cs.template b/.github/templates/domain/Infrastructure/Persistence/EntityStatus.cs.template new file mode 100644 index 00000000..7f27d618 --- /dev/null +++ b/.github/templates/domain/Infrastructure/Persistence/EntityStatus.cs.template @@ -0,0 +1,3 @@ +namespace {Solution}.{Domain}.Infrastructure.Persistence; + +public partial class {Entity}Status : ReferenceDataModelBase { } diff --git a/.github/templates/domain/Infrastructure/Repositories/DomainDbContext.cs.template b/.github/templates/domain/Infrastructure/Repositories/DomainDbContext.cs.template new file mode 100644 index 00000000..bc020773 --- /dev/null +++ b/.github/templates/domain/Infrastructure/Repositories/DomainDbContext.cs.template @@ -0,0 +1,64 @@ +namespace {Solution}.{Domain}.Infrastructure.Repositories; + +public class {Domain}DbContext(DbContextOptions<{Domain}DbContext> options, SqlServerDatabase database) : DbContext(options), IEfDbContext +{ + public IDatabase BaseDatabase { get; } = database.ThrowIfNull(); + + protected override void 
OnConfiguring(DbContextOptionsBuilder optionsBuilder) + { + base.OnConfiguring(optionsBuilder); + + if (!optionsBuilder.IsConfigured) + optionsBuilder.UseSqlServer(BaseDatabase.Connection); + } + + protected override void OnModelCreating(ModelBuilder modelBuilder) + { + modelBuilder.ThrowIfNull().Entity(e => + { + e.ToTable("{Entity}", "{Domain}"); + e.HasKey(p => p.Id); + e.Property(p => p.Id).HasColumnName("{Entity}Id").HasColumnType("NVARCHAR(50)"); + e.Property(p => p.CustomerId).HasColumnName("CustomerId").HasColumnType("NVARCHAR(100)"); + e.Property(p => p.StatusCode).HasColumnName("StatusCode").HasColumnType("NVARCHAR(50)"); + e.Property(p => p.CreatedBy).HasColumnName("CreatedBy").HasColumnType("NVARCHAR(250)"); + e.Property(p => p.CreatedOn).HasColumnName("CreatedOn").HasColumnType("DATETIMEOFFSET"); + e.Property(p => p.UpdatedBy).HasColumnName("UpdatedBy").HasColumnType("NVARCHAR(250)"); + e.Property(p => p.UpdatedOn).HasColumnName("UpdatedOn").HasColumnType("DATETIMEOFFSET"); + e.Property(p => p.ETag).HasColumnName("RowVersion").HasColumnType("TIMESTAMP").IsRowVersion().HasConversion(StringBase64Converter.Default); + e.HasMany(p => p.Items).WithOne().HasForeignKey(i => i.{Entity}Id).OnDelete(DeleteBehavior.Cascade); + }); + + modelBuilder.ThrowIfNull().Entity(e => + { + e.ToTable("{ChildEntity}", "{Domain}"); + e.HasKey(p => p.Id); + e.Property(p => p.Id).HasColumnName("{ChildEntity}Id").HasColumnType("NVARCHAR(50)"); + e.Property(p => p.{Entity}Id).HasColumnName("{Entity}Id").HasColumnType("NVARCHAR(50)"); + e.Property(p => p.ProductId).HasColumnName("ProductId").HasColumnType("NVARCHAR(100)"); + e.Property(p => p.Quantity).HasColumnName("Quantity").HasColumnType("DECIMAL(18,4)"); + e.Property(p => p.UnitPrice).HasColumnName("UnitPrice").HasColumnType("DECIMAL(18,4)"); + e.Property(p => p.CreatedBy).HasColumnName("CreatedBy").HasColumnType("NVARCHAR(250)"); + e.Property(p => p.CreatedOn).HasColumnName("CreatedOn").HasColumnType("DATETIMEOFFSET"); + 
e.Property(p => p.UpdatedBy).HasColumnName("UpdatedBy").HasColumnType("NVARCHAR(250)"); + e.Property(p => p.UpdatedOn).HasColumnName("UpdatedOn").HasColumnType("DATETIMEOFFSET"); + }); + + modelBuilder.ThrowIfNull().Entity(e => + { + e.ToTable("{Entity}Status", "{Domain}"); + e.HasKey(p => p.Id); + e.Property(p => p.Id).HasColumnName("{Entity}StatusId").HasColumnType("NVARCHAR(50)"); + e.Property(p => p.Code).HasColumnName("Code").HasColumnType("NVARCHAR(50)"); + e.Property(p => p.Text).HasColumnName("Text").HasColumnType("NVARCHAR(250)"); + e.Property(p => p.SortOrder).HasColumnName("SortOrder").HasColumnType("INT"); + e.Property(p => p.IsActive).HasColumnName("IsActive").HasColumnType("BIT"); + e.Property(p => p.CreatedBy).HasColumnName("CreatedBy").HasColumnType("NVARCHAR(250)"); + e.Property(p => p.CreatedOn).HasColumnName("CreatedOn").HasColumnType("DATETIMEOFFSET"); + e.Property(p => p.UpdatedBy).HasColumnName("UpdatedBy").HasColumnType("NVARCHAR(250)"); + e.Property(p => p.UpdatedOn).HasColumnName("UpdatedOn").HasColumnType("DATETIMEOFFSET"); + e.Property(p => p.ETag).HasColumnName("RowVersion").HasColumnType("TIMESTAMP").IsRowVersion().HasConversion(StringBase64Converter.Default); + e.Ignore(p => p.Description).Ignore(p => p.StartsOn).Ignore(p => p.EndsOn); + }); + } +} diff --git a/.github/templates/domain/Infrastructure/Repositories/DomainEfDb.cs.template b/.github/templates/domain/Infrastructure/Repositories/DomainEfDb.cs.template new file mode 100644 index 00000000..832ceb30 --- /dev/null +++ b/.github/templates/domain/Infrastructure/Repositories/DomainEfDb.cs.template @@ -0,0 +1,9 @@ +namespace {Solution}.{Domain}.Infrastructure.Repositories; + +public sealed class {Domain}EfDb({Domain}DbContext dbContext) : EfDb<{Domain}DbContext>(dbContext) +{ + public EfDbModel {Entity}Statuses => Model(); + + public EfDbMappedModel {EntityPlural} + => Model().ToMappedModel({Entity}Mapper.Default); +} diff --git 
a/.github/templates/domain/Infrastructure/Repositories/DomainOutboxPublisher.cs.template b/.github/templates/domain/Infrastructure/Repositories/DomainOutboxPublisher.cs.template new file mode 100644 index 00000000..c7c4c885 --- /dev/null +++ b/.github/templates/domain/Infrastructure/Repositories/DomainOutboxPublisher.cs.template @@ -0,0 +1,7 @@ +namespace {Solution}.{Domain}.Infrastructure.Repositories; + +public class {Domain}OutboxPublisher(SqlServerDatabase database, IDestinationProvider? destinationProvider = null, IEventFormatter? formatter = null, ILogger<{Domain}OutboxPublisher>? logger = null) + : SqlServerOutboxPublisher(database, destinationProvider, formatter, logger) +{ + public override SqlStatement Statement { get; set; } = SqlStatement.StoredProcedure("[{Domain}].[spOutboxEnqueue]"); +} diff --git a/.github/templates/domain/Infrastructure/Repositories/EntityRepository.cs.template b/.github/templates/domain/Infrastructure/Repositories/EntityRepository.cs.template new file mode 100644 index 00000000..bc54bac9 --- /dev/null +++ b/.github/templates/domain/Infrastructure/Repositories/EntityRepository.cs.template @@ -0,0 +1,38 @@ +namespace {Solution}.{Domain}.Infrastructure.Repositories; + +[ScopedService] +public class {Entity}Repository({Domain}EfDb ef) : I{Entity}Repository +{ + private readonly {Domain}EfDb _ef = ef.ThrowIfNull(); + + private static readonly QueryArgsConfig _queryConfig = QueryArgsConfig.Create() + .WithFilter(filter => filter + .WithDefaultModelPrefix("{Entity}") + .AddField(nameof(Contracts.{Entity}Base.CustomerId), c => c.WithOperators(QueryFilterOperator.EqualityOperators | QueryFilterOperator.StartsWith)) + .AddReferenceDataField(nameof(Contracts.{Entity}Base.Status), "StatusCode")) + .WithOrderBy(orderby => orderby + .WithDefaultModelPrefix("{Entity}") + .AddField(nameof(Contracts.{Entity}Base.CustomerId), c => c.WithDefault().WithAlwaysInclude())); + + public Task GetAsync(string id) => _ef.{EntityPlural}.GetAsync(id); + + 
public Task> CreateAsync(Contracts.{Entity} entity) => _ef.{EntityPlural}.CreateAsync(entity); + + public Task> UpdateAsync(Contracts.{Entity} entity) => _ef.{EntityPlural}.UpdateAsync(entity); + + public Task DeleteAsync(string id) => _ef.{EntityPlural}.DeleteAsync(id); + + public async Task> QueryAsync(QueryArgs? query, PagingArgs? paging) + { + var parsed = _queryConfig.Parse(query).ThrowOnError(); + var entities = _ef.{EntityPlural}.Model.Query(); + + return await entities.Where(parsed).OrderBy(parsed).ToMappedItemsResultAsync(x => new Contracts.{Entity}Lite + { + Id = x.Id, + CustomerId = x.CustomerId, + StatusCode = x.StatusCode, + ChangeLog = new ChangeLog { CreatedBy = x.CreatedBy, CreatedOn = x.CreatedOn, UpdatedBy = x.UpdatedBy, UpdatedOn = x.UpdatedOn } + }, paging).ConfigureAwait(false); + } +} diff --git a/.github/templates/domain/Infrastructure/Repositories/ReferenceDataRepository.cs.template b/.github/templates/domain/Infrastructure/Repositories/ReferenceDataRepository.cs.template new file mode 100644 index 00000000..2d0a5aee --- /dev/null +++ b/.github/templates/domain/Infrastructure/Repositories/ReferenceDataRepository.cs.template @@ -0,0 +1,10 @@ +namespace {Solution}.{Domain}.Infrastructure.Repositories; + +[ScopedService] +public class ReferenceDataRepository({Domain}EfDb ef) : IReferenceDataRepository +{ + private readonly {Domain}EfDb _ef = ef.ThrowIfNull(); + + public Task GetAll{Entity}StatusesAsync() + => _ef.{Entity}Statuses.Query().ToMappedItemsAsync({Entity}StatusMapper.From); +} diff --git a/CoreEx.sln b/CoreEx.sln index b5488a0b..7c157588 100644 --- a/CoreEx.sln +++ b/CoreEx.sln @@ -160,6 +160,38 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Contoso.Shopping.Outbox.Rel EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Contoso.Products.Test.Subscribe", "samples\tests\Contoso.Products.Test.Subscribe\Contoso.Products.Test.Subscribe.csproj", "{4B987914-01EE-48B4-B645-A6F469297853}" EndProject 
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Orders", "Orders", "{4033DC3B-5F3E-4D69-AEC0-97D5BA4DB370}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{95780E7B-43ED-4404-8917-A46D4DC30083}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "tests", "tests", "{D2F88DCC-0DDB-4B25-BA0C-975D437F633D}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "tools", "tools", "{8658F459-F63E-443E-90CA-B9FFE463996B}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "hosts", "hosts", "{848DC6FA-94E0-4805-9107-501102BC4A6D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Contoso.Orders.Contracts", "samples\src\Contoso.Orders.Contracts\Contoso.Orders.Contracts.csproj", "{5FA63BE3-43AB-47E4-9479-FC6AE30760E8}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Contoso.Orders.Database", "samples\src\Contoso.Orders.Database\Contoso.Orders.Database.csproj", "{BCEA1093-FB6F-4B58-AA8C-FF01DD5CB574}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Contoso.Orders.Infrastructure", "samples\src\Contoso.Orders.Infrastructure\Contoso.Orders.Infrastructure.csproj", "{D271F0DB-79CC-4878-9922-482FA7C49333}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Contoso.Orders.Application", "samples\src\Contoso.Orders.Application\Contoso.Orders.Application.csproj", "{37285E51-9DEE-4344-9B0E-B3185016600A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Contoso.Orders.Api", "samples\src\Contoso.Orders.Api\Contoso.Orders.Api.csproj", "{E4E396DA-90FB-489C-A40C-1B22563CF203}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Contoso.Orders.Test.Common", "samples\tests\Contoso.Orders.Test.Common\Contoso.Orders.Test.Common.csproj", "{8C369124-BA40-4B77-8DCB-588700C97430}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Contoso.Orders.Test.Api", 
"samples\tests\Contoso.Orders.Test.Api\Contoso.Orders.Test.Api.csproj", "{D1CF8953-16EB-4B3A-92F9-ABA545F87D76}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Contoso.Orders.Test.Unit", "samples\tests\Contoso.Orders.Test.Unit\Contoso.Orders.Test.Unit.csproj", "{ECC8BFF1-12F9-4F7B-90FE-4FA454683C32}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Contoso.Order.Workflow.Client", "samples\src\Contoso.Order.Workflow.Client\Contoso.Order.Workflow.Client.csproj", "{8CB05C57-7A07-4228-AE5F-2AC88792062A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Contoso.Order.Workflow.Worker", "samples\src\Contoso.Order.Workflow.Worker\Contoso.Order.Workflow.Worker.csproj", "{70E70027-97A1-4862-953F-32874684A334}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Contoso.Order.Workflow.Workflow", "samples\src\Contoso.Order.Workflow.Workflow\Contoso.Order.Workflow.Workflow.csproj", "{09E56536-DD59-49BD-A48F-41299C5B7ABF}" +EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CoreEx.Generator", "gen\CoreEx.Generator\CoreEx.Generator.csproj", "{54CD8587-0F45-2C2C-7AE4-BB92254202F5}" EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "gen", "gen", "{570F3635-BEB1-4067-B10F-33DD890BDBD4}" @@ -786,6 +818,138 @@ Global {4B987914-01EE-48B4-B645-A6F469297853}.Release|x64.Build.0 = Release|Any CPU {4B987914-01EE-48B4-B645-A6F469297853}.Release|x86.ActiveCfg = Release|Any CPU {4B987914-01EE-48B4-B645-A6F469297853}.Release|x86.Build.0 = Release|Any CPU + {5FA63BE3-43AB-47E4-9479-FC6AE30760E8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5FA63BE3-43AB-47E4-9479-FC6AE30760E8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5FA63BE3-43AB-47E4-9479-FC6AE30760E8}.Debug|x64.ActiveCfg = Debug|Any CPU + {5FA63BE3-43AB-47E4-9479-FC6AE30760E8}.Debug|x64.Build.0 = Debug|Any CPU + {5FA63BE3-43AB-47E4-9479-FC6AE30760E8}.Debug|x86.ActiveCfg = Debug|Any CPU + {5FA63BE3-43AB-47E4-9479-FC6AE30760E8}.Debug|x86.Build.0 = 
Debug|Any CPU + {5FA63BE3-43AB-47E4-9479-FC6AE30760E8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5FA63BE3-43AB-47E4-9479-FC6AE30760E8}.Release|Any CPU.Build.0 = Release|Any CPU + {5FA63BE3-43AB-47E4-9479-FC6AE30760E8}.Release|x64.ActiveCfg = Release|Any CPU + {5FA63BE3-43AB-47E4-9479-FC6AE30760E8}.Release|x64.Build.0 = Release|Any CPU + {5FA63BE3-43AB-47E4-9479-FC6AE30760E8}.Release|x86.ActiveCfg = Release|Any CPU + {5FA63BE3-43AB-47E4-9479-FC6AE30760E8}.Release|x86.Build.0 = Release|Any CPU + {BCEA1093-FB6F-4B58-AA8C-FF01DD5CB574}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BCEA1093-FB6F-4B58-AA8C-FF01DD5CB574}.Debug|Any CPU.Build.0 = Debug|Any CPU + {BCEA1093-FB6F-4B58-AA8C-FF01DD5CB574}.Debug|x64.ActiveCfg = Debug|Any CPU + {BCEA1093-FB6F-4B58-AA8C-FF01DD5CB574}.Debug|x64.Build.0 = Debug|Any CPU + {BCEA1093-FB6F-4B58-AA8C-FF01DD5CB574}.Debug|x86.ActiveCfg = Debug|Any CPU + {BCEA1093-FB6F-4B58-AA8C-FF01DD5CB574}.Debug|x86.Build.0 = Debug|Any CPU + {BCEA1093-FB6F-4B58-AA8C-FF01DD5CB574}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BCEA1093-FB6F-4B58-AA8C-FF01DD5CB574}.Release|Any CPU.Build.0 = Release|Any CPU + {BCEA1093-FB6F-4B58-AA8C-FF01DD5CB574}.Release|x64.ActiveCfg = Release|Any CPU + {BCEA1093-FB6F-4B58-AA8C-FF01DD5CB574}.Release|x64.Build.0 = Release|Any CPU + {BCEA1093-FB6F-4B58-AA8C-FF01DD5CB574}.Release|x86.ActiveCfg = Release|Any CPU + {BCEA1093-FB6F-4B58-AA8C-FF01DD5CB574}.Release|x86.Build.0 = Release|Any CPU + {D271F0DB-79CC-4878-9922-482FA7C49333}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D271F0DB-79CC-4878-9922-482FA7C49333}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D271F0DB-79CC-4878-9922-482FA7C49333}.Debug|x64.ActiveCfg = Debug|Any CPU + {D271F0DB-79CC-4878-9922-482FA7C49333}.Debug|x64.Build.0 = Debug|Any CPU + {D271F0DB-79CC-4878-9922-482FA7C49333}.Debug|x86.ActiveCfg = Debug|Any CPU + {D271F0DB-79CC-4878-9922-482FA7C49333}.Debug|x86.Build.0 = Debug|Any CPU + {D271F0DB-79CC-4878-9922-482FA7C49333}.Release|Any CPU.ActiveCfg = Release|Any 
CPU + {D271F0DB-79CC-4878-9922-482FA7C49333}.Release|Any CPU.Build.0 = Release|Any CPU + {D271F0DB-79CC-4878-9922-482FA7C49333}.Release|x64.ActiveCfg = Release|Any CPU + {D271F0DB-79CC-4878-9922-482FA7C49333}.Release|x64.Build.0 = Release|Any CPU + {D271F0DB-79CC-4878-9922-482FA7C49333}.Release|x86.ActiveCfg = Release|Any CPU + {D271F0DB-79CC-4878-9922-482FA7C49333}.Release|x86.Build.0 = Release|Any CPU + {37285E51-9DEE-4344-9B0E-B3185016600A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {37285E51-9DEE-4344-9B0E-B3185016600A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {37285E51-9DEE-4344-9B0E-B3185016600A}.Debug|x64.ActiveCfg = Debug|Any CPU + {37285E51-9DEE-4344-9B0E-B3185016600A}.Debug|x64.Build.0 = Debug|Any CPU + {37285E51-9DEE-4344-9B0E-B3185016600A}.Debug|x86.ActiveCfg = Debug|Any CPU + {37285E51-9DEE-4344-9B0E-B3185016600A}.Debug|x86.Build.0 = Debug|Any CPU + {37285E51-9DEE-4344-9B0E-B3185016600A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {37285E51-9DEE-4344-9B0E-B3185016600A}.Release|Any CPU.Build.0 = Release|Any CPU + {37285E51-9DEE-4344-9B0E-B3185016600A}.Release|x64.ActiveCfg = Release|Any CPU + {37285E51-9DEE-4344-9B0E-B3185016600A}.Release|x64.Build.0 = Release|Any CPU + {37285E51-9DEE-4344-9B0E-B3185016600A}.Release|x86.ActiveCfg = Release|Any CPU + {37285E51-9DEE-4344-9B0E-B3185016600A}.Release|x86.Build.0 = Release|Any CPU + {E4E396DA-90FB-489C-A40C-1B22563CF203}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E4E396DA-90FB-489C-A40C-1B22563CF203}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E4E396DA-90FB-489C-A40C-1B22563CF203}.Debug|x64.ActiveCfg = Debug|Any CPU + {E4E396DA-90FB-489C-A40C-1B22563CF203}.Debug|x64.Build.0 = Debug|Any CPU + {E4E396DA-90FB-489C-A40C-1B22563CF203}.Debug|x86.ActiveCfg = Debug|Any CPU + {E4E396DA-90FB-489C-A40C-1B22563CF203}.Debug|x86.Build.0 = Debug|Any CPU + {E4E396DA-90FB-489C-A40C-1B22563CF203}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E4E396DA-90FB-489C-A40C-1B22563CF203}.Release|Any CPU.Build.0 = Release|Any CPU + 
{E4E396DA-90FB-489C-A40C-1B22563CF203}.Release|x64.ActiveCfg = Release|Any CPU + {E4E396DA-90FB-489C-A40C-1B22563CF203}.Release|x64.Build.0 = Release|Any CPU + {E4E396DA-90FB-489C-A40C-1B22563CF203}.Release|x86.ActiveCfg = Release|Any CPU + {E4E396DA-90FB-489C-A40C-1B22563CF203}.Release|x86.Build.0 = Release|Any CPU + {8C369124-BA40-4B77-8DCB-588700C97430}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8C369124-BA40-4B77-8DCB-588700C97430}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8C369124-BA40-4B77-8DCB-588700C97430}.Debug|x64.ActiveCfg = Debug|Any CPU + {8C369124-BA40-4B77-8DCB-588700C97430}.Debug|x64.Build.0 = Debug|Any CPU + {8C369124-BA40-4B77-8DCB-588700C97430}.Debug|x86.ActiveCfg = Debug|Any CPU + {8C369124-BA40-4B77-8DCB-588700C97430}.Debug|x86.Build.0 = Debug|Any CPU + {8C369124-BA40-4B77-8DCB-588700C97430}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8C369124-BA40-4B77-8DCB-588700C97430}.Release|Any CPU.Build.0 = Release|Any CPU + {8C369124-BA40-4B77-8DCB-588700C97430}.Release|x64.ActiveCfg = Release|Any CPU + {8C369124-BA40-4B77-8DCB-588700C97430}.Release|x64.Build.0 = Release|Any CPU + {8C369124-BA40-4B77-8DCB-588700C97430}.Release|x86.ActiveCfg = Release|Any CPU + {8C369124-BA40-4B77-8DCB-588700C97430}.Release|x86.Build.0 = Release|Any CPU + {D1CF8953-16EB-4B3A-92F9-ABA545F87D76}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D1CF8953-16EB-4B3A-92F9-ABA545F87D76}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D1CF8953-16EB-4B3A-92F9-ABA545F87D76}.Debug|x64.ActiveCfg = Debug|Any CPU + {D1CF8953-16EB-4B3A-92F9-ABA545F87D76}.Debug|x64.Build.0 = Debug|Any CPU + {D1CF8953-16EB-4B3A-92F9-ABA545F87D76}.Debug|x86.ActiveCfg = Debug|Any CPU + {D1CF8953-16EB-4B3A-92F9-ABA545F87D76}.Debug|x86.Build.0 = Debug|Any CPU + {D1CF8953-16EB-4B3A-92F9-ABA545F87D76}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D1CF8953-16EB-4B3A-92F9-ABA545F87D76}.Release|Any CPU.Build.0 = Release|Any CPU + {D1CF8953-16EB-4B3A-92F9-ABA545F87D76}.Release|x64.ActiveCfg = Release|Any CPU + 
{D1CF8953-16EB-4B3A-92F9-ABA545F87D76}.Release|x64.Build.0 = Release|Any CPU + {D1CF8953-16EB-4B3A-92F9-ABA545F87D76}.Release|x86.ActiveCfg = Release|Any CPU + {D1CF8953-16EB-4B3A-92F9-ABA545F87D76}.Release|x86.Build.0 = Release|Any CPU + {ECC8BFF1-12F9-4F7B-90FE-4FA454683C32}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {ECC8BFF1-12F9-4F7B-90FE-4FA454683C32}.Debug|Any CPU.Build.0 = Debug|Any CPU + {ECC8BFF1-12F9-4F7B-90FE-4FA454683C32}.Debug|x64.ActiveCfg = Debug|Any CPU + {ECC8BFF1-12F9-4F7B-90FE-4FA454683C32}.Debug|x64.Build.0 = Debug|Any CPU + {ECC8BFF1-12F9-4F7B-90FE-4FA454683C32}.Debug|x86.ActiveCfg = Debug|Any CPU + {ECC8BFF1-12F9-4F7B-90FE-4FA454683C32}.Debug|x86.Build.0 = Debug|Any CPU + {ECC8BFF1-12F9-4F7B-90FE-4FA454683C32}.Release|Any CPU.ActiveCfg = Release|Any CPU + {ECC8BFF1-12F9-4F7B-90FE-4FA454683C32}.Release|Any CPU.Build.0 = Release|Any CPU + {ECC8BFF1-12F9-4F7B-90FE-4FA454683C32}.Release|x64.ActiveCfg = Release|Any CPU + {ECC8BFF1-12F9-4F7B-90FE-4FA454683C32}.Release|x64.Build.0 = Release|Any CPU + {ECC8BFF1-12F9-4F7B-90FE-4FA454683C32}.Release|x86.ActiveCfg = Release|Any CPU + {ECC8BFF1-12F9-4F7B-90FE-4FA454683C32}.Release|x86.Build.0 = Release|Any CPU + {8CB05C57-7A07-4228-AE5F-2AC88792062A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8CB05C57-7A07-4228-AE5F-2AC88792062A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8CB05C57-7A07-4228-AE5F-2AC88792062A}.Debug|x64.ActiveCfg = Debug|Any CPU + {8CB05C57-7A07-4228-AE5F-2AC88792062A}.Debug|x64.Build.0 = Debug|Any CPU + {8CB05C57-7A07-4228-AE5F-2AC88792062A}.Debug|x86.ActiveCfg = Debug|Any CPU + {8CB05C57-7A07-4228-AE5F-2AC88792062A}.Debug|x86.Build.0 = Debug|Any CPU + {8CB05C57-7A07-4228-AE5F-2AC88792062A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8CB05C57-7A07-4228-AE5F-2AC88792062A}.Release|Any CPU.Build.0 = Release|Any CPU + {8CB05C57-7A07-4228-AE5F-2AC88792062A}.Release|x64.ActiveCfg = Release|Any CPU + {8CB05C57-7A07-4228-AE5F-2AC88792062A}.Release|x64.Build.0 = Release|Any CPU + 
{8CB05C57-7A07-4228-AE5F-2AC88792062A}.Release|x86.ActiveCfg = Release|Any CPU + {8CB05C57-7A07-4228-AE5F-2AC88792062A}.Release|x86.Build.0 = Release|Any CPU + {70E70027-97A1-4862-953F-32874684A334}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {70E70027-97A1-4862-953F-32874684A334}.Debug|Any CPU.Build.0 = Debug|Any CPU + {70E70027-97A1-4862-953F-32874684A334}.Debug|x64.ActiveCfg = Debug|Any CPU + {70E70027-97A1-4862-953F-32874684A334}.Debug|x64.Build.0 = Debug|Any CPU + {70E70027-97A1-4862-953F-32874684A334}.Debug|x86.ActiveCfg = Debug|Any CPU + {70E70027-97A1-4862-953F-32874684A334}.Debug|x86.Build.0 = Debug|Any CPU + {70E70027-97A1-4862-953F-32874684A334}.Release|Any CPU.ActiveCfg = Release|Any CPU + {70E70027-97A1-4862-953F-32874684A334}.Release|Any CPU.Build.0 = Release|Any CPU + {70E70027-97A1-4862-953F-32874684A334}.Release|x64.ActiveCfg = Release|Any CPU + {70E70027-97A1-4862-953F-32874684A334}.Release|x64.Build.0 = Release|Any CPU + {70E70027-97A1-4862-953F-32874684A334}.Release|x86.ActiveCfg = Release|Any CPU + {70E70027-97A1-4862-953F-32874684A334}.Release|x86.Build.0 = Release|Any CPU + {09E56536-DD59-49BD-A48F-41299C5B7ABF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {09E56536-DD59-49BD-A48F-41299C5B7ABF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {09E56536-DD59-49BD-A48F-41299C5B7ABF}.Debug|x64.ActiveCfg = Debug|Any CPU + {09E56536-DD59-49BD-A48F-41299C5B7ABF}.Debug|x64.Build.0 = Debug|Any CPU + {09E56536-DD59-49BD-A48F-41299C5B7ABF}.Debug|x86.ActiveCfg = Debug|Any CPU + {09E56536-DD59-49BD-A48F-41299C5B7ABF}.Debug|x86.Build.0 = Debug|Any CPU + {09E56536-DD59-49BD-A48F-41299C5B7ABF}.Release|Any CPU.ActiveCfg = Release|Any CPU + {09E56536-DD59-49BD-A48F-41299C5B7ABF}.Release|Any CPU.Build.0 = Release|Any CPU + {09E56536-DD59-49BD-A48F-41299C5B7ABF}.Release|x64.ActiveCfg = Release|Any CPU + {09E56536-DD59-49BD-A48F-41299C5B7ABF}.Release|x64.Build.0 = Release|Any CPU + {09E56536-DD59-49BD-A48F-41299C5B7ABF}.Release|x86.ActiveCfg = Release|Any CPU + 
{09E56536-DD59-49BD-A48F-41299C5B7ABF}.Release|x86.Build.0 = Release|Any CPU {54CD8587-0F45-2C2C-7AE4-BB92254202F5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {54CD8587-0F45-2C2C-7AE4-BB92254202F5}.Debug|Any CPU.Build.0 = Debug|Any CPU {54CD8587-0F45-2C2C-7AE4-BB92254202F5}.Debug|x64.ActiveCfg = Debug|Any CPU @@ -865,6 +1029,22 @@ Global {48955503-ECE7-4E3E-A1A1-8A04AA133724} = {4943BF85-38ED-4E0D-9FD7-1CC3D24FE106} {E47B85E3-AB64-C985-5561-3BA06AEB256F} = {E01B4001-A2EB-4134-8AA8-8A32F06F53FE} {4B987914-01EE-48B4-B645-A6F469297853} = {86C93AEE-6E67-44EB-BE96-168DFD26311D} + {4033DC3B-5F3E-4D69-AEC0-97D5BA4DB370} = {5B5342D2-2392-4EB8-9933-A21DB9416534} + {95780E7B-43ED-4404-8917-A46D4DC30083} = {4033DC3B-5F3E-4D69-AEC0-97D5BA4DB370} + {D2F88DCC-0DDB-4B25-BA0C-975D437F633D} = {4033DC3B-5F3E-4D69-AEC0-97D5BA4DB370} + {8658F459-F63E-443E-90CA-B9FFE463996B} = {4033DC3B-5F3E-4D69-AEC0-97D5BA4DB370} + {848DC6FA-94E0-4805-9107-501102BC4A6D} = {4033DC3B-5F3E-4D69-AEC0-97D5BA4DB370} + {5FA63BE3-43AB-47E4-9479-FC6AE30760E8} = {95780E7B-43ED-4404-8917-A46D4DC30083} + {BCEA1093-FB6F-4B58-AA8C-FF01DD5CB574} = {8658F459-F63E-443E-90CA-B9FFE463996B} + {D271F0DB-79CC-4878-9922-482FA7C49333} = {95780E7B-43ED-4404-8917-A46D4DC30083} + {37285E51-9DEE-4344-9B0E-B3185016600A} = {95780E7B-43ED-4404-8917-A46D4DC30083} + {E4E396DA-90FB-489C-A40C-1B22563CF203} = {848DC6FA-94E0-4805-9107-501102BC4A6D} + {8C369124-BA40-4B77-8DCB-588700C97430} = {D2F88DCC-0DDB-4B25-BA0C-975D437F633D} + {D1CF8953-16EB-4B3A-92F9-ABA545F87D76} = {D2F88DCC-0DDB-4B25-BA0C-975D437F633D} + {ECC8BFF1-12F9-4F7B-90FE-4FA454683C32} = {D2F88DCC-0DDB-4B25-BA0C-975D437F633D} + {8CB05C57-7A07-4228-AE5F-2AC88792062A} = {95780E7B-43ED-4404-8917-A46D4DC30083} + {70E70027-97A1-4862-953F-32874684A334} = {848DC6FA-94E0-4805-9107-501102BC4A6D} + {09E56536-DD59-49BD-A48F-41299C5B7ABF} = {95780E7B-43ED-4404-8917-A46D4DC30083} {54CD8587-0F45-2C2C-7AE4-BB92254202F5} = {570F3635-BEB1-4067-B10F-33DD890BDBD4} EndGlobalSection 
GlobalSection(ExtensibilityGlobals) = postSolution diff --git a/Directory.Packages.props b/Directory.Packages.props index 81e385c5..f648c311 100644 --- a/Directory.Packages.props +++ b/Directory.Packages.props @@ -11,6 +11,9 @@ + + + diff --git a/README.md b/README.md index 31e5a0f4..270403e1 100644 --- a/README.md +++ b/README.md @@ -47,11 +47,13 @@ The included [change log](CHANGELOG.md) details all key changes per published ve ## Samples -The following samples are provided to guide usage: +The repository includes Contoso reference samples that demonstrate CoreEx across API, database, outbox, subscriber, orchestration, and testing scenarios. See [samples/README.md](./samples/README.md) for the runnable topology, prerequisites, and commands. Sample | Description -|- -[My.Hr](./samples/My.Hr) | A sample to demonstrate the usage of _CoreEx_ within the context of a fictitious Human Resources solution. The main intent is to show how _CoreEx_ can be leveraged to build Web APIs and Azure Functions. Additionally, the unit testing provided within demonstrates the thoroughness of testing that can be achieved with some of the other repos mentioned below. +[Contoso Products](./samples/README.md) | Reference microservice showing API, database migrations, transactional outbox, relay, subscriber, and test coverage for a product/inventory domain. +[Contoso Shopping](./samples/README.md) | Reference microservice showing aggregate-centric application design, cross-service HTTP integration, hybrid caching, messaging, and integration testing. +[Contoso Orders / Order.Workflow](./samples/README.md) | Additional sample areas for order processing and Durable Task orchestration that are currently in progress.
diff --git a/azure/AGENTS.md b/azure/AGENTS.md index 1ad7023a..07433fe5 100644 --- a/azure/AGENTS.md +++ b/azure/AGENTS.md @@ -1,3 +1,9 @@ +--- +description: "Operational guidance for AI agents deploying Contoso sample services to Azure via azd/Bicep or Terraform" +scope: "azure/" +tags: ["azure", "deployment", "iac", "bicep", "terraform"] +--- + # AGENTS.md — Azure Deployment Operational guide for AI agents working in the `azure/` folder of this repository. This deploys the Contoso sample services (under `samples/src/`) to Azure using either **Azure Developer CLI (azd) + Bicep** or **Terraform**. diff --git a/docker-compose.yml b/docker-compose.yml index a477b9a3..3ceb0bc7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -41,4 +41,13 @@ services: ACCEPT_EULA: "Y" ports: - "5672:5672" # AMQP - - "5300:5300" # Management / HTTP \ No newline at end of file + - "5300:5300" # Management / HTTP + + + dts-emulator: + image: mcr.microsoft.com/dts/dts-emulator:latest + environment: + DTS_TASK_HUB_NAMES: "default,order" + ports: + - "8080:8080" + - "8082:8082" \ No newline at end of file diff --git a/docs/JsonDataReader-vs-JsonNodeDataReader.md b/docs/JsonDataReader-vs-JsonNodeDataReader.md new file mode 100644 index 00000000..02539b9f --- /dev/null +++ b/docs/JsonDataReader-vs-JsonNodeDataReader.md @@ -0,0 +1,189 @@ +# JsonDataReader vs JsonNodeDataReader + +## Overview + +`JsonNodeDataReader` is the **mutable** alternative to `JsonDataReader`, using `JsonNode` instead of `JsonElement`. 
+ +## Key Differences + +| Feature | JsonDataReader | JsonNodeDataReader | +|---------|----------------|-------------------| +| **Underlying Type** | `JsonElement` (struct) | `JsonNode` (class hierarchy) | +| **Mutability** | Immutable/Read-only | Mutable - can modify returned nodes | +| **Memory Model** | Backed by `JsonDocument` | Standalone object graph | +| **Disposal** | Requires `IDisposable` | No disposal needed | +| **Root Property** | `RootElement` (JsonElement) | `RootNode` (JsonNode) | +| **Type Checking** | `ValueKind` enum | Type patterns (`is JsonObject`) | +| **Performance** | Lower memory overhead | Higher allocation cost | +| **Thread Safety** | Safe (immutable) | Unsafe (mutable) | + +## API Comparison + +### Creating Instances + +```csharp +// JsonDataReader +var reader = JsonDataReader.ParseJson(jsonString); +using (reader) // Must dispose +{ + var root = reader.RootElement; +} + +// JsonNodeDataReader +var nodeReader = JsonNodeDataReader.ParseJson(jsonString); +// No disposal needed +var root = nodeReader.RootNode; +``` + +### Getting Data + +```csharp +// JsonDataReader +if (reader.TryGetPath("path.to.data", out JsonElement element)) +{ + // element is read-only +} + +// JsonNodeDataReader +if (nodeReader.TryGetPath("path.to.data", out JsonNode? node)) +{ + // node can be modified + if (node is JsonObject obj) + { + obj["newProperty"] = "new value"; // Mutation! + } +} +``` + +### Creating Data with Parameters + +```csharp +// JsonDataReader +if (reader.TryCreateData("data", out JsonElement? result, properties, parameters)) +{ + // result is immutable +} + +// JsonNodeDataReader +if (nodeReader.TryCreateData("data", out JsonNode? result, properties, parameters)) +{ + // result can be modified after creation + if (result is JsonArray arr) + { + arr.Add(JsonValue.Create("new item")); // Can add items! 
+ } +} +``` + +## When to Use Each + +### Use JsonDataReader When: +- ✅ You need maximum performance and minimal allocations +- ✅ Read-only access is sufficient +- ✅ Working with large JSON documents +- ✅ Thread-safety is important + +### Use JsonNodeDataReader When: +- ✅ You need to mutate the JSON after loading +- ✅ Building or modifying JSON structures dynamically +- ✅ Working with smaller datasets where allocation overhead is acceptable +- ✅ You prefer working with object-oriented APIs + +## Migration Path + +To migrate from `JsonDataReader` to `JsonNodeDataReader`: + +1. Change the type: + ```csharp + // Before + var reader = JsonDataReader.ParseJson(json); + + // After + var reader = JsonNodeDataReader.ParseJson(json); + ``` + +2. Update property access: + ```csharp + // Before + JsonElement root = reader.RootElement; + + // After + JsonNode root = reader.RootNode; + ``` + +3. Remove disposal (if using `using`): + ```csharp + // Before + using var reader = JsonDataReader.ParseJson(json); + + // After (no disposal needed) + var reader = JsonNodeDataReader.ParseJson(json); + ``` + +4. Update type checking: + ```csharp + // Before + if (element.ValueKind == JsonValueKind.Object) + + // After + if (node is JsonObject obj) + ``` + +5. Update parameter functions (note: `params` is a reserved C# keyword, so the variable is named `parameters`): + ```csharp + // Before + var parameters = new Dictionary<string, Func<JsonDataReaderArgs, object?>>(); + + // After + var parameters = new Dictionary<string, Func<JsonNodeDataReaderArgs, object?>>(); + ``` + +## Internal Implementation Differences + +### JsonDataReaderArgs vs JsonNodeDataReaderArgs + +```csharp +// JsonDataReaderArgs +public class JsonDataReaderArgs +{ + public JsonElement Root { get; init; } + public JsonProperty Current { get; internal set; } // Struct with Name + Value +} + +// JsonNodeDataReaderArgs +public class JsonNodeDataReaderArgs +{ + public JsonNode? Root { get; init; } + public string? CurrentPropertyName { get; internal set; } // Separate name + public JsonNode? 
CurrentValue { get; internal set; } // and value +} +``` + +### Copying Logic + +- **JsonDataReader**: Uses `Utf8JsonWriter` to write, then re-parses to `JsonElement` +- **JsonNodeDataReader**: Creates new `JsonObject`/`JsonArray`/`JsonValue` instances directly + +## Performance Considerations + +```csharp +// JsonDataReader - minimal allocations +var reader = JsonDataReader.ParseJson(largeJson); +var element = reader.RootElement; // Zero-copy access to document + +// JsonNodeDataReader - object allocations +var nodeReader = JsonNodeDataReader.ParseJson(largeJson); +var node = nodeReader.RootNode; // Allocates object graph +``` + +## Deprecation Timeline + +- **Phase 1** (Current): Both APIs available side-by-side +- **Phase 2** (Future): Mark `JsonDataReader` as `[Obsolete]` with migration guidance +- **Phase 3** (Major version): Remove `JsonDataReader` if JsonNodeDataReader proves superior + +## Examples + +See unit tests in: +- `tests/CoreEx.Data.Test.Unit/Json/JsonDataReaderTests.cs` (existing) +- `tests/CoreEx.Data.Test.Unit/Json/JsonNodeDataReaderTests.cs` (to be created) diff --git a/docs/agent-interaction-guide.md b/docs/agent-interaction-guide.md new file mode 100644 index 00000000..ff749b5e --- /dev/null +++ b/docs/agent-interaction-guide.md @@ -0,0 +1,277 @@ +# CoreEx Agent Interaction Guide + +This guide is for **consulting delivery teams consuming CoreEx from the starter or NuGet packages**. Its purpose is to help you use the agent as an interactive partner for learning the framework, understanding the sample architecture, and implementing features safely. + +This is not a framework reference. Use it when you want to know **how to ask**, **what to ask**, and **which skill or prompt to use**. + +## Start with the Right Kind of Request + +Most unhelpful agent conversations start with a request that is too vague. Tell the agent which of these modes you want: + +| If you want to... | Ask for... | Example | +|---|---|---| +| Understand how the repo works. 
| **Explanation / discovery.** | `Map the Shopping domain and explain its layers.` | +| Understand a pattern. | **Sample-backed explanation.** | `Explain outbox publishing in this repo using the Product sample.` | +| Decide what to build. | **Solution shaping.** | `Given this use case, what is the smallest CoreEx shape I should scaffold?` | +| Create a new domain. | **Greenfield scaffolding.** | `Generate a new Orders domain with validation and SQL Server persistence.` | +| Add capabilities to an existing domain. | **Capability retrofit.** | `Inspect this domain and add the missing messaging pieces.` | +| Change working code. | **Implementation.** | `Inspect the existing implementation first, then add support for X using current CoreEx patterns.` | + +## Ask for Repo-Grounded Answers + +When you are new to the framework, avoid asking for generic platform advice when what you really want is **how this repo does it**. + +Prefer: + +- `Explain validation in this repo with examples from the samples.` +- `Compare API + relay versus API-only using the Product and Shopping samples.` +- `Show me where ETag handling belongs in CoreEx service/controller flow.` + +Avoid: + +- `How should .NET APIs do validation?` +- `What is the best eventing architecture?` + +Those broader questions tend to produce generic answers instead of CoreEx-specific guidance. + +## Ask Capability Questions Directly + +Yes — you should absolutely ask direct capability questions such as: + +- `What is the idempotency key feature in CoreEx?` +- `What problem does it solve?` +- `When should I use it?` +- `How would I implement it in my solution?` + +That is a good way to learn the framework. In practice, the best version of that question asks for four things together: + +1. **What the capability is.** +2. **What problem it solves.** +3. **When to use it versus not use it.** +4. 
**How it is implemented in this repo or in my current solution.** + +Good examples: + +- `Explain the idempotency key feature in CoreEx. What problem does it solve, when should I use it, and how is it implemented in this repo?` +- `Explain ETag handling in CoreEx, what risks it addresses, and how I would add it to my current API.` +- `Explain the outbox pattern in this repo, what failure mode it prevents, and what I would need to add to my solution to support it.` + +This same pattern works well for: + +- idempotency +- ETags +- validation +- reference data orchestration +- FusionCache / Redis +- outbox relay +- subscriber hosts +- orchestration + +## Tell the Agent What Outcome You Want + +The same topic can produce very different outcomes. State the intended result explicitly: + +- **Explanation only** — no code changes. +- **Plan only** — propose the approach before editing. +- **Implement it** — make the changes. +- **Implement it and align to the samples** — make the changes using the current repo conventions. + +Examples: + +- `Explain how subscribers work here. No code changes.` +- `Plan the smallest safe way to add reliable event publishing to this domain.` +- `Implement this feature using existing CoreEx patterns only.` + +For quick capability or pattern questions that you do not want to add to the main conversation flow, use the Copilot CLI **`/ask`** command as a lightweight side question. + +Examples: + +- `/ask What is the idempotency key feature in CoreEx and what problem does it solve?` +- `/ask When is outbox sufficient and when do I need orchestration?` +- `/ask Does this repo treat ETag support as optional or expected for mutable entities?` + +## Ask the Agent to Inspect Before Recommending + +For existing solutions, ask the agent to inspect the current state before it suggests changes. This is especially important for consulting work, where a domain may already be partially set up. 
+ +Good examples: + +- `Inspect this domain and explain what capabilities are already present before recommending changes.` +- `Check whether this service already has outbox, subscribers, or relay support before proposing a retrofit.` +- `Map the current host shape first, then tell me what is missing for this use case.` + +This is the safest way to avoid duplicate hosts, redundant packages, or advice that ignores what the project already has. + +## Use the Right Skill or Prompt + +The repo already exposes several entry points. Use them intentionally. + +| Need | Best fit | Why | +|---|---|---| +| Understand an unfamiliar repo or area. | `acquire-codebase-knowledge` | Produces structured codebase documentation and evidence-backed discovery. | +| Create a new solution shape. | `coreex-project-bootstrap` | Best for solution-level bootstrapping from requirements. | +| Create a new custom domain. | `/generate-domain` | Best when the agent needs to reason about fields, validation, and event naming. | +| Create a template-shaped domain quickly. | `/scaffold-domain-from-templates` | Best for deterministic, template-aligned output. | +| Add capabilities to an existing domain. | `/add-capability` | Best for incremental retrofits such as relay, subscribe, and messaging alignment. | +| Start local dependencies or sample runtime. | `init`, `setup`, or Aspire tooling | Best for environment and sample execution workflows. | + +## A Good Question Usually Includes Four Things + +A strong request usually includes: + +1. **The use case.** +2. **The current context.** +3. **The desired outcome.** +4. **Any constraints.** + +Template: + +```text +I am working on . +Please inspect first. +I want . +Use existing CoreEx patterns and align to . +``` + +Example: + +```text +I am working on an Orders domain. +Inspect the current domain first. +I want a plan for adding reliable integration-event publishing. +Use existing CoreEx patterns and align to the Product sample. 
+``` + +## Questions That Work Well for New Developers + +### Learn the architecture + +- `Map the Products sample and explain the role of each project.` +- `Explain how Contracts, Application, Infrastructure, Api, Subscribe, and Outbox.Relay fit together here.` +- `Show me the request flow for a Product create in the samples.` + +### Learn a practice + +- `Explain where validation belongs in CoreEx and show the sample pattern.` +- `Explain why this repo uses outbox relay instead of publishing directly from the API.` +- `Show me how reference data is modeled and consumed.` + +### Learn a capability feature + +- `What is the idempotency key feature in CoreEx, what problem does it solve, and how would I implement it in my solution?` +- `What is ETag support in CoreEx, what issue does it prevent, and where does it belong in the API flow?` +- `What is FusionCache used for here, when should I add it, and what would the minimal implementation look like?` +- `What is the reference data orchestration feature, when is it worth adding, and how would it fit in this solution shape?` + +### Decide what to scaffold + +- `Given this use case, do I need API only, API + relay, API + subscribe, or orchestration?` +- `What is the smallest CoreEx shape that supports this requirement?` +- `Should this domain use /generate-domain or /add-capability?` + +### Prepare to implement + +- `Inspect the current domain and list what is already set up before recommending changes.` +- `Compare my use case to Product, Shopping, and Order.Workflow and tell me which pattern is closest.` +- `Plan the feature in terms of Contracts, Application, Infrastructure, Api, and hosts.` + +## When to Ask for Comparisons + +Comparative questions are especially useful when you are still learning the framework. 
+ +Examples: + +- `Compare API-only versus API + relay for this use case.` +- `Compare subscriber host versus orchestration worker for this business flow.` +- `Compare /generate-domain versus /scaffold-domain-from-templates for this new domain.` +- `Compare adding capability retroactively versus scaffolding the full host shape from day one.` + +This helps you learn the framework decision points instead of only getting one recommendation. + +## Ask for Sample Alignment Explicitly + +If you want implementation help that matches repo conventions, say so directly. + +Useful phrases: + +- `Align to the Product sample.` +- `Use the same host wiring pattern as Shopping.Subscribe.` +- `Follow the existing CoreEx instructions and sample conventions.` +- `Prefer the current repo pattern over generic alternatives.` + +## Tell the Agent How Conservative to Be + +In consulting projects, you often want the **smallest safe change**, not a broad redesign. + +Useful phrases: + +- `Prefer the smallest safe change.` +- `Do not restructure the domain unless required.` +- `Preserve the current layering and naming unless there is a clear mismatch.` +- `Add only the missing capability pieces.` + +## Good Framing for Feature Requests + +When asking for implementation help, frame the feature in business terms first, then let the agent translate that into CoreEx capabilities. + +Good: + +- `This service needs to publish an event after a successful write. What CoreEx capabilities should I add?` +- `This domain must react to upstream product updates. How should that be implemented in this repo?` +- `This process waits for external approval. Is this a subscriber problem or an orchestration problem?` + +Less effective: + +- `Add Service Bus.` +- `Use Redis.` +- `Add CoreEx.Events.` + +Package-driven questions are less useful than use-case-driven questions. 
+ +## Ask for Boundaries, Not Just Answers + +When you are uncertain, ask the agent to explain **what is in scope and what is out of scope** for a pattern. + +Examples: + +- `When is outbox sufficient and when do I need orchestration?` +- `When should I stop at API-only instead of adding subscribers?` +- `What does /add-capability handle today, and what would still need manual work?` + +That helps you understand the framework’s decision boundaries instead of only the happy path. + +For capability questions, useful follow-ups are: + +- `When should I avoid this feature?` +- `What is the smallest version of this capability I can add first?` +- `Does my current solution already have part of this set up?` +- `Which files or hosts would change if I add it?` + +## Common Anti-Patterns + +Avoid these when interacting with the agent: + +- Asking for a package before stating the use case. +- Asking for implementation without asking the agent to inspect the current state first. +- Asking for generic .NET advice when you need CoreEx-specific guidance. +- Asking for “the best architecture” instead of comparing concrete CoreEx shapes. +- Asking for a full redesign when you only need a capability retrofit. + +## Suggested Learning Sequence + +If you are new to CoreEx, this sequence works well: + +1. Read `README.md`. +2. Read `docs/application-scaffolding-guide.md`. +3. Read `docs/capabilities.md`. +4. Use the agent to map one sample domain. +5. Ask the agent to explain one end-to-end request flow. +6. Ask the agent to compare two implementation shapes for your real use case. +7. Move to planning or implementation only after that. + +## Where to Go Next + +- Use `docs/agent-prompt-recipes.md` for copy/paste prompt starters. +- Use `docs/application-scaffolding-guide.md` to choose the right host/capability shape. +- Use `docs/capabilities.md` for deeper pattern explanations. +- Use `docs/orchestration.md` when the use case goes beyond request/response plus outbox. 
diff --git a/docs/agent-prompt-recipes.md b/docs/agent-prompt-recipes.md new file mode 100644 index 00000000..0029625b --- /dev/null +++ b/docs/agent-prompt-recipes.md @@ -0,0 +1,337 @@ +# CoreEx Agent Prompt Recipes + +This guide gives **copy/paste prompt patterns** for consultant delivery teams using CoreEx from the starter or NuGet packages. Adapt the wording to your domain, but keep the structure. + +If you want to ask one of these as a quick side question in Copilot CLI without adding it to the main conversation flow, you can also prefix it with **`/ask`**. + +## 1. Understand the Codebase + +### Map a domain + +```text +Map the area of this repo for me. +Explain the role of Contracts, Application, Infrastructure, Api, Database, Subscribe, and Outbox.Relay if present. +Use concrete file references from this repo. +No code changes. +``` + +### Explain a request flow + +```text +Show me the end-to-end request flow for in the samples. +Use the actual sample code and explain which layer owns each step. +No code changes. +``` + +### Compare two samples + +```text +Compare the Product and Shopping samples for . +Focus on the differences in host shape, messaging, and domain behavior. +No code changes. +``` + +## 2. Learn a Pattern + +### Capability explainer template + +```text +Explain the feature in CoreEx. +Tell me: +- what it is +- what problem it solves +- when to use it and when not to use it +- how it is implemented in this repo +- how I would add it to my solution + +Use sample-backed guidance where possible. +No code changes. +``` + +### Idempotency key + +```text +What is the idempotency key feature in CoreEx? +What problem does it solve? +When should I use it? +How would I implement it in my solution? +Use the repo patterns and samples where relevant. +No code changes. +``` + +Quick CLI variant: + +```text +/ask What is the idempotency key feature in CoreEx, what problem does it solve, and when should I use it? 
+``` + +### Reference data orchestration + +```text +Explain the reference data orchestration feature in CoreEx. +What problem does it solve? +When is it worth adding? +How would it fit into my current solution shape? +``` + +### FusionCache / Redis + +```text +Explain the FusionCache and Redis pattern used in this repo. +What problem does it solve? +When should I add it? +What is the smallest implementation I could add first? +``` + +### Validation + +```text +Explain how validation is implemented in this repo. +Show me where the validator lives, where it is called, and how that differs from controller validation. +Use sample-backed examples only. +``` + +### Outbox + +```text +Explain how reliable event publishing works in this repo. +Use the Product sample and describe API host, outbox tables, relay host, and subscriber flow. +No code changes. +``` + +### ETag and concurrency + +```text +Explain ETag and optimistic concurrency handling in this repo. +Show me the contract, service, and API implications using an existing sample. +``` + +### Orchestration + +```text +Compare request/response plus outbox versus orchestration for this use case: +. +Use the Order.Workflow sample where relevant. +No code changes. +``` + +## 3. Shape a New Solution + +### Choose the smallest CoreEx shape + +```text +Given this use case: + + +Recommend the smallest CoreEx application shape that supports it. +Tell me whether I need API only, API + relay, API + subscribe, or orchestration. +Explain why. +``` + +### Choose the right scaffolding entry point + +```text +I need to implement this: + + +Tell me whether I should use coreex-project-bootstrap, /generate-domain, /scaffold-domain-from-templates, or /add-capability. +Explain the tradeoffs using the current repo guidance. +``` + +## 4. Plan a Feature + +### Plan before coding + +```text +Inspect the current implementation first. +Then create a plan for adding . 
+Use existing CoreEx patterns only and align to the closest sample in this repo. +Do not implement yet. +``` + +### Ask for layer-by-layer impact + +```text +For this feature: + + +Tell me what should change in Contracts, Application, Infrastructure, Api, and any hosts. +Use the current repo conventions. +No code changes yet. +``` + +### Ask for capability guidance + +```text +This feature needs: + + +Tell me which CoreEx capabilities are actually needed and which should be deferred. +Prefer the smallest safe change. +``` + +## 5. Implement a Feature Safely + +### Smallest safe change + +```text +Inspect the current domain first. +Implement using the smallest safe change. +Preserve the current layering and naming unless a restructure is required. +Use existing CoreEx patterns only. +``` + +### Sample-aligned implementation + +```text +Implement in this domain. +Align to the closest existing sample in this repo. +Explain briefly which sample you followed and why. +``` + +### Conservative enhancement + +```text +Enhance the existing implementation to support . +Do not regenerate the domain. +Do not add unrelated capabilities. +Inspect what is already present before editing. +``` + +## 6. Retrofit Existing Domains + +### Add reliable event publishing + +```text +Inspect this domain and determine whether it already has outbox, relay, or event publisher wiring. +Then add the missing pieces required for reliable integration-event publishing. +Use current CoreEx messaging patterns only. +``` + +### Add subscriber support + +```text +Inspect this domain for any existing Subscribe host or Service Bus wiring. +Then add the missing pieces required to consume . +Keep subscriber logic thin and aligned to repo conventions. +``` + +### Use the retrofit skill intentionally + +```text +Use /add-capability for this existing domain. +Inspect the current state first, then add . +Treat SQL Server and Azure Service Bus as defaults unless you find evidence otherwise. +``` + +## 7. 
Review an Existing Design + +### Convention check + +```text +Review this implementation against the current repo conventions. +Focus on layering, CoreEx usage, validation placement, host wiring, and messaging patterns. +Ignore style-only feedback. +``` + +### Compare against the samples + +```text +Compare this implementation to the closest sample in the repo. +Tell me what is aligned, what is drifting, and what matters functionally. +``` + +### Ask for missing capabilities + +```text +Inspect this domain and list which CoreEx capabilities appear to be missing for this use case: + + +Explain which are required now versus optional later. +``` + +## 8. Debug Architecture or Modeling Uncertainty + +### Is this CRUD, messaging, or orchestration? + +```text +I am not sure whether this requirement should be modeled as: +- a normal API write +- API + outbox event publishing +- subscriber-driven reaction +- orchestration + +Use the current repo patterns to compare those options for this use case: + +``` + +### Should I add a new host? + +```text +Inspect the current domain shape. +Tell me whether this requirement justifies adding a new host or can be handled in the existing ones. +Prefer the smallest safe architecture. +``` + +## 9. Learn by Asking Better Follow-Ups + +When the first answer is not enough, use follow-ups like these: + +- `Show me the actual files that demonstrate that pattern.` +- `Which sample is the closest fit for this advice?` +- `What is the smallest version of this that I can implement first?` +- `What would you defer until later?` +- `What files or hosts would change if I added this capability?` +- `Does my current solution already have part of this capability set up?` +- `What would change if this were an existing domain instead of a new one?` +- `What should I ask you next if I want you to implement this safely?` + +## 10. Prompt Framing Patterns That Usually Work + +### Explanation only + +```text +Explain in the context of this repo. 
+Use sample-backed evidence. +No code changes. +``` + +### Plan only + +```text +Inspect the current implementation first. +Then create a plan for . +Do not implement yet. +``` + +### Implement + +```text +Inspect the current implementation first. +Then implement using existing CoreEx conventions and the closest sample pattern. +Prefer the smallest safe change. +``` + +### Implement and verify scope + +```text +Implement . +Before changing code, tell me which hosts/layers you expect to modify and why. +Then proceed with the smallest safe change. +``` + +## 11. What to Include in Your Prompt + +For the best results, include: + +- the **use case** +- whether the code already exists +- whether you want **explanation**, **plan**, or **implementation** +- whether the result should align to a specific sample +- any constraints such as “smallest safe change”, “no new host unless necessary”, or “use current SQL Server/Service Bus defaults” + +## Where to Go Next + +- Use `docs/agent-interaction-guide.md` to understand how to interact with the agent effectively. +- Use `docs/application-scaffolding-guide.md` to choose the right CoreEx shape. +- Use `docs/capabilities.md` to dive deeper into the underlying patterns. diff --git a/docs/application-scaffolding-guide.md b/docs/application-scaffolding-guide.md new file mode 100644 index 00000000..d79edd0f --- /dev/null +++ b/docs/application-scaffolding-guide.md @@ -0,0 +1,361 @@ +# CoreEx Application Scaffolding Guide + +This guide helps a new team decide **what to scaffold first**, **which hosts to include**, and **which CoreEx capabilities to add now versus later**. It is intentionally decision-oriented: `docs/capabilities.md` explains what the framework can do, while this guide explains how to turn that into an application shape that fits your use case. 
If you want help learning how to ask the agent the right questions while making these decisions, see the [Agent Interaction Guide](agent-interaction-guide.md) and [Agent Prompt Recipes](agent-prompt-recipes.md). + +## Understand the Defaults vs the Abstractions + +In this repository, **SQL Server** and **Azure Service Bus** are the default scaffolding targets because they are the most complete sample implementations and the primary host wiring demonstrated in the starter and Contoso samples. + +That should not be read as "CoreEx only works with SQL Server and Service Bus." A better mental model is: + +- CoreEx provides **application and integration patterns** first. +- This repo currently provides **default implementation paths** for those patterns using SQL Server and Azure Service Bus. +- Alternative databases or brokers should be introduced when the **use case requires them**, not because teams want to abstract everything up front. + +Examples: + +- If SQL Server fits the operational and data requirements, use the standard SQL Server projects and migrations because that path is the most proven in this repo. +- If a domain truly requires a different database backend, CoreEx patterns such as contracts, services, validation, unit-of-work boundaries, and event workflows still matter; only the implementation plumbing changes. +- If Azure Service Bus fits the messaging needs, use the repo's default publisher/subscriber/relay wiring. +- If a use case requires another broker, the `EventData` abstraction and event-oriented architecture remain relevant even when the transport changes. 
+ +## Start with the Smallest Useful Shape + +The starter and sample architecture support a modular domain layout built from: + +- `Contracts` +- `Application` +- `Infrastructure` +- `Api` +- `Database` +- optionally `Subscribe` +- optionally `Outbox.Relay` +- optionally a separate worker or orchestration host + +The sample host shapes also include **OpenTelemetry-compatible telemetry wiring** via the standard CoreEx/OpenTelemetry setup shown in the sample `Program.cs` files, so observability can be added as part of the normal host composition rather than as a separate architecture track. + +For most teams, the right question is not "Which CoreEx packages exist?" but "Which responsibilities does this application need on day one?" + +Use this progression: + +1. Start with a **single API domain** when the service owns its own data and synchronous CRUD-style operations are the primary need. +2. Add an **outbox relay** when the API must publish integration events reliably after database commits. +3. Add a **subscriber host** when the service must react to events or commands from other services. +4. Add a **worker or orchestration host** when the business process is long-running, stateful, batch-oriented, externally coordinated, or compensation-heavy. + +## Which Scaffolding Path to Use + +| Need | Best starting point | Why | +|---|---|---| +| New implementation solution with one or more domains and standard hosts. | `coreex-project-bootstrap` | The starter is built to scaffold solution structure, package choices, standard `Program.cs` wiring, tests, and layered projects. | +| Existing domain needs new messaging/integration capability added incrementally. | `/add-capability` | Best when the domain already exists and you want to retrofit capabilities such as `Outbox.Relay`, `Subscribe`, Service Bus wiring, or subscriber scaffolding without re-scaffolding the whole domain. | +| New domain that mostly fits the standard template shape. 
| `/scaffold-domain-from-templates` | Fastest path when the entity shape is conventional and you want deterministic output. | +| New domain with custom rules, non-trivial fields, validation nuance, query behavior, or event naming decisions. | `/generate-domain` | Best when the agent needs to reason about the model and apply CoreEx conventions instead of copying templates verbatim. | + +## Recommended Application Shapes + +### 1. API-Only Domain + +Choose this when: + +- The service mainly exposes synchronous HTTP operations. +- It owns its own schema and data lifecycle. +- Cross-service integration is limited or can be added later. + +Scaffold: + +- `Contracts` +- `Application` +- `Infrastructure` +- `Api` +- `Database` +- matching API and common test projects + +Pull in early: + +- `CoreEx` +- `CoreEx.AspNetCore` +- `CoreEx.AspNetCore.NSwag` +- `CoreEx.Database.SqlServer` +- `CoreEx.EntityFrameworkCore` + +Default implementation note: + +- SQL Server is the default starting point because it has the strongest scaffolding and sample coverage in this repo. +- Treat that as the recommended initial implementation, not as a rule that every CoreEx application must use SQL Server forever. + +Usually add immediately: + +- `ETag` and change-log support for mutable entities. +- `ProblemDetails`/CoreEx exception handling. +- OpenAPI and health checks. + +Good fit: + +- Product master data. +- Reference-data-backed CRUD domains. +- Internal line-of-business APIs that do not yet need async integration. + +### 2. API + Outbox Relay + +Choose this when: + +- The API updates business data and must publish integration events reliably. +- You need to avoid dual writes to database plus broker. +- Other services depend on ordered or guaranteed event publication. 
+ +Scaffold: + +- API-only domain shape, plus `Outbox.Relay` + +Pull in early: + +- `CoreEx.Events` +- `CoreEx.Database.SqlServer` +- `CoreEx.Azure.Messaging.ServiceBus` + +Default implementation note: + +- SQL Server plus Azure Service Bus is the standard initial combination for reliable integration-event publication in this repo. +- Choose a different database or broker only when the business, platform, compliance, latency, throughput, tenancy, or deployment constraints justify that divergence. + +Usually add immediately: + +- Unit-of-work with outbox. +- Event formatter. +- Outbox tables and stored procedures in the database project. +- Relay host telemetry and health checks. + +Good fit: + +- Product, catalog, pricing, customer, or order domains that publish state-change events. +- Any service where "database committed but event not published" is unacceptable. + +### 3. API + Subscribe + Outbox Relay + +Choose this when: + +- The service both publishes its own events and consumes events or commands from other services. +- You are building a distributed service, not just a standalone API. +- You need asynchronous integration boundaries with explicit host separation. + +Scaffold: + +- `Contracts` +- `Application` +- `Infrastructure` +- `Api` +- `Database` +- `Subscribe` +- `Outbox.Relay` + +Pull in early: + +- `CoreEx.Events` +- `CoreEx.Azure.Messaging.ServiceBus` +- `CoreEx.Caching.FusionCache` when the service caches reference or replica data + +Default implementation note: + +- The sample architecture uses Azure Service Bus because the repo already demonstrates subscriber and relay hosts around it. +- The broader architectural pattern is still publish/subscribe with `EventData`; the broker choice is an implementation decision driven by the integration use case. + +Usually add immediately: + +- Subscriber classes per message subject. +- Shared error-handling strategy for known recoverable subscriber failures. 
+- Reference data orchestration if incoming messages rely on code tables or shared reference sets. + +Good fit: + +- Inventory availability projections. +- Shopping or basket domains that react to product or reservation events. +- Services that maintain local replicas of upstream data. + +### 4. API + Worker / Orchestration + +Choose this when: + +- The core business process spans multiple steps, services, or time boundaries. +- You need retries, timers, external-event waits, fan-out/fan-in, batching, or compensation. +- A request/response API plus pub/sub is not enough to model the process safely. + +Scaffold: + +- Core domain projects +- API host if the workflow is externally started or queried over HTTP +- separate worker/orchestration host +- supporting infrastructure for the workflow backend + +Pull in when needed: + +- Durable Task SDK + DTS patterns described in `docs/orchestration.md` +- CoreEx telemetry and health checks in the worker host + +Usually add immediately: + +- Explicit orchestration contracts. +- Activity boundaries around external calls. +- Client endpoint or service to start/query workflow instances. + +Good fit: + +- Order submission and approval flows. +- Long-running fulfilment or settlement processes. +- Human approval, callback-driven, or scheduled business operations. + +## Capability-by-Capability Guidance + +The biggest mistake new teams make is enabling every framework feature up front. Prefer enabling capabilities because the **use case demands them**, not because the package exists. + +| Capability | Add it when | Skip or defer when | +|---|---|---| +| **Validation** | The API accepts business input that must be checked consistently before persistence or orchestration. | The host is read-only or input shape is trivial and temporary. | +| **ETag / optimistic concurrency** | Multiple users or systems can update the same resource and lost updates matter. | Data is append-only or single-writer. 
| +| **Idempotency key** | Clients may retry POST requests, especially across unstable networks or user-driven retries. | The endpoint is naturally idempotent already or not externally retried. | +| **FusionCache + Redis** | Reads are hot, repeated, cross-instance, or expensive; you need hybrid L1/L2 caching and graceful degraded reads. | Data changes too frequently to benefit, or the service is small and latency/load do not justify cache complexity yet. | +| **Reference data orchestration** | The domain uses shared code tables, statuses, categories, units of measure, or other read-heavy lookup sets. | The values are local-only, short-lived, or not managed as reference data. | +| **Unit-of-work + outbox** | Data writes and integration-event publication must succeed together from a business perspective. | The service has no async integration boundary yet. | +| **Azure Service Bus integration** | You publish or consume messages across service boundaries and want the repo's standard broker pattern. | The application is strictly synchronous or local-only. | +| **Subscriber host** | The service reacts to upstream events/commands independently of user HTTP traffic. | Integration is outbound only. | +| **Outbox relay** | The service publishes integration events from committed business transactions. | The service consumes only and does not publish. | +| **Result pipelines** | You are modeling expected business failures or domain flows compositionally, especially around aggregates/workflows. | Exception-style services are clearer and the flow is simple CRUD. | +| **DomainDriven aggregate patterns** | The domain has invariants across child entities or rich mutation rules. | The service is mostly thin CRUD over simple records. | +| **Dynamic query / paging / filtering** | List endpoints need flexible API-side filtering, ordering, and projection. | Consumers only need a few fixed queries. 
| +| **Workflow orchestration** | A business process is long-running, resumable, externally coordinated, or compensation-heavy. | Simple CRUD plus event publication already covers the need. | + +## Choosing Defaults vs Diverging from Them + +Start with the repo defaults unless there is a concrete reason not to: + +- **Use SQL Server by default** because the database projects, migration tooling, outbox procedures, and sample hosts are already shaped around it. +- **Use Azure Service Bus by default** because the relay and subscriber patterns in this repo are already demonstrated around it. + +Diverge when the use case clearly demands it, for example: + +- Existing enterprise platform standards require another database or broker. +- Required operational characteristics are a poor fit for the default choice. +- A managed service, deployment target, or regulatory boundary constrains the technology selection. +- A specific integration landscape already centers on another messaging platform. + +When you do diverge, preserve the CoreEx patterns first: + +- keep the layered project shape +- keep `EventData` and integration-event conventions +- keep unit-of-work and outbox thinking where reliable publication still matters +- keep validation, execution context, HTTP semantics, and contract patterns + +In other words, the **use case should drive the backend**, not the other way around. + +## A Practical "What Should I Scaffold?" Checklist + +### If your application is mostly CRUD over owned data + +Scaffold a standard API domain first. Add: + +- contracts with `IIdentifier`, `IETag`, and `IChangeLog` where appropriate +- application service plus validator +- infrastructure repository and SQL Server database project +- API controllers with CoreEx `WebApi` helper style + +Do **not** start with orchestration or subscriber hosts unless there is an immediate business requirement. 
+ +### If your application must notify other services after changes + +Start with the standard API domain, but include outbox and relay from the beginning. That gives you: + +- reliable post-commit event publication +- a clean boundary between request handling and broker delivery +- room to add subscribers later without redesigning the write path + +### If your application depends heavily on upstream domain data + +Scaffold a subscriber host early. This is a strong signal that the service is part of an event-driven landscape and should not rely only on synchronous API calls to other domains. + +Typical example: + +- Shopping depends on product and inventory-related events. +- The service keeps local state aligned through subscribers while still serving its own API. + +### If your application has approvals, callbacks, batches, or days-long flows + +Scaffold orchestration intentionally rather than forcing that logic into controllers, subscribers, or background timers. A plain background service can run repeated work, but it is not a substitute for durable workflow state, replay, timers, external events, or compensation logic. + +## How to Think About Layering + +CoreEx is most effective when you keep the responsibilities sharp: + +| Layer | Put this here | Do not put this here | +|---|---|---| +| Contracts | DTOs, identifiers, ETags, change logs, reference-data code properties. | Domain rules, persistence logic, service calls. | +| Application | Validation, unit-of-work orchestration, business use cases, event creation, adapters as interfaces. | HTTP plumbing, EF details, transport-specific code. | +| Infrastructure | Repositories, EF mapping, query config, typed HTTP clients, adapter implementations, outbox publisher plumbing. | Controller concerns and user-facing endpoint logic. | +| API | Routing, request/response behavior, `WebApi` helper usage, OpenAPI metadata, idempotency and HTTP semantics. | Rich business rules or database composition. 
| +| Subscribe / Worker / Relay | Message consumption, hosted background processing, relay mechanics, orchestration workers. | User-driven request/response logic. | + +If a capability changes the transport or execution model, it usually belongs in a host project. If it changes business rules or persistence orchestration, it usually belongs in Application or Infrastructure. + +## Opinionated Defaults That Usually Pay Off + +For a new greenfield CoreEx service, these defaults are usually worth keeping: + +- Use the layered domain shape instead of collapsing everything into the API. +- Use SQL Server first unless you have a strong reason to diverge. +- Use validators rather than ad hoc controller checks. +- Use ETags on mutable resources. +- Use OpenAPI, ProblemDetails, execution context, and health endpoints from the start. +- Use outbox if you already know the service will publish integration events. +- Use reference data orchestration if statuses, categories, or codes are central to the model. + +## Things to Avoid Scaffolding Too Early + +- A subscriber host when the domain does not actually consume messages yet. +- Orchestration for simple CRUD plus single-event publication. +- Rich aggregate patterns for record-centric admin data with no real invariants. +- Redis/FusionCache before there is either shared-cache need or measurable read pressure. +- CQRS split for every service when read and write concerns are still simple. + +## Suggested First Questions for a New Domain + +Before scaffolding, answer these: + +1. Does this service own its own database schema? +2. Will it publish integration events after writes? +3. Will it consume events or commands from other services? +4. Does it need shared reference data? +5. Are updates concurrent enough to require ETags? +6. Are POST retries likely enough to require idempotency? +7. Is the core business flow synchronous, eventually consistent, or orchestrated over time? +8. 
Does the model behave like true aggregates with invariants, or mostly like CRUD records? + +Those answers usually determine the host set, package set, and scaffold depth more reliably than entity field lists alone. + +## Suggested Starter Combinations + +| Use case | Scaffold | CoreEx capabilities to prioritize | +|---|---|---| +| Internal admin CRUD API. | API domain + database. | Validation, ETag, change log, OpenAPI, paging/filtering. | +| Master-data service that other domains depend on. | API + database + outbox relay. | Validation, reference data, outbox, Service Bus publisher, idempotency. | +| Event-driven domain maintaining local replicas or reacting to commands. | API + subscribe + outbox relay. | Service Bus subscriber/publisher, outbox, reference data, cache, health/telemetry. | +| Long-running business process or approval workflow. | API + worker/orchestration host, optionally plus outbox/subscribers. | Durable orchestration, telemetry, external-event waits, retries, compensation. | +| Rich aggregate domain with nested rules. | Domain scaffold via `/generate-domain`. | DomainDriven patterns, validators, Result pipelines where appropriate, explicit mapping. | +| Straightforward conventional entity. | Domain scaffold via `/scaffold-domain-from-templates`. | Standard contracts/application/infrastructure/API/database shape with minimal custom reasoning. | + +## Where to Go Next + +- Read `coreex-starter/README.md` for starter/bootstrap expectations. +- Read `docs/capabilities.md` for the underlying framework features and patterns. +- Read `docs/orchestration.md` before adding a workflow worker. +- Use the Product and Shopping samples as the concrete reference architecture for API, subscribe, and outbox relay hosts. 
+ +## Evidence + +- `coreex-starter/README.md` +- `coreex-starter/.github/skills/coreex-project-bootstrap/SKILL.md` +- `.github/skills/generate-domain/SKILL.md` +- `.github/prompts/scaffold-domain-from-templates.prompt.md` +- `docs/capabilities.md` +- `docs/orchestration.md` +- `samples/src/Contoso.Products.Api/Program.cs` +- `samples/src/Contoso.Products.Subscribe/Program.cs` +- `samples/src/Contoso.Products.Outbox.Relay/Program.cs` diff --git a/docs/capabilities.md b/docs/capabilities.md new file mode 100644 index 00000000..8b0136c7 --- /dev/null +++ b/docs/capabilities.md @@ -0,0 +1,1109 @@ +# CoreEx Capabilities & Patterns Guide + +This document provides detailed explanations of CoreEx capabilities and common patterns to help developers understand the value and appropriate use cases for each feature. If you are deciding what to scaffold for a new service or domain, start with the [Application Scaffolding Guide](application-scaffolding-guide.md) and then use this document for the deeper capability details. If you are still learning how to ask the agent about these patterns effectively, also see the [Agent Interaction Guide](agent-interaction-guide.md) and [Agent Prompt Recipes](agent-prompt-recipes.md). 
+ +## Table of Contents + +- [General Capabilities](#general-capabilities) + - [Exception-Based Error Handling](#exception-based-error-handling) + - [Dynamic Dependency Injection](#dynamic-dependency-injection) + - [Entity Patterns](#entity-patterns) + - [Roslyn Source Generation](#roslyn-source-generation) + - [Instrumentation & Health Checks](#instrumentation--health-checks) + - [Hybrid Caching (L1 + L2)](#hybrid-caching-l1--l2) + - [Hosted Services](#hosted-services-timer--synchronized) + - [Reference Data](#reference-data) + - [JSON Filtering & Merge-Patch](#json-filtering--merge-patch) + - [Validation](#validation) + - [Mapping Helpers](#mapping-helpers) + - [Globalization & Localization](#globalization--localization) + - [Railway-Oriented Programming](#railway-oriented-programming-with-resultt) +- [API & HTTP Features](#api--http-features) + - [Web API Styles](#web-api-styles-minimal--mvc) + - [RFC 7386 Merge-Patch](#rfc-7386-merge-patch-applicationmerge-patchjson) + - [Response JSON Filtering](#response-json-filtering) + - [Error Handling with ProblemDetails](#error-handling-with-problemdetails) + - [Conditional Request Semantics](#conditional-request-semantics-if-match) + - [Idempotency-Key](#idempotency-key) + - [Health Check Endpoints](#health-check-endpoints) + - [OpenAPI Integration](#openapi-integration-nswag) + - [CQRS](#cqrs-command-query-responsibility-segregation) +- [Data Access & Persistence](#data-access--persistence) + - [Unit-of-Work with Integrated Outbox](#unit-of-work-with-integrated-outbox) + - [Paging & Enumeration](#paging--enumeration) + - [Dynamic Query](#dynamic-query-odata-style) + - [Multi-Tenancy](#multi-tenancy) + - [Type Discriminators](#type-discriminators) +- [Database Support](#database-support) + - [SQL Server](#sql-server) + - [PostgreSQL](#postgresql) + - [ADO.NET Command & Parameter Extensions](#adonet-command--parameter-extensions) + - [Entity Framework Integration](#entity-framework-integration) +- [Messaging & 
Events](#messaging--events) + - [EventData Abstraction](#eventdata-abstraction) + - [CloudEvent Interoperability](#cloudevent-interoperability) + - [Publish + Subscribe Patterns](#publish--subscribe-patterns) + - [Azure Service Bus Integration](#azure-service-bus-integration) + - [Outbox Relay](#outbox-relay-with-partitioning) + - [Workflow Orchestration (Durable Task SDK + DTS)](#workflow-orchestration-durable-task-sdk--dts) +- [Domain-Driven Design](#domain-driven-design) + - [Aggregate & Entity Modeling](#aggregate--entity-modeling) + - [Value Objects](#value-objects) + - [Integration Events Only](#integration-events-only) +- [Putting It All Together](#putting-it-all-together-a-typical-request-flow) +- [Summary](#summary) + +--- + +## General Capabilities + +### Exception-Based Error Handling + +**Pattern:** CoreEx defines specific exception types that map to HTTP status codes automatically. + +CoreEx exception types include: +- `NotFoundException` — Resource not found (404). +- `ValidationException` — Validation failure (400). +- `ConcurrencyException` — ETag/version conflict (409). +- `BusinessException` — Domain rule violation (400). +- `AuthenticationException` — Unauthorized (401). +- `AuthorizationException` — Forbidden (403). + +**Why it matters:** Instead of throwing generic `Exception` or returning error codes, you throw domain-specific exceptions that middlewares automatically convert to RFC 9457 ProblemDetails responses. This keeps error handling logic centralized and consistent across APIs. + +**Example:** +```csharp +public async Task GetProductAsync(Guid id) +{ + var product = await _repository.GetByIdAsync(id); + if (product == null) + throw new NotFoundException($"Product '{id}' not found."); + return product; +} + +// Middleware automatically converts to: +// HTTP 404 with ProblemDetails JSON +``` + +### Dynamic Dependency Injection + +**Pattern:** Register and resolve services without a traditional DI container through dynamic composition. 
+ +CoreEx provides extension methods like `AddExecutionContext()`, `AddMvcWebApi()`, `AddHttpWebApi()` that setup services with sensible defaults. You can layer additional registrations on top without heavyweight container configuration. + +**Why it matters:** Reduces boilerplate, makes service composition explicit, and keeps middleware stacks clean and understandable. + +**Example:** +```csharp +builder.Services + .AddExecutionContext() // Execution tenant/user context + .AddMvcWebApi() // MVC + exception handling + .AddHttpWebApi() // Minimal API + exception handling + .AddSqlServerDatabase() + .AddOutbox() + .AddFusionCache(); +``` + +### Entity Patterns + +**Identifiers & Composite Keys** + +CoreEx supports two patterns for entity identity: + +1. **Single Identifier** — Most entities have a single ID (GUID, int, string). + ```csharp + public interface IIdentifier + { + object? Id { get; } + } + + public class Product : IIdentifier + { + public Guid Id { get; set; } + public string Sku { get; set; } + } + ``` + +2. **Composite Key** — Some entities have multi-part identity (e.g., tenant + entityId). + ```csharp + public interface ICompositeKey + { + object?[] CompositeKeys { get; } + } + + public class TenantProduct : ICompositeKey + { + public Guid TenantId { get; set; } + public Guid ProductId { get; set; } + + public object?[] CompositeKeys => new object[] { TenantId, ProductId }; + } + ``` + +**ETags (Optimistic Concurrency)** + +ETags prevent lost-update conflicts in optimistic concurrency scenarios: + +```csharp +public interface IETag +{ + string? ETag { get; set; } +} + +public class Product : IETag +{ + public Guid Id { get; set; } + public string? 
ETag { get; set; } + public decimal Price { get; set; } +} + +// API usage: +// GET /api/products/123 returns Product with ETag: "abc123" +// PUT /api/products/123 with IF-MATCH: abc123 header +// If another request updated the product first, PUT returns 409 Conflict +``` + +**Change Logs (Audit Metadata)** + +Track when entities were created and last modified: + +```csharp +public interface IChangeLog +{ + ChangeLog? ChangeLog { get; set; } +} + +public class ChangeLog +{ + public DateTime? CreatedDate { get; set; } + public string? CreatedBy { get; set; } + public DateTime? UpdatedDate { get; set; } + public string? UpdatedBy { get; set; } +} + +// Automatically populated by repository on insert/update +public class Product : IChangeLog +{ + public Guid Id { get; set; } + public ChangeLog? ChangeLog { get; set; } +} +``` + +**Deep Compare** + +Compare two entities for equality considering all properties recursively: + +```csharp +var original = await _repo.GetByIdAsync(id); +// ... user modifies entity +var modified = new Product { Id = id, Name = "New Name", /* ... */ }; + +bool hasChanged = original.DeepEquals(modified); +if (!hasChanged) return NoContent(); // 204 +``` + +### Roslyn Source Generation + +**Pattern:** Auto-generate boilerplate code (e.g., serialization, mapping, contracts) at compile time using Roslyn analyzers. + +CoreEx includes a contract generator that creates DTOs, mapping, and validation code from domain models using source generation. This eliminates manual mapping code and keeps serialization fast. + +**Why it matters:** Reduces hand-written boilerplate, ensures domain model and contracts stay in sync, and improves startup performance via compile-time code generation. + +### Instrumentation & Health Checks + +**Pattern:** Built-in OpenTelemetry integration and standard health check endpoints. + +CoreEx middleware automatically emits traces, metrics, and logs. 
Health checks are exposed on `/health/live` and `/health/ready` endpoints: + +```csharp +app.MapHealthChecks("/health/live"); // Liveness (app running?) +app.MapHealthChecks("/health/ready"); // Readiness (ready to receive traffic?) +``` + +These endpoints integrate with Kubernetes and container orchestrators for probes and graceful shutdown. + +### Hybrid Caching (L1 + L2) + +**Pattern:** Distributed cache with local in-process backup for fault tolerance. + +CoreEx uses **FusionCache** with optional **Redis** backplane: +- **L1:** In-process memory cache (fast, shared scope, ~1MB typical). +- **L2:** Redis (slower, shared across all service instances). +- **Fallback:** If Redis is down, L1 cache continues serving stale data. + +```csharp +builder.Services.AddFusionCache() + .WithRedisBackplane("localhost:6379"); + +// Usage in services: +var product = await _cache.GetOrSetAsync( + key: $"product:{id}", + factory: async ct => await _repository.GetByIdAsync(id, ct), + duration: TimeSpan.FromHours(1), + cancellationToken: ct +); +``` + +**Why it matters:** Dramatically improves performance (milliseconds vs. seconds), reduces database load, and handles Redis failures gracefully. + +### Hosted Services (Timer & Synchronized) + +**Pattern:** Background work scheduled at intervals or synchronized across multiple instances. + +Use `IHostedService` implementation for: +- **Timer-based work** — Run a task every N seconds (e.g., cleanup, health checks). +- **Synchronized work** — Coordinate jobs across multiple instances using distributed locks. 
+ +```csharp +public class InventoryAdjustmentService : BackgroundService +{ + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + var timer = new PeriodicTimer(TimeSpan.FromMinutes(5)); + while (await timer.WaitForNextTickAsync(stoppingToken)) + { + await AdjustInventoryAsync(stoppingToken); + } + } +} + +builder.Services.AddHostedService(); +``` + +### Reference Data + +**Pattern:** Load, cache, and orchestrate reference datasets (enums, lookup tables) with transactional integrity. + +Reference data (like product categories, statuses, coupons) is typically read-heavy and must sync across services. CoreEx provides: + +```csharp +// Define reference data +public class Category : ReferenceData +{ + public int Code { get; set; } + public string? Description { get; set; } +} + +// Load and cache +var categories = await _refData.GetCollectionAsync(); + +// Automatic caching with orchestration +// All instances see the same data +// Invalidation on source updates +``` + +**Why it matters:** Eliminates N+1 query problems, ensures consistency, and simplifies dependency management in distributed systems. + +### JSON Filtering & Merge-Patch + +**Pattern:** Dynamically exclude fields from responses and support RFC 7386 PATCH. + +**Response Filtering** — Control which fields appear in JSON based on query parameters or roles: + +```csharp +// GET /api/products/123?$fields=id,name +// Returns only id and name, omitting price, cost, margin +``` + +**Merge-Patch** — RFC 7386 PATCH for partial updates: + +```csharp +// PATCH /api/products/123 +// Content-Type: application/merge-patch+json +// {"name": "New Name"} // other fields unchanged +``` + +Both use `System.Text.Json` without external dependencies. + +### Validation + +**Pattern:** Built-in validation rules as alternative to FluentValidation frameworks. 
+
+CoreEx provides validation decorators and APIs without forcing a particular framework:
+
+```csharp
+public class ProductValidator
+{
+    public static void Validate(Product p)
+    {
+        var errors = new List<string>();
+        if (string.IsNullOrEmpty(p.Sku))
+            errors.Add("Sku is required.");
+        if (p.Price < 0)
+            errors.Add("Price must be non-negative.");
+
+        if (errors.Any())
+            throw new ValidationException(errors);
+    }
+}
+```
+
+### Mapping Helpers
+
+**Pattern:** Explicit one-way or bi-directional mapping without external frameworks.
+
+CoreEx provides mapping utilities that make transformations explicit and traceable:
+
+```csharp
+var productDto = mapper.Map<ProductDto>(product);
+// or bi-directional
+var product = mapper.MapFrom<Product>(dto);
+```
+
+No AutoMapper dependency means simpler dependencies and explicit code paths.
+
+### Globalization & Localization
+
+**Pattern:** Culture-aware text and formatting throughout requests.
+
+`ExecutionContext` carries culture information per request:
+
+```csharp
+var currentCulture = ExecutionContext.Current?.CultureInfo; // e.g., "en-US"
+var formattedPrice = product.Price.ToString("C", currentCulture);
+```
+
+Enables multi-language APIs without routing changes.
+
+### Railway-Oriented Programming with Result<T>
+
+**Pattern:** Composable error flow using `Result<T>` instead of exceptions for expected errors.
+
+`Result<T>` represents success (Ok) or failure (Error) without throwing:
+
+```csharp
+public Result<Product> ValidateProduct(Product p)
+{
+    if (string.IsNullOrEmpty(p.Name))
+        return Result.Error("Name is required.");
+
+    return Result.Ok(p);
+}
+
+// Usage - chain results without try/catch
+var result = ValidateProduct(product)
+    .Then(p => _repository.SaveAsync(p))
+    .Then(p => MapToDto(p));
+
+if (!result.IsSuccessful)
+    throw new ValidationException(result.Error);
+
+return result.Value;
+```
+
+---
+
+## API & HTTP Features
+
+### Web API Styles (Minimal & MVC)
+
+**Pattern:** Support both minimal APIs and MVC controllers with unified middleware.
+ +CoreEx works with both styles seamlessly: + +**Minimal API:** +```csharp +app.MapGet("/api/products/{id}", GetProduct) + .WithName("GetProduct") + .WithOpenApi(); + +async Task GetProduct(Guid id, IProductService service) + => await service.GetProductAsync(id); +``` + +**MVC Controller:** +```csharp +[ApiController] +[Route("api/[controller]")] +public class ProductsController : ControllerBase +{ + [HttpGet("{id}")] + public async Task> GetProduct(Guid id) + => await _service.GetProductAsync(id); +} +``` + +Both use the same exception handling, logging, and middleware. + +### RFC 7386 Merge-Patch (application/merge-patch+json) + +**Pattern:** Partial updates with semantic merge semantics. + +Instead of PUT (must send whole resource) or ad-hoc PATCH, use standard merge-patch: + +```csharp +// Partial update - unspecified fields unchanged +PATCH /api/products/123 +Content-Type: application/merge-patch+json + +{ + "name": "New Name" + // price, category, etc. remain unchanged +} +``` + +Safer and more predictable than custom PATCH semantics. + +### Response JSON Filtering + +**Pattern:** Dynamically exclude fields from responses. + +Reduces payload size and hides sensitive fields: + +```csharp +// GET /api/products?$fields=id,name,price +// Response omits cost, margin, internalNotes + +// GET /api/products?$fields=id,name +// Response omits all other fields +``` + +Implemented via middleware with zero manual code per endpoint. + +### Error Handling with ProblemDetails + +**Pattern:** RFC 9457 standard error responses everywhere. + +CoreEx exception middleware automatically converts exceptions to ProblemDetails: + +```json +{ + "type": "https://example.com/problems/not-found", + "title": "Not Found", + "status": 404, + "detail": "Product 'xyz' not found.", + "traceId": "0HN4..." +} +``` + +Consistent error format across all endpoints and all APIs. 
+ +### Conditional Request Semantics (IF-MATCH) + +**Pattern:** Prevent lost updates and concurrent modifications via ETags. + +GET returns an ETag; PUT/PATCH require IF-MATCH header: + +```http +GET /api/products/123 +200 OK +ETag: "v2-abc123" + +{product json} + +--- + +PUT /api/products/123 +IF-MATCH: v2-abc123 +{updated fields} +200 OK + +--- + +// If stale: +PUT /api/products/123 +IF-MATCH: v1-old +{updated fields} +409 Conflict // Another request updated it +``` + +Prevents lost-update anomalies in concurrent scenarios. + +### Idempotency-Key + +**Pattern:** Automatic deduplication of POST operations. + +Clients send a unique `Idempotency-Key` header; CoreEx ensures the operation runs exactly once: + +```http +POST /api/baskets/123/checkout +Idempotency-Key: client-request-id-abc123 + +201 Created / 200 OK +``` + +If the same key is resubmitted, CoreEx returns the cached response without re-executing. + +**Why it matters:** Safe retries in unreliable networks; critical for payment systems, order placement, etc. + +### Health Check Endpoints + +**Pattern:** Expose `/health/live` and `/health/ready` for orchestration. + +Kubernetes, Docker Swarm, and load balancers probe these endpoints: + +``` +GET /health/live +200 OK (app is running) + +GET /health/ready +503 Service Unavailable (database down, not ready for traffic) +``` + +Typical ready checks include database connectivity, cache availability, and broker connectivity. + +### OpenAPI Integration (NSwag) + +**Pattern:** Auto-generate OpenAPI schemas for use in Swagger UI and clients. + +CoreEx integrates with NSwag to produce accurate OpenAPI 3.0+ schemas: + +```csharp +builder.Services.AddOpenApiDocument(opts => +{ + opts.Title = "Product API"; + opts.Version = "v1"; +}); + +app.UseOpenApi(); // Serves /swagger/v1/openapi.json +app.UseSwaggerUI(); // Serves Swagger UI +``` + +**Why it matters:** Automatically generated API docs that stay in sync; enables client code generation. 
+
+### CQRS (Command Query Responsibility Segregation)
+
+**Pattern:** Separate read and write services when architectures demand it.
+
+A typical microservice uses a single domain model. For complex systems:
+
+- **Commands (Write)** — ProductMutationService handles create/update/delete.
+- **Queries (Read)** — ProductQueryService handles all reads with separate caching.
+
+```csharp
+// Write model
+public class ProductMutationService
+{
+    public async Task CreateAsync(CreateProductRequest req) { ... }
+    public async Task UpdateAsync(Guid id, UpdateProductRequest req) { ... }
+}
+
+// Read model
+public class ProductQueryService
+{
+    public async Task<Product> GetAsync(Guid id) { ... }
+    public async Task<IEnumerable<Product>> QueryAsync(FilterOptions opts) { ... }
+}
+```
+
+Useful for event-sourced or high-scale systems; adds complexity otherwise.
+
+---
+
+## Data Access & Persistence
+
+### Unit-of-Work with Integrated Outbox
+
+**Pattern:** Transactional boundary ensuring database writes and event publishing are atomic.
+
+The unit-of-work wraps all database operations and maintains an outbox table for events:
+
+```csharp
+public async Task CreateProductAsync(CreateProductRequest req)
+{
+    using var uow = _unitOfWorkFactory.Create();
+
+    var product = new Product { Name = req.Name, Price = req.Price };
+    await uow.Products.SaveAsync(product);
+
+    // Event added to UoW, written to [Products].[Outbox]
+    uow.Events.Add(new ProductCreated { ProductId = product.Id });
+
+    // All database writes flushed atomically
+    await uow.CommitAsync();
+
+    // Separate relay process reads [Products].[Outbox]
+    // and publishes to Service Bus
+}
+```
+
+**Why it matters:** If the process crashes after the database commit, the events are still guaranteed to be published (via the relay). This eliminates the dual-write problem.
+
+### Paging & Enumeration
+
+**Pattern:** Skip/take pagination with total count for OData-like APIs.
+

Pagination is stateless and works with dynamic filtering:

```csharp
public class PagingArgs
{
    public int Skip { get; set; } // 0-based offset
    public int Take { get; set; } // page size, usually 10–100
}

public async Task<(IEnumerable<Product>, long TotalCount)> QueryAsync(
    PagingArgs paging,
    FilterOptions? filter = null)
{
    var products = await _repository.QueryAsync(paging, filter);
    var totalCount = await _repository.CountAsync(filter);
    return (products, totalCount);
}

// HTTP usage:
// GET /api/products?$skip=0&$take=20
// Returns 20 products + X-Total-Count: 1543 header
```

### Dynamic Query (OData-Style)

**Pattern:** User-provided filtering and ordering without hardcoding every combination.

CoreEx translates query parameters to SQL dynamically:

```
GET /api/products?$filter=price gt 100 and category eq 'Bikes'&$orderby=name&$fields=id,name,price
```

Supports:
- Comparison operators: `eq`, `ne`, `gt`, `ge`, `lt`, `le`
- Logical operators: `and`, `or`
- Functions: `contains`, `startswith`, `endswith`
- Ordering: `$orderby=field1,field2 desc`
- Projection: `$fields=id,name` (response filtering)

### Multi-Tenancy

**Pattern:** Isolate data per tenant transparently via `ExecutionContext`.

Each request carries tenant identity in `ExecutionContext`:

```csharp
var tenantId = ExecutionContext.Current?.TenantId;

// Repositories automatically filter by tenant
var products = await _repository.QueryAsync(); // Only this tenant's products
```

Database rows include a `TenantId` column; queries are filtered in the WHERE clause automatically.

### Type Discriminators

**Pattern:** Model polymorphic or partitioned data sets using discriminator columns. 
+ +When entities might be subtypes (e.g., `Product` might be `PhysicalProduct` or `DigitalProduct`): + +```csharp +public abstract class Product +{ + public Guid Id { get; set; } + public string Type { get; set; } // Discriminator +} + +public class PhysicalProduct : Product +{ + public decimal Weight { get; set; } + public string Dimensions { get; set; } +} + +public class DigitalProduct : Product +{ + public Uri DownloadUrl { get; set; } + public int MaxDownloads { get; set; } +} +``` + +Stored in one table with a `Type` column; ORM automatically hydrates correct subclass. + +--- + +## Database Support + +### SQL Server + +**Pattern:** Primary database target with full feature support. + +CoreEx ships with `CoreEx.Database.SqlServer` providing: +- Migrations via **DbEx** (custom migration runner). +- Data seeding from YAML files. +- Outbox relay with partitioning. +- Full TSQL support. + +In this repository, SQL Server is the **default initial implementation** and the most complete scaffolding target. That reflects current sample coverage and tooling depth, not a claim that CoreEx patterns are inherently SQL Server-only. + +### PostgreSQL + +**Pattern:** Secondary/evolving support. + +PostgreSQL support depends on the package (marked with `*` in documentation). Many CoreEx features work, but SQL Server is the first-class target. + +### ADO.NET Command & Parameter Extensions + +**Pattern:** Fluent ADO.NET helpers reduce boilerplate SQL composition. + +Instead of manual `SqlCommand` construction: + +```csharp +// Manual +var cmd = new SqlCommand("SELECT * FROM [Products] WHERE Id = @Id", connection); +cmd.Parameters.AddWithValue("@Id", id); + +// CoreEx extension +var cmd = connection.CreateCommand("SELECT * FROM [Products] WHERE Id = @Id") + .ParamWithValue("@Id", id); +``` + +Safer, more readable, less repetitive. + +### Entity Framework Integration + +**Pattern:** CoreEx works with EF Core repositories and unit-of-work patterns. 
+

`CoreEx.EntityFrameworkCore` provides:
- Base repository classes wrapping `DbSet<TEntity>`.
- Unit-of-work with EF's `SaveChangesAsync()`.
- Outbox integration.

```csharp
public class ProductRepository : Repository<Product>
{
    public ProductRepository(DbContext context) : base(context) { }

    public async Task<Product?> GetBySkuAsync(string sku)
        => await _context.Products.SingleOrDefaultAsync(p => p.Sku == sku);
}
```

---

## Messaging & Events

### EventData Abstraction

**Pattern:** Format-agnostic event envelope that decouples event definition from transport.

Events are serialized into `EventData` and can be published to any broker:

```csharp
public class ProductCreatedEvent
{
    public Guid ProductId { get; set; }
    public string Name { get; set; }
    public DateTime CreatedAt { get; set; }
}

// Wrapped in EventData
var eventData = new EventData
{
    Subject = "contoso.products.product",
    Action = "created",
    Version = "v1",
    Data = JsonSerializer.SerializeToElement(new ProductCreatedEvent { ... })
};

// Can publish to Service Bus, RabbitMQ, Kafka, etc.
await _eventPublisher.PublishAsync(eventData);
```

### CloudEvent Interoperability

**Pattern:** Automatic conversion to CNCF CloudEvents format.

`EventData` can be serialized/deserialized as CloudEvents for standards compliance:

```json
{
  "specversion": "1.0",
  "type": "com.example.products.created",
  "source": "https://example.com/products",
  "id": "abc123",
  "time": "2024-01-15T12:34:56Z",
  "data": { "productId": "xyz", "name": "Bike" }
}
```

Enables interop with other CloudEvents consumers (AWS EventBridge, etc.).

### Publish + Subscribe Patterns

**Pattern:** Per-message subscription with configurable consumption strategy.

Subscribers join a topic/queue and consume messages from a specific position:

- **From Beginning** — Consume all historical events.
- **From End** — Consume only new events from now on. 
+- **Latest Checkpoint** — Resume from where the subscriber last left off. + +```csharp +public class ProductModifySubscriber : SubscriberHost +{ + protected override async Task OnEventAsync( + EventData eventData, + ProductCreatedEvent data, + CancellationToken cancellationToken) + { + // React to product creation + // E.g., sync to Read Model, update search index + await _searchIndex.IndexAsync(data.ProductId, cancellationToken); + } +} +``` + +### Azure Service Bus Integration + +**Pattern:** Native Service Bus topic/subscription support with partitioning. + +CoreEx provides `IEventPublisher` and `ISubscriber` implementations for Service Bus: + +```csharp +builder.Services.AddServiceBusEventPublisher("Endpoint=..."); +builder.Services.AddServiceBusSubscriber("products"); +``` + +Handles: +- Topic/subscription creation. +- Automatic serialization. +- Partition affinity (partition key = session ID for ordered processing). + +In this repository, Azure Service Bus is the **default initial broker implementation** because the sample relay/subscriber hosts are wired around it. The surrounding event model is broader than that specific broker choice: `EventData` is transport-oriented rather than Service Bus-specific. + +### Outbox Relay (with Partitioning) + +**Pattern:** Dedicated host that reads from database outbox and publishes to broker. + +Each domain has its own relay process: + +1. Business logic writes events to `[Schema].[Outbox]` table within transaction. +2. Separate **Outbox.Relay** host polls the table every N seconds. +3. Relay fetches unpublished outbox rows and publishes to Service Bus. +4. On success, marks rows as published. +5. On failure, retries with exponential backoff. 
+ +``` +┌─────────────────┐ +│ API Process │ +│ Writes events │ +│ to Outbox │ +└────────┬────────┘ + │ + [DB Outbox] + │ +┌────────▼────────┐ +│ Outbox.Relay │ +│ Polls every │ +│ 5 seconds │ +└────────┬────────┘ + │ + [Service Bus] + │ +┌────────▼───────────┐ +│ Subscribe Services │ +│ React to events │ +└────────────────────┘ +``` + +**Partitioning:** If an event has a `PartitionKey`, relay publishes to the same partition in Service Bus for ordered processing. + +**Why it matters:** Guarantees events are published even if relay crashes; decouples API availability from messaging; enables ordered processing of related events. + +### Workflow Orchestration (Durable Task SDK + DTS) + +**Pattern:** Durable workflow coordination for long-running, stateful, and business-critical process flows. + +Use orchestration when a process needs one or more of these characteristics: + +- Long-running steps that must survive restarts. +- Fan-out or fan-in aggregation across parallel work items. +- Batch processing with retries and controlled concurrency. +- Compensation paths when downstream operations fail. +- External-event waits, timers, and human-approval checkpoints. +- Full execution audit trail and replay semantics. + +CoreEx samples include orchestration hosted in standard .NET worker processes using the Durable Task SDK with a DTS backend, including local emulator support and containerized hosting alignment. + +See [Orchestration with the Durable Task SDK](orchestration.md) for detailed guidance and examples. + +--- + +## Domain-Driven Design + +### Aggregate & Entity Modeling + +**Pattern:** Implement aggregates as root objects that enforce invariants and encapsulate child entities. + +An **aggregate root** orchestrates its entities and ensures consistency: + +```csharp +public class Basket : IIdentifier, IETag, IChangeLog +{ + public Guid Id { get; set; } + public string? ETag { get; set; } + public ChangeLog? 
ChangeLog { get; set; }

    public Guid CustomerId { get; set; }
    public string StatusCode { get; set; } = "Active";

    // Child entities
    private List<BasketItem> _items = new();
    public IReadOnlyList<BasketItem> Items => _items.AsReadOnly();

    // Business rules
    public void AddItem(Guid productId, int quantity)
    {
        if (StatusCode != "Active")
            throw new BusinessException("Cannot add item to checked-out basket.");

        var existing = _items.FirstOrDefault(i => i.ProductId == productId);
        if (existing != null)
            existing.Quantity += quantity;
        else
            _items.Add(new BasketItem { ProductId = productId, Quantity = quantity });
    }

    public void Checkout()
    {
        if (!_items.Any())
            throw new ValidationException("Cannot checkout empty basket.");

        StatusCode = "CheckedOut";
    }
}

public class BasketItem // Child entity, not an aggregate root
{
    public Guid ProductId { get; set; }
    public int Quantity { get; set; }
}
```

Aggregate roots:
- Own their entities (users modify through the root).
- Enforce invariants (business rules).
- Publish integration events.
- Are the transactional boundary.

### Value Objects

**Pattern:** Implement as immutable C# record classes with semantic equality.

Value objects have no identity, only their values matter:

```csharp
public record ItemPricing(
    string UnitOfMeasure,
    int Quantity,
    decimal UnitPrice)
{
    public decimal Total => Quantity * UnitPrice;
}

// Usage
var pricing = new ItemPricing("ea", 5, 19.99m);

// Equality is by value
var pricing2 = new ItemPricing("ea", 5, 19.99m);
Assert.AreEqual(pricing, pricing2); // True

// Immutable
// pricing.Quantity = 10; // CS8852: Init-only property
```

Record classes automatically provide:
- Value-based equality (`Equals`, `GetHashCode`).
- `ToString()` for debugging.
- Deconstruction.

### Integration Events Only

**Pattern:** Focus on integration events (published to outbox/broker) rather than domain events (in-process messaging). 
+ +**Integration Events** — Published to an external event broker; subscribers in other services react. +```csharp +public class ProductCreatedIntegrationEvent +{ + public Guid ProductId { get; set; } + public string Name { get; set; } + public DateTime CreatedAt { get; set; } +} + +// Published to Service Bus for other services to consume +await _unitOfWork.Events.Add(new EventData { ... }); +``` + +**Avoid:** Domain Events + MediatR for in-process messaging. +```csharp +// NOT recommended in CoreEx style +public class ProductCreatedDomainEvent { ... } +_mediator.Publish(new ProductCreatedDomainEvent(...)); // In-process +``` + +**Why:** Keep services decoupled and independent. If you need cross-domain orchestration, use integration events and let services react asynchronously. + +--- + +## Putting It All Together: A Typical Request Flow + +To illustrate how these patterns work together, here's a typical request flow based on the **Contoso sample architecture in this repository**, not a mandatory flow for every CoreEx application. + +This example assumes the samples' **full outboxing and messaging setup**: + +- an API host handling the request +- a database-backed unit-of-work writing to an outbox table +- a separate `Outbox.Relay` host publishing to Azure Service Bus +- another service consuming the resulting integration event + +``` +1. Client: POST /api/products + with Idempotency-Key header + +2. CoreEx Middleware: + - Extract ExecutionContext (tenant, user, culture) + - Check Idempotency-Key (cached response if duplicate) + - Route to controller + +3. ProductController: + - Validate input (ValidationException if invalid) + - Call ProductService + +4. ProductService: + - Create Product entity + - Apply domain rules (throw BusinessException if violated) + - Create UnitOfWork + - Save to repository (within transaction) + - Add integration event to UoW.Events + - Call UoW.CommitAsync() — atomically saves product + event to Outbox + +5. 
Repository: + - Execute INSERT on [Products].[Product] + - Add row to [Products].[Outbox] + - Assign ETag, ChangeLog + - Transaction commits + +6. Separate Outbox.Relay Process: + - Poll [Products].[Outbox] every N seconds + - Find unpublished events + - Publish to Service Bus + - Mark as published + +7. Other Services Subscribe: + - Shopping.Subscribe consumes ProductCreated event + - Syncs product replica to [Shopping].[Product] + +8. CoreEx Response Handler: + - Convert Product to ProductDto (response filtering) + - Apply $fields projection + - Return 201 Created + - Include ETag header and Location header + +9. Client: + - Receives 201 with ETag + - For future updates, uses IF-MATCH: {ETag} header +``` + +--- + +## Summary + +CoreEx provides a cohesive set of patterns and utilities that work together to enable: + +- **Consistent API behavior** across minimal APIs and MVC. +- **Reliable messaging** via transactional outboxes. +- **Durable workflow orchestration** for long-running, compensating, and replayable process flows. +- **Multi-tenancy** and **concurrency** handling built-in. +- **Event-driven architecture** with integration events. +- **Clear separation of concerns** (aggregates, value objects, services). +- **Type-safe operations** (exceptions, Result types, source generation). + +The framework is particularly well-suited for distributed microservices architectures where consistency, reliability, and maintainability are critical. diff --git a/docs/codebase/ARCHITECTURE.md b/docs/codebase/ARCHITECTURE.md new file mode 100644 index 00000000..7ca127cf --- /dev/null +++ b/docs/codebase/ARCHITECTURE.md @@ -0,0 +1,65 @@ +# Architecture + +## Core Sections (Required) + +### 1) Architectural Style + +- Primary style: modular layered architecture for the reusable framework, plus event-driven microservice samples. 
+- Why this classification: sample domains are split into Api, Application, Infrastructure, Domain, Database, Outbox.Relay, and Subscribe projects, and the runtime flow combines synchronous HTTP with outbox-driven Service Bus messaging. +- Primary constraints: multi-target library packaging across net8/net9/net10; SQL Server-backed unit-of-work and outbox flows in sample hosts; CoreEx-centric patterns such as Result-based orchestration, ETag/idempotency, and dynamic service registration. + +### 2) System Flow + +```text +HTTP controller -> application service -> domain aggregate or repository -> SQL Server / HTTP adapter -> outbox or direct publisher -> subscriber/consumer -> response +``` + +Describe the flow in 4-6 steps using file-backed evidence. + +1. A controller receives an HTTP request and delegates through CoreEx.WebApi helpers, for example ProductController.PostAsync and PatchAsync. +2. The application service validates input, loads current state when needed, and coordinates a unit-of-work, for example ProductService and BasketService. +3. For Shopping mutations, domain behavior is applied on Basket and BasketItem before persistence. +4. Infrastructure repositories translate between domain/contracts and EF-backed persistence models, for example BasketRepository and ProductRepository. +5. Cross-service behavior happens through a typed HTTP client and adapter for real-time reservation, plus outbox messages or direct Service Bus publishing for async commands. +6. Relay and subscriber hosts move outbox records to Azure Service Bus and consume messages back into application services. 
+ +### 3) Layer/Module Responsibilities + +| Layer or module | Owns | Must not own | Evidence | +|-----------------|------|--------------|----------| +| API controllers | HTTP routes, request/response semantics, idempotency attributes, WebApi delegation | Domain rules and persistence queries | samples/src/Contoso.Products.Api/Controllers/ProductController.cs | +| Application services | Validation, orchestration, Result pipelines, unit-of-work/event creation | ASP.NET startup and EF entity tracking details | samples/src/Contoso.Products.Application/ProductService.cs; samples/src/Contoso.Shopping.Application/BasketService.cs | +| Domain | Aggregate/entity invariants and mutation rules | Transport and infrastructure concerns | samples/src/Contoso.Shopping.Domain/Basket.cs; samples/src/Contoso.Shopping.Domain/BasketItem.cs | +| Infrastructure | EF DbContext access, query config, mapping, typed clients, adapters | Public HTTP endpoint definitions | samples/src/Contoso.Products.Infrastructure/Repositories/ProductRepository.cs; samples/src/Contoso.Shopping.Infrastructure/Repositories/BasketRepository.cs; samples/src/Contoso.Shopping.Infrastructure/Adapters/ProductAdapter.cs | +| Relay/subscriber hosts | Background message movement and consumption | Business orchestration for user-facing HTTP requests | samples/src/Contoso.Products.Outbox.Relay/Program.cs; samples/src/Contoso.Products.Subscribe/Program.cs | + +### 4) Reused Patterns + +| Pattern | Where found | Why it exists | +|---------|-------------|---------------| +| Unit of Work + Outbox | ProductService, BasketService, Products/Shopping relay hosts | Persist state and enqueue events/commands atomically before broker delivery | +| Repository | ProductRepository, BasketRepository | Separate data access/mapping from application services | +| Aggregate / Entity / Value Object | Basket, BasketItem, ItemPricing | Keep mutation rules and consistency checks in the domain model | +| Anti-corruption / adapter | 
ProductAdapter, ProductsHttpClient | Isolate Shopping from Products API and message semantics | +| Dynamic service registration | AddDynamicServicesUsing in API and subscriber hosts | Reduce explicit DI wiring across layered sample projects | +| Roslyn source generation | gen/CoreEx.Gen, generated .g.cs persistence files | Generate boilerplate and analyzer-time artifacts | + +### 5) Known Architectural Risks + +- Sample host bootstrapping is repeated across Products, Shopping, and Orders hosts; the repeated AddExecutionContext/AddMvcWebApi/cache/SQL/OpenTelemetry wiring increases configuration-drift risk. +- Shopping checkout intentionally mixes a transactional outbox path with a direct broker fallback on failure; that keeps reservations from being stranded, but it also creates two publication paths that must stay behaviorally aligned. + +### 6) Evidence + +- samples/src/Contoso.Products.Api/Controllers/ProductController.cs +- samples/src/Contoso.Products.Api/Program.cs +- samples/src/Contoso.Shopping.Api/Program.cs +- samples/src/Contoso.Products.Application/ProductService.cs +- samples/src/Contoso.Shopping.Application/BasketService.cs +- samples/src/Contoso.Shopping.Domain/Basket.cs +- samples/src/Contoso.Products.Infrastructure/Repositories/ProductRepository.cs +- samples/src/Contoso.Shopping.Infrastructure/Repositories/BasketRepository.cs +- samples/src/Contoso.Shopping.Infrastructure/Adapters/ProductAdapter.cs +- samples/src/Contoso.Products.Outbox.Relay/Program.cs +- samples/src/Contoso.Products.Subscribe/Program.cs +- gen/CoreEx.Gen/CoreEx.Gen.csproj diff --git a/docs/codebase/CONCERNS.md b/docs/codebase/CONCERNS.md new file mode 100644 index 00000000..37bfa572 --- /dev/null +++ b/docs/codebase/CONCERNS.md @@ -0,0 +1,63 @@ +# Codebase Concerns + +## Core Sections (Required) + +### 1) Top Risks (Prioritized) + +| Severity | Concern | Evidence | Impact | Suggested action | +|----------|---------|----------|--------|------------------| +| high | Sample 
credentials and connection strings are committed in local/dev artifacts. | docker-compose.yml; samples/src/Contoso.Products.Database/Program.cs; samples/tests/Contoso.E2E.Runner/appsettings.json | Increases the chance that local-only credentials are reused or copied into non-local environments. | Move sample secrets to user-secrets or env-template files and keep checked-in values obviously non-reusable. | +| medium | Multi-backend intent exists, but the current concrete implementation and scaffolding are SQL Server-centric; this can be misread as either SQL-only or equally mature multi-provider support. | README.md; docs/capabilities.md; src/CoreEx.Database.SqlServer/CoreEx.Database.SqlServer.csproj; docker-compose.yml | Onboarding and design decisions may assume the wrong provider maturity level. | Document provider strategy explicitly as SQL Server-primary with other backends added when needed. | +| medium | Sample host startup is duplicated across multiple API, relay, and subscriber Program.cs files. | samples/src/Contoso.Products.Api/Program.cs; samples/src/Contoso.Shopping.Api/Program.cs; samples/src/Contoso.Orders.Api/Program.cs; samples/src/Contoso.Products.Outbox.Relay/Program.cs; samples/src/Contoso.Products.Subscribe/Program.cs | Repeated bootstrap code can drift across domains and host types. | Extract common host-registration extensions or add tests/assertions around expected startup composition. | +| medium | Shopping checkout uses both transactional outbox publication and a direct broker fallback path. | samples/src/Contoso.Shopping.Application/BasketService.cs; samples/src/Contoso.Shopping.Infrastructure/Adapters/ProductAdapter.cs | Two message publication paths must remain semantically aligned during failure handling. | Add focused tests and documentation around compensation/fallback behavior. 
| + +### 2) Technical Debt + +| Debt item | Why it exists | Where | Risk if ignored | Suggested fix | +|-----------|---------------|-------|-----------------|---------------| +| Repeated host wiring | Each sample host configures overlapping CoreEx, cache, SQL, OpenTelemetry, and health-check setup inline | samples/src/Contoso.Products.Api/Program.cs; samples/src/Contoso.Shopping.Api/Program.cs; samples/src/Contoso.Orders.Api/Program.cs; samples/src/Contoso.Products.Subscribe/Program.cs | Behavioral drift between services and hosts becomes harder to spot | Introduce shared extension methods for common host composition | +| Sample status visibility | README.md describes two complete reference solutions, while the solution also contains Orders and order-workflow sample projects that are still in progress | README.md; CoreEx.sln | New contributors may misclassify in-progress samples as production-ready references | Label in-progress sample status in top-level docs and sample READMEs | +| Secret handling in examples | Sample-local secrets are embedded directly in repo files | docker-compose.yml; samples/src/Contoso.Products.Database/Program.cs; samples/tests/Contoso.E2E.Runner/appsettings.json | Normalizes insecure copy/paste patterns | Replace checked-in secrets with placeholders and env-driven overrides | + +### 3) Security Concerns + +| Risk | OWASP category (if applicable) | Evidence | Current mitigation | Gap | +|------|--------------------------------|----------|--------------------|-----| +| Checked-in passwords/connection strings in sample assets | A02 Cryptographic Failures / Secrets Management | docker-compose.yml; samples/src/Contoso.Products.Database/Program.cs; samples/tests/Contoso.E2E.Runner/appsettings.json | These appear scoped to local/dev usage only | The repo does not provide a committed env template or explicit secret-handling guardrail for these values | +| Local Aspire dashboard allows anonymous access | A01 Broken Access Control | 
docker-compose.yml | This is clearly configured for local development only | No environment-specific guard in the committed compose file other than the setting name itself | +| Internal Products API client shows no explicit auth configuration in inspected code | A01 Broken Access Control | samples/src/Contoso.Shopping.Infrastructure/Clients/ProductsHttpClient.cs; samples/src/Contoso.Shopping.Api/Program.cs | [TODO] auth may be applied elsewhere, but it was not visible in inspected files | The inspected client/host files do not show authentication or authorization for the inter-service call | + +### 4) Performance and Scaling Concerns + +| Concern | Evidence | Current symptom | Scaling risk | Suggested improvement | +|---------|----------|-----------------|-------------|-----------------------| +| Repeated cache/telemetry/bootstrap configuration per host | samples/src/Contoso.Products.Api/Program.cs; samples/src/Contoso.Shopping.Api/Program.cs; samples/src/Contoso.Products.Subscribe/Program.cs | Configuration parity depends on copy/paste discipline | One host can lag behind others in cache or telemetry behavior | Centralize shared startup composition | +| Checkout performs a synchronous cross-service reservation call before finalizing the transaction | samples/src/Contoso.Shopping.Application/BasketService.cs; samples/src/Contoso.Shopping.Infrastructure/Adapters/ProductAdapter.cs | Checkout latency includes remote API availability and response time | Higher latency or transient failures in Products directly affect Shopping checkout | Add explicit resilience policy configuration and document timeout/retry expectations | +| No explicit timeout/retry settings were found in inspected HTTP client wiring | samples/src/Contoso.Shopping.Api/Program.cs; samples/src/Contoso.Shopping.Infrastructure/Clients/ProductsHttpClient.cs | Runtime behavior depends on defaults or hidden configuration | Failure recovery characteristics are hard to reason about | Add explicit 
resilience/timeout configuration in host setup | + +### 5) Fragile/High-Churn Areas + +| Area | Why fragile | Churn signal | Safe change strategy | +|------|-------------|-------------|----------------------| +| samples/src/*/Program.cs host bootstraps | Several hosts repeat nearly the same registration pattern with small variations | Structural duplication is visible in inspected Program.cs files; 90-day and 365-day git queries over src and samples/src produced a flat result with no clear hotspot above 1 touched commit per listed file | Change one host pattern, then compare every sibling host and run the corresponding sample tests | +| src/CoreEx.Validation/* | Validation files are the most visible source family in the one-year churn sample, but the signal is still flat at 1 touched commit per listed file | Terminal git log --since='365 days ago' sample returned multiple CoreEx.Validation files, each with count 1 | Keep changes small and run the related unit test projects immediately | + +### 6) [ASK USER] Questions + +1. No open [ASK USER] items remain for this pass. 
+ +### 7) Evidence + +- README.md +- docs/capabilities.md +- CoreEx.sln +- docker-compose.yml +- src/CoreEx.Database.SqlServer/CoreEx.Database.SqlServer.csproj +- samples/src/Contoso.Products.Api/Program.cs +- samples/src/Contoso.Shopping.Api/Program.cs +- samples/src/Contoso.Orders.Api/Program.cs +- samples/src/Contoso.Products.Subscribe/Program.cs +- samples/src/Contoso.Shopping.Application/BasketService.cs +- samples/src/Contoso.Shopping.Infrastructure/Adapters/ProductAdapter.cs +- samples/src/Contoso.Products.Database/Program.cs +- samples/tests/Contoso.E2E.Runner/appsettings.json diff --git a/docs/codebase/CONVENTIONS.md b/docs/codebase/CONVENTIONS.md new file mode 100644 index 00000000..afd63392 --- /dev/null +++ b/docs/codebase/CONVENTIONS.md @@ -0,0 +1,51 @@ +# Coding Conventions + +## Core Sections (Required) + +### 1) Naming Rules + +| Item | Rule | Example | Evidence | +|------|------|---------|----------| +| Files | PascalCase filenames for C# types and test files | ProductService.cs, BasketRepository.cs, ExceptionTests.cs | samples/src/Contoso.Products.Application/ProductService.cs; samples/src/Contoso.Shopping.Infrastructure/Repositories/BasketRepository.cs; tests/CoreEx.Test.Unit/ExceptionTests.cs | +| Functions/methods | PascalCase methods; async methods usually end with Async | CreateAsync, CheckoutAsync, DeleteAsync | samples/src/Contoso.Products.Application/ProductService.cs; samples/src/Contoso.Shopping.Application/BasketService.cs | +| Types/interfaces | Types use PascalCase; interfaces use I-prefix | Basket, ProductController, IProductService, IBasketRepository | samples/src/Contoso.Shopping.Domain/Basket.cs; samples/src/Contoso.Products.Api/Controllers/ProductController.cs; samples/src/Contoso.Products.Application/Interfaces/IProductService.cs; samples/src/Contoso.Shopping.Application/Repositories/IBasketRepository.cs | +| Constants/env vars | Environment variables are uppercase or configuration-key style | TASKHUB, dts-endpoint, 
E2E__Products__BaseAddress | samples/src/Contoso.Order.Workflow.Worker/Program.cs; samples/README.md | + +### 2) Formatting and Linting + +- Formatter: .editorconfig defines spaces, 4-space indentation for .cs, and 2-space indentation for json/xml/yaml/props/csproj/sln/sql. +- Linter: [TODO] no dedicated style linter config such as StyleCop or Roslyn ruleset file was found in the inspected repo files; analyzer packages exist for the generator project, and build settings enforce warnings as errors. +- Most relevant enforced rules: Nullable enabled, ImplicitUsings enabled, LangVersion preview, TreatWarningsAsErrors true. +- Run commands: dotnet build CoreEx.sln; dotnet test CoreEx.sln. + +### 3) Import and Module Conventions + +- Import grouping/order: using directives sit at the top of the file and projects commonly centralize repeated imports in GlobalUsing.cs files. +- Alias vs relative import policy: standard project references and namespace imports are used; no alternate aliasing scheme was found beyond a test-only alias for ExecutionContext. +- Public exports/barrel policy: GlobalUsing.cs is used per project; [TODO] no broader documented export policy was found. + +### 4) Error and Logging Conventions + +- Error strategy by layer: application and domain code use CoreEx exceptions and Result/BusinessError/NotFoundError flows; API hosts apply UseCoreExExceptionHandler so exceptions map to HTTP responses. +- Logging style and required context fields: typed ILogger is used where explicit logging appears, and sample host appsettings set Logging:LogLevel with Default and category overrides. +- Sensitive-data redaction rules: [TODO] no explicit redaction policy or sanitizer configuration was found in the inspected files. + +### 5) Testing Conventions + +- Test file naming/location rule: tests live under tests/ and samples/tests/; filenames commonly end in Tests.cs or split partial suites such as ProductMutateTests.Create.cs. 
+- Mocking strategy norm: UnitTestEx tester base classes, expected outbox publisher wrappers, and MockHttpClientFactory for downstream HTTP isolation are used in samples. +- Coverage expectation: coverlet.collector is referenced; [TODO] no committed coverage threshold or reporting gate was found. + +### 6) Evidence + +- .editorconfig +- src/Directory.Build.props +- tests/CoreEx.Test.Unit/CoreEx.Test.Unit.csproj +- tests/CoreEx.Test.Unit/ExceptionTests.cs +- samples/src/Contoso.Products.Api/GlobalUsing.cs +- samples/src/Contoso.Products.Api/Controllers/ProductController.cs +- samples/src/Contoso.Products.Application/ProductService.cs +- samples/src/Contoso.Shopping.Application/BasketService.cs +- samples/src/Contoso.Order.Workflow.Worker/Program.cs +- samples/tests/Contoso.Products.Test.Api/ProductMutateTests.Create.cs +- samples/README.md diff --git a/docs/codebase/INTEGRATIONS.md b/docs/codebase/INTEGRATIONS.md new file mode 100644 index 00000000..6a12c5c0 --- /dev/null +++ b/docs/codebase/INTEGRATIONS.md @@ -0,0 +1,54 @@ +# External Integrations + +## Core Sections (Required) + +### 1) Integration Inventory + +| System | Type (API/DB/Queue/etc) | Purpose | Auth model | Criticality | Evidence | +|--------|---------------------------|---------|------------|-------------|----------| +| SQL Server | DB | Primary persistence for sample domains, outbox tables, and migration utilities | Connection string-based | High | docker-compose.yml; samples/src/Contoso.Products.Database/Program.cs; samples/src/Contoso.Products.Api/Program.cs | +| Redis | Cache/backplane | L2 distributed cache and FusionCache backplane | Connection configured through Aspire/registered ConfigurationOptions | Medium | docker-compose.yml; samples/src/Contoso.Products.Api/Program.cs; samples/src/Contoso.Shopping.Api/Program.cs | +| Azure Service Bus | Queue/topic broker | Async event publishing, relay, and subscriber processing | Connection configured through Aspire/host config; emulator config 
committed for local use | High | servicebus/Config.json; samples/src/Contoso.Products.Outbox.Relay/Program.cs; samples/src/Contoso.Products.Subscribe/Program.cs | +| Products API from Shopping | Internal HTTP API | Real-time inventory reservation during checkout | [TODO] no explicit auth configuration was found in the inspected Shopping client code | High | samples/src/Contoso.Shopping.Infrastructure/Clients/ProductsHttpClient.cs; samples/src/Contoso.Shopping.Infrastructure/Adapters/ProductAdapter.cs | +| OTLP / Aspire dashboard | Observability endpoint | Export traces from sample hosts and inspect them locally | No auth found in local compose config; dashboard is configured for anonymous local access | Medium | docker-compose.yml; samples/src/Contoso.Products.Api/Program.cs; samples/src/Contoso.Products.Outbox.Relay/Program.cs | +| Durable Task Scheduler | Workflow runtime | Order workflow worker orchestration sample | Connection string assembled from endpoint/task hub and auth mode | Medium | samples/src/Contoso.Order.Workflow.Worker/Program.cs | + +### 2) Data Stores + +| Store | Role | Access layer | Key risk | Evidence | +|-------|------|--------------|----------|----------| +| SQL Server | Transactional domain storage, outbox, and migration target | EF-backed repositories and DbEx console utilities | Sample connection strings and passwords are committed in local/dev artifacts | samples/src/Contoso.Shopping.Infrastructure/Contoso.Shopping.Infrastructure.csproj; samples/src/Contoso.Products.Database/Program.cs; samples/tests/Contoso.E2E.Runner/appsettings.json | +| Redis | Hybrid cache and backplane | FusionCache + AddRedisDistributedCache + CoreEx hybrid cache abstractions | Cache invalidation/consistency depends on host parity across repeated startup code | samples/src/Contoso.Products.Api/Program.cs; samples/src/Contoso.Shopping.Api/Program.cs | + +### 3) Secrets and Credentials Handling + +- Credential sources: docker-compose environment variables, 
appsettings.json for samples/E2E runner, UserSecretsId in the Aspire host, and runtime configuration/environment variables in the workflow worker. +- Hardcoding checks: committed sample credentials and connection strings are present in docker-compose.yml, samples/src/Contoso.Products.Database/Program.cs, and samples/tests/Contoso.E2E.Runner/appsettings.json. +- Rotation or lifecycle notes: [TODO] no secret-rotation guidance or secret-manager policy file was found. + +### 4) Reliability and Failure Behavior + +- Retry/backoff behavior: transactional outbox relays are implemented for Products and Shopping; [TODO] no explicit HTTP retry/backoff policy configuration was found in the inspected host files. +- Timeout policy: [TODO] no explicit timeout configuration was found in the inspected host or client files. +- Circuit-breaker or fallback behavior: Shopping checkout falls back to direct broker publication for reservation cancellation if the transactional path fails; Service Bus subscriber sessions set MaxConcurrentSessions and emulator MaxDeliveryCount is configured. + +### 5) Observability for Integrations + +- Logging around external calls: yes, host-level logging is configured via appsettings and the checkout failure path logs an error before sending a direct cancellation command. +- Metrics/tracing coverage: yes, sample APIs, relays, subscribers, and the workflow worker all add OpenTelemetry tracing and OTLP export. +- Missing visibility gaps: [TODO] no committed alerting, dashboard provisioning, or SLO configuration was found beyond the local Aspire dashboard container. 
+ +### 6) Evidence + +- docker-compose.yml +- servicebus/Config.json +- samples/aspire/Contoso.Aspire/Contoso.Aspire.csproj +- samples/src/Contoso.Products.Api/Program.cs +- samples/src/Contoso.Shopping.Api/Program.cs +- samples/src/Contoso.Products.Outbox.Relay/Program.cs +- samples/src/Contoso.Products.Subscribe/Program.cs +- samples/src/Contoso.Shopping.Infrastructure/Clients/ProductsHttpClient.cs +- samples/src/Contoso.Shopping.Infrastructure/Adapters/ProductAdapter.cs +- samples/src/Contoso.Order.Workflow.Worker/Program.cs +- samples/src/Contoso.Products.Database/Program.cs +- samples/tests/Contoso.E2E.Runner/appsettings.json diff --git a/docs/codebase/STACK.md b/docs/codebase/STACK.md new file mode 100644 index 00000000..3ecb0ed7 --- /dev/null +++ b/docs/codebase/STACK.md @@ -0,0 +1,68 @@ +# Technology Stack + +## Core Sections (Required) + +### 1) Runtime Summary + +| Area | Value | Evidence | +|------|-------|----------| +| Primary language | C# | src/Directory.Build.props; CoreEx.sln | +| Runtime + version | Reusable libraries target net8.0, net9.0, and net10.0; sample hosts use net10.0; generator targets netstandard2.0 | src/Directory.Build.props; samples/Directory.Build.props; samples/aspire/Contoso.Aspire/Contoso.Aspire.csproj; gen/CoreEx.Gen/CoreEx.Gen.csproj | +| Package manager | NuGet with Central Package Management | Directory.Packages.props | +| Module/build system | MSBuild project/solution build with SDK-style .csproj files | CoreEx.sln; src/CoreEx/CoreEx.csproj | + +### 2) Production Frameworks and Dependencies + +| Dependency | Version | Role in system | Evidence | +|------------|---------|----------------|----------| +| ASP.NET Core | 8.0.24 / 9.0.13 / 10.0.3 | Web API hosting, controllers, OpenAPI support | Directory.Packages.props; src/CoreEx.AspNetCore/CoreEx.AspNetCore.csproj | +| Entity Framework Core + SQL Server | 8.0.22-10.0.0 | Data access for sample infrastructure and CoreEx EF integration | Directory.Packages.props; 
samples/src/Contoso.Shopping.Infrastructure/Contoso.Shopping.Infrastructure.csproj | +| Microsoft.Data.SqlClient + Aspire SQL client | 6.1.4 / 13.2.2 | SQL Server connectivity | Directory.Packages.props; src/CoreEx.Database.SqlServer/CoreEx.Database.SqlServer.csproj | +| NSwag.AspNetCore | 14.6.2 | OpenAPI generation for sample APIs | Directory.Packages.props; samples/src/Contoso.Products.Api/Program.cs | +| OpenTelemetry | 1.15.0 family | Tracing and OTLP export across APIs, relays, subscribers, and workflow worker | Directory.Packages.props; samples/src/Contoso.Products.Api/Program.cs; samples/src/Contoso.Products.Outbox.Relay/Program.cs; samples/src/Contoso.Order.Workflow.Worker/Program.cs | +| Azure Service Bus Aspire integration | 13.2.2 | Messaging publisher/subscriber wiring | Directory.Packages.props; src/CoreEx.Azure.Messaging.ServiceBus/CoreEx.Azure.Messaging.ServiceBus.csproj; samples/src/Contoso.Products.Subscribe/Program.cs | +| FusionCache + Redis backplane | 2.5.0 | Hybrid cache and idempotency/caching support | Directory.Packages.props; src/CoreEx.Caching.FusionCache/CoreEx.Caching.FusionCache.csproj; samples/src/Contoso.Shopping.Api/Program.cs | +| CloudNative.CloudEvents.SystemTextJson | 2.8.0 | CloudEvent interoperability | Directory.Packages.props | +| Microsoft.DurableTask.* | 1.17.1 | Order workflow sample orchestration and worker runtime | Directory.Packages.props; samples/src/Contoso.Order.Workflow.Worker/Program.cs; samples/src/Contoso.Order.Workflow.Workflow/Contoso.Order.Workflow.Workflow.csproj | +| DbEx.SqlServer | 3.0.0-preview-2 | Database migration/data console utilities in samples/tests | Directory.Packages.props; samples/src/Contoso.Products.Database/Program.cs | + +### 3) Development Toolchain + +| Tool | Purpose | Evidence | +|------|---------|----------| +| NUnit | Test framework | Directory.Packages.props; tests/CoreEx.Test.Unit/CoreEx.Test.Unit.csproj | +| AwesomeAssertions | Assertions | Directory.Packages.props; 
tests/CoreEx.Test.Unit/CoreEx.Test.Unit.csproj | +| coverlet.collector | Test coverage collection | Directory.Packages.props; tests/CoreEx.Test.Unit/CoreEx.Test.Unit.csproj | +| UnitTestEx / UnitTestEx.NUnit | API and integration-style test helpers | Directory.Packages.props; samples/tests/Contoso.Products.Test.Api/Contoso.Products.Test.Api.csproj | +| CoreEx.Gen | Roslyn analyzer/source generator packaged as an analyzer | gen/CoreEx.Gen/CoreEx.Gen.csproj; tests/CoreEx.Test.Unit/CoreEx.Test.Unit.csproj | +| .editorconfig | Formatting baseline | .editorconfig | + +### 4) Key Commands + +```bash +dotnet build CoreEx.sln +dotnet test CoreEx.sln +dotnet run --project samples/aspire/Contoso.Aspire +docker compose up -d db-sql-server redis-cache servicebus-emulator aspire-dashboard dts-emulator +``` + +### 5) Environment and Config + +- Config sources: appsettings.json and appsettings.Development.json in sample hosts, docker-compose.yml, servicebus/Config.json, central MSBuild props, and a UserSecretsId in the Aspire host. +- Required env vars: dts-endpoint, TASKHUB, [TODO] additional deployment/runtime variables are not summarized in a committed env template. +- Deployment/runtime constraints: local sample execution expects SQL Server, Redis, Azure Service Bus emulator, and Aspire dashboard infrastructure. Current concrete implementation/scaffolding in inspected code is SQL Server-primary, with broader backend support discussed in docs as a capability direction. 
+ +### 6) Evidence + +- Directory.Packages.props +- src/Directory.Build.props +- CoreEx.sln +- src/CoreEx/CoreEx.csproj +- src/CoreEx.Database.SqlServer/CoreEx.Database.SqlServer.csproj +- src/CoreEx.Azure.Messaging.ServiceBus/CoreEx.Azure.Messaging.ServiceBus.csproj +- src/CoreEx.Caching.FusionCache/CoreEx.Caching.FusionCache.csproj +- samples/aspire/Contoso.Aspire/Contoso.Aspire.csproj +- samples/src/Contoso.Products.Api/Program.cs +- samples/src/Contoso.Shopping.Api/Program.cs +- samples/src/Contoso.Order.Workflow.Worker/Program.cs +- docker-compose.yml diff --git a/docs/codebase/STRUCTURE.md b/docs/codebase/STRUCTURE.md new file mode 100644 index 00000000..eda8a4ac --- /dev/null +++ b/docs/codebase/STRUCTURE.md @@ -0,0 +1,56 @@ +# Codebase Structure + +## Core Sections (Required) + +### 1) Top-Level Map + +List only meaningful top-level directories and files. + +| Path | Purpose | Evidence | +|------|---------|----------| +| src/ | CoreEx reusable library packages | CoreEx.sln; README.md | +| tests/ | Unit and API-style tests for CoreEx libraries | CoreEx.sln; tests/CoreEx.Test.Unit/CoreEx.Test.Unit.csproj | +| samples/src/ | Contoso sample applications and hosts by domain/layer, including in-progress Orders and Order.Workflow sample areas | CoreEx.sln; samples/README.md | +| samples/tests/ | Sample API, relay, subscriber, unit, and E2E test projects | CoreEx.sln; samples/README.md | +| samples/aspire/ | Aspire AppHost that references runnable sample services | samples/aspire/Contoso.Aspire/Contoso.Aspire.csproj | +| gen/ | Roslyn source generator/analyzer project | CoreEx.sln; gen/CoreEx.Gen/CoreEx.Gen.csproj | +| docs/ | Technical notes and generated codebase knowledge docs | README.md; docs/capabilities.md | +| ref/ | Separate reference solution content (NDCOrderOrchestration.sln) | list_dir output; ref/NDCOrderOrchestration.sln | +| servicebus/ | Emulator configuration for local Azure Service Bus topics/subscriptions | CoreEx.sln; servicebus/Config.json | 
+| tools/ | Repo utility area | list_dir output | + +### 2) Entry Points + +- Main runtime entry: there is no single root application entry; runnable entry points are sample host Program.cs files under samples/src/ plus the Aspire AppHost in samples/aspire/Contoso.Aspire. +- Secondary entry points (worker/cli/jobs): database console utilities in samples/src/*Database/Program.cs, outbox relays in samples/src/*.Outbox.Relay/Program.cs, subscriber hosts in samples/src/*.Subscribe/Program.cs, and the order workflow worker in samples/src/Contoso.Order.Workflow.Worker/Program.cs. +- How entry is selected (script/config): projects are selected explicitly via dotnet run --project ..., as shown in README.md and samples/README.md. + +### 3) Module Boundaries + +| Boundary | What belongs here | What must not be here | +|----------|-------------------|------------------------| +| src/CoreEx.* | Reusable framework primitives, ASP.NET integration, data, validation, events, caching, and unit-testing helpers | Sample-specific domain logic | +| samples/src/Contoso.*.Api | HTTP host bootstrap and controllers | Direct persistence details beyond injected services/repositories | +| samples/src/Contoso.*.Application | Use-case orchestration, validation, repository interfaces, adapters, and service contracts | ASP.NET host wiring and low-level EF mappings | +| samples/src/Contoso.Shopping.Domain | Aggregate/entity/value-object behavior | HTTP transport or EF DbContext concerns | +| samples/src/Contoso.*.Infrastructure | EF repositories, mappers, typed clients, adapters, outbox publishers | Web host startup | +| samples/tests/ and tests/ | Automated tests, test data, and test-only resources | Production host/runtime code | + +### 4) Naming and Organization Rules + +- File naming pattern: PascalCase .cs filenames such as ProductService.cs, BasketRepository.cs, ProductController.cs, and ExceptionTests.cs. 
+- Directory organization pattern: mostly layer-first under samples (Api, Application, Infrastructure, Domain, Database, Subscribe, Outbox.Relay) and package-first under src (CoreEx.*, CoreEx.AspNetCore.*, CoreEx.Database.*). +- Import aliasing or path conventions: standard C# project references and global using files are used; no TypeScript-style path alias system exists in the inspected files. + +### 5) Evidence + +- CoreEx.sln +- README.md +- samples/README.md +- samples/aspire/Contoso.Aspire/Contoso.Aspire.csproj +- samples/src/Contoso.Products.Api/Program.cs +- samples/src/Contoso.Products.Database/Program.cs +- samples/src/Contoso.Products.Outbox.Relay/Program.cs +- samples/src/Contoso.Products.Subscribe/Program.cs +- samples/src/Contoso.Shopping.Domain/Basket.cs +- gen/CoreEx.Gen/CoreEx.Gen.csproj diff --git a/docs/codebase/TESTING.md b/docs/codebase/TESTING.md new file mode 100644 index 00000000..6d97caea --- /dev/null +++ b/docs/codebase/TESTING.md @@ -0,0 +1,53 @@ +# Testing Patterns + +## Core Sections (Required) + +### 1) Test Stack and Commands + +- Primary test framework: NUnit 4.3.2. +- Assertion/mocking tools: AwesomeAssertions, UnitTestEx.NUnit, coverlet.collector, and sample-specific mock helpers such as MockHttpClientFactory described in samples/README.md. +- Commands: + +```bash +dotnet test CoreEx.sln +dotnet test tests/CoreEx.Test.Unit/CoreEx.Test.Unit.csproj +dotnet test samples/tests/Contoso.Products.Test.Api/Contoso.Products.Test.Api.csproj +[TODO] no dedicated committed coverage command beyond standard dotnet test with coverlet.collector references was found. +``` + +### 2) Test Layout + +- Test file placement pattern: reusable library tests live under tests/; sample tests live under samples/tests/ with separate projects for unit, API, relay, subscriber, common test data, and E2E runner. 
+- Naming convention: project names end in .Test.Unit, .Test.Api, .Test.Subscribe, .Test.Outbox.Relay, or .Test.Common; files end in Tests.cs or split a suite into partial files like ProductMutateTests.Create.cs. +- Setup files and where they run: sample integration projects copy appsettings.unittest.json, embed Resources/**/*, and keep shared YAML test data in *.Test.Common projects; sample README describes OneTimeSetUp database migration, cache clearing, event capture, and HTTP client replacement. + +### 3) Test Scope Matrix + +| Scope | Covered? | Typical target | Notes | +|-------|----------|----------------|-------| +| Unit | yes | CoreEx library primitives and sample validators/domain behavior | tests/CoreEx.Test.Unit and Contoso.Products.Test.Unit are present | +| Integration | yes | Sample APIs, subscriber hosts, and relay hosts | Contoso.Products.Test.Api, Contoso.Shopping.Test.Api, Contoso.Products.Test.Subscribe, and Contoso.Products.Test.Outbox.Relay are present | +| E2E | yes | Cross-service sample scenarios against running APIs | Contoso.E2E.Runner is an interactive console runner | + +### 4) Mocking and Isolation Strategy + +- Main mocking approach: UnitTestEx tester base classes drive HTTP and generic tests; sample tests wrap outbox/service bus publishers and replace downstream HTTP clients with mocks. +- Isolation guarantees: sample integration setup migrates and reseeds SQL data, clears FusionCache/Redis state, and captures emitted events before assertions. +- Common failure mode in tests: [TODO] no committed flaky-test catalog or failure analysis file was found. + +### 5) Coverage and Quality Signals + +- Coverage tool + threshold: coverlet.collector is referenced; [TODO] no threshold was found. +- Current reported coverage: [TODO] no committed coverage report or badge was found. +- Known gaps/flaky areas: [TODO] none were explicitly documented in the inspected files. 
+ +### 6) Evidence + +- Directory.Packages.props +- tests/CoreEx.Test.Unit/CoreEx.Test.Unit.csproj +- tests/CoreEx.Test.Unit/ExceptionTests.cs +- samples/tests/Contoso.Products.Test.Api/Contoso.Products.Test.Api.csproj +- samples/tests/Contoso.Products.Test.Api/ProductMutateTests.Create.cs +- samples/tests/Contoso.E2E.Runner/Contoso.E2E.Runner.csproj +- samples/tests/Contoso.E2E.Runner/appsettings.json +- samples/README.md diff --git a/docs/orchestration.md b/docs/orchestration.md new file mode 100644 index 00000000..95179bd8 --- /dev/null +++ b/docs/orchestration.md @@ -0,0 +1,557 @@ +# Orchestration with the Durable Task SDK + +This document explains when and how to incorporate workflow orchestration into a CoreEx-based application landscape. It draws on the `Contoso.Order.Workflow.*` sample, which demonstrates an order validation and submission workflow backed by Durable Task Scheduler (DTS). + +## Table of Contents + +- [When to Use Orchestration](#when-to-use-orchestration) +- [Durable Task SDK with DTS vs Durable Functions](#durable-task-sdk-with-dts-vs-durable-functions) +- [Long-Running Workflows](#long-running-workflows) +- [Business-Critical Orchestration](#business-critical-orchestration) +- [Compensation and Retries](#compensation-and-retries) +- [Deterministic Execution](#deterministic-execution) +- [Fan-Out / Fan-In](#fan-out--fan-in) +- [Batch Processing](#batch-processing) +- [External Events and Human Approval](#external-events-and-human-approval) +- [Auditability and Replay](#auditability-and-replay) +- [DTS Dashboard for Observability and Management](#dts-dashboard-for-observability-and-management) +- [Project Layout](#project-layout) +- [Worker Host Setup](#worker-host-setup) +- [Client Registration](#client-registration) +- [Running Locally](#running-locally) + +--- + +## When to Use Orchestration + +Standard request/response services, backed by application services, repositories, and outbox-relay messaging, cover the majority of business 
operations. Workflow orchestration solves a different class of problems where those patterns alone are insufficient. + +| Scenario | Request/response + outbox sufficient? | Orchestration adds value? | +|---|---|---| +| CRUD operations with side-effect events | Yes | No | +| Simple pub/sub fan-out (fire-and-forget, no aggregated result) | Yes | No | +| Fan-out with aggregated result or partial-failure handling | No | Yes | +| Batch processing of a variable-size work list | No | Yes | +| Throttled parallel work with a concurrency cap | No | Yes | +| External-event wait (human approval, webhook callback) | No | Yes | +| Multi-step process spanning seconds to days | No | Yes | +| Process requiring compensation on failure | No | Yes | +| Steps that must run in strict order with branching | No | Yes | +| Audit trail of every execution step required | No | Yes | +| Step must retry independently from the whole workflow | No | Yes | + +Choose orchestration when at least one of those characteristics is central to the business process. + +--- + +## Durable Task SDK with DTS vs Durable Functions + +The Durable Task SDK and Durable Functions share the same core orchestration concepts: orchestrators, activities, deterministic replay, durable timers, and external events. Both can use Durable Task Scheduler (DTS) as the durable backend. The primary difference is runtime hosting preference rather than orchestration semantics. + +### What is different + +Durable Functions is a Functions-hosted programming model. It is designed around Azure Functions triggers, bindings, and the Functions runtime lifecycle. That model is productive when the application is already centered on Functions and event-triggered serverless hosting. + +The Durable Task SDK with DTS is a general-purpose .NET library and backend combination. 
Instead of writing against the Azure Functions host, you write orchestrators and activities and then host them inside any .NET process that can register a worker and a client. In this repository, the workflow is hosted in a normal ASP.NET Core process: + +```csharp +builder.Services.AddDurableTaskWorker() + .AddTasks(registry => + { + registry.AddOrchestrator<OrderWorkflowOrchestration>(); + registry.AddActivity<ValidateOrderActivity>(); + registry.AddActivity<SubmitOrderActivity>(); + }) + .UseDurableTaskScheduler(connectionString); +``` + +And any host can schedule or query workflow instances through a normal client registration: + +```csharp +services.AddDurableTaskClient(durableTaskBuilder => +{ + durableTaskBuilder.UseDurableTaskScheduler(connectionString); +}); +``` + +### Why that matters in practice + +| Concern | Durable Functions | Durable Task SDK with DTS | +|---|---|---| +| Primary hosting model | Azure Functions runtime | Any .NET host | +| Programming surface | Triggers and bindings | Explicit worker and client APIs | +| DTS backend support | Yes | Yes | +| Best fit | Serverless Functions applications | Existing APIs, workers, services, and containerized apps | +| Runtime dependency | Functions host | No Functions host required | +| Local backend story | Can run locally with emulator tooling | DTS emulator can be started directly as infrastructure | +| Container hosting | Possible, but still centered on the Functions runtime model | Natural fit for regular ASP.NET Core or worker containers | + +The difference is slight at the orchestration-code level, but important at the application-hosting level. If the goal is to use durable workflows inside an existing service landscape rather than build a Functions application, the Durable Task SDK with DTS is often the more direct fit. If the solution is already Functions-centric, Durable Functions with DTS can provide the same durable backend while preserving Functions triggers and bindings. 
+ +### Local emulator benefit + +One of the practical advantages of DTS is that the backend can be run locally as infrastructure, without switching the application into a Functions-hosting model. This repository already includes the emulator in [docker-compose.yml](../docker-compose.yml): + +```yaml +dts-emulator: + image: mcr.microsoft.com/dts/dts-emulator:latest + environment: + DTS_TASK_HUB_NAMES: "default,order" + ports: + - "8080:8080" + - "8082:8082" +``` + +That means developers can run the orchestration backend locally, start the workflow worker, and test orchestration behavior end-to-end without needing Azure-hosted infrastructure. In the sample worker, the connection logic explicitly detects the local emulator and switches authentication to `None`: + +```csharp +var isLocalEmulator = hostAddress.StartsWith("http://localhost:8080", StringComparison.OrdinalIgnoreCase); + +var connectionString = isLocalEmulator + ? $"Endpoint={hostAddress};TaskHub={taskHubName};Authentication=None" + : $"Endpoint={hostAddress};TaskHub={taskHubName};Authentication=DefaultAzure"; +``` + +This gives a clean local-development loop: + +1. Start the DTS emulator with container infrastructure. +2. Run the worker host locally. +3. Run an API, console app, or test that schedules orchestration instances. +4. Observe orchestration status and traces without changing the application architecture. + +### Container-hosting benefit + +Because the Durable Task SDK is hosted inside ordinary .NET applications, it fits naturally into containerized environments. A workflow worker can be packaged exactly like any other ASP.NET Core or background-service container, and the DTS backend can run either as the local emulator or as a managed service. + +That is useful when the broader application landscape already uses: + +- Containerized APIs and background workers. +- Kubernetes, Container Apps, or Docker Compose for local and deployed environments. 
+- Shared OpenTelemetry, health checks, and common ASP.NET Core hosting patterns. + +In this sample, the worker is just another host process with logging, OpenTelemetry, and health checks, not a special Functions runtime host. That reduces the amount of platform-specific infrastructure needed when orchestration is only one capability inside a larger service estate. + +### Guidance + +Prefer Durable Functions when the solution is intentionally Functions-centric and benefits from trigger-and-binding composition. + +Prefer the Durable Task SDK with DTS when: + +- The application is already an API, worker, or service-host landscape. +- You want orchestration without adopting the Azure Functions runtime. +- You want to run the backend locally through the DTS emulator. +- You want workflow workers to be packaged and deployed like ordinary containers. + +--- + +## Long-Running Workflows + +Durable orchestrations persist their state between steps. A workflow can be suspended while waiting for an external event, a timer, or a slow downstream system, and then resume without holding a thread or blocking an HTTP request. + +**Apply this when:** +- A business process spans minutes, hours, or days, for example order fulfilment, approval chains, or scheduled reminders. +- Steps involve human interaction, third-party callbacks, or polling. +- The initiating HTTP request cannot or should not block until the process finishes. + +**Pattern in the sample:** + +The `OrderWorkflowOrchestration` is initiated by a client call that returns an instance ID immediately. 
The caller can poll for status later using `GetMetadataAsync`: + +```csharp +var instanceId = await _orderWorkflowClient.StartAsync(request, cancellationToken: ct); +var metadata = await _orderWorkflowClient.GetMetadataAsync(instanceId); +``` + +The orchestration itself runs as a durable sequence of activity calls, each persisted between steps: + +```csharp +public override async Task<OrderWorkflowResult> RunAsync( + TaskOrchestrationContext context, OrderWorkflowRequest input) +{ + var validation = await context.CallActivityAsync<bool>( + nameof(ValidateOrderActivity), + new ValidateOrderActivityInput(input.OrderId, input.Amount, input.Currency)); + + if (!validation) + { + return new OrderWorkflowResult( + input.OrderId, + false, + "Order request failed validation.", + context.CurrentUtcDateTime); + } + + return await context.CallActivityAsync<OrderWorkflowResult>( + nameof(SubmitOrderActivity), + new SubmitOrderActivityInput(input.OrderId, input.Amount, input.Currency, input.RequestedBy)); +} +``` + +Each `CallActivityAsync` checkpoint is recorded. If the worker process restarts between steps, the orchestration replays only what is needed to reach the last durable checkpoint. + +--- + +## Business-Critical Orchestration + +Orchestration guarantees that every step is recorded and that the overall process will eventually reach a terminal state, even across process restarts or transient infrastructure failures. + +**Apply this when:** +- Partial execution of a process would leave the system in an inconsistent or unacceptable state. +- A process coordinates writes across multiple services or systems that do not share a transaction boundary. +- Regulatory or commercial requirements demand that every step and outcome is traceable. + +**Guidance:** +- Model each external call or side-effecting operation as a discrete `TaskActivity`. +- Keep orchestrator code free of direct I/O. +- Keep contracts serializable and explicit. 
+- Name activities and orchestrations clearly because those names become part of operations and diagnostics. + +**Sample activity pattern:** + +```csharp +[DurableTask] +public sealed class SubmitOrderActivity : TaskActivity<SubmitOrderActivityInput, OrderWorkflowResult> +{ + public override Task<OrderWorkflowResult> RunAsync( + TaskActivityContext context, + SubmitOrderActivityInput input) + { + var message = $"Order '{input.OrderId}' accepted for {input.Amount:0.00} {input.Currency}."; + var result = new OrderWorkflowResult(input.OrderId, true, message, DateTimeOffset.UtcNow); + return Task.FromResult(result); + } +} +``` + +Activities receive typed input records and return typed result records. Keep those contracts as plain records so they serialize cleanly across durable boundaries. + +--- + +## Compensation and Retries + +When a step in a multi-step workflow fails, the process may need to undo work already performed by earlier steps. That is the classic compensation or saga pattern. + +**Apply this when:** +- Earlier steps have already committed side effects, for example inventory reservation or payment authorization. +- There is no distributed rollback mechanism. +- The compensating action itself must be durable and observable. 
+ +**Pattern:** + +```csharp +public override async Task RunAsync( + TaskOrchestrationContext context, OrderWorkflowRequest input) +{ + var reservationId = await context.CallActivityAsync( + nameof(ReserveInventoryActivity), input); + + try + { + return await context.CallActivityAsync( + nameof(ChargePaymentActivity), input); + } + catch (TaskFailedException) + { + await context.CallActivityAsync( + nameof(ReleaseInventoryActivity), reservationId); + + return new OrderWorkflowResult( + input.OrderId, + false, + "Payment failed; reservation released.", + context.CurrentUtcDateTime); + } +} +``` + +**Retry policies:** + +Configure retries on the activity call rather than burying retry logic inside the activity: + +```csharp +var retryOptions = new TaskOptions(new RetryPolicy( + maxNumberOfAttempts: 3, + firstRetryInterval: TimeSpan.FromSeconds(5), + backoffCoefficient: 2.0)); + +await context.CallActivityAsync( + nameof(SubmitOrderActivity), + input, + retryOptions); +``` + +Retries replay only the failing step. Previously completed steps are not re-executed. + +--- + +## Deterministic Execution + +Orchestrators are replayed whenever the worker resumes. Every line of orchestrator code may run multiple times during replay. Non-deterministic logic inside the orchestrator will corrupt the execution history. + +**Rules:** +- Do not read `DateTime.UtcNow` or `DateTimeOffset.UtcNow` directly inside an orchestrator. Use `context.CurrentUtcDateTime`. +- Do not generate random values or GUIDs inside an orchestrator. +- Do not perform I/O inside an orchestrator. +- Do not use `Task.Delay`; use `context.CreateTimer`. +- Do not read environment variables or configuration directly inside the orchestrator. + +**Correct:** + +```csharp +var processedAt = context.CurrentUtcDateTime; +``` + +The sample follows that rule by keeping business work inside activities and using `context.CurrentUtcDateTime` for durable timestamps. 
+ +--- + +## Fan-Out / Fan-In + +Fan-out/fan-in describes a pattern where one orchestrator dispatches many parallel work items, waits for them all to complete, and then aggregates the results. + +**Apply this when:** +- One business action must trigger work against a dynamic list of targets. +- The caller needs an aggregated result before proceeding. +- Individual branches may fail and need independent retries. +- The work list is not fixed at design time. + +**Pattern:** + +```csharp +[DurableTask] +public sealed class NotifyRecipientsOrchestration + : TaskOrchestrator +{ + public override async Task RunAsync( + TaskOrchestrationContext context, NotifyRecipientsRequest input) + { + var tasks = input.RecipientIds.Select(id => + context.CallActivityAsync( + nameof(SendNotificationActivity), + new SendNotificationInput(id, input.MessageTemplate))); + + var outcomes = await Task.WhenAll(tasks); + + var failed = outcomes.Where(x => !x.Delivered).Select(x => x.RecipientId).ToList(); + return new NotifyRecipientsResult(outcomes.Length, outcomes.Count(x => x.Delivered), failed); + } +} +``` + +`Task.WhenAll` is safe here because the inner tasks are durable activity calls, not raw background work. + +--- + +## Batch Processing + +Batch processing involves iterating over a variable-size list of work items and executing durable work for each item. Orchestration adds per-item checkpointing, independent retries, and controllable parallelism. + +**Apply this when:** +- A scheduled job, import file, or upstream event delivers a list of items. +- The batch must survive worker restarts. +- Some items may fail without invalidating the whole batch. +- Throughput must be capped to protect downstream systems. 
+
+**Pattern:**
+
+```csharp
+[DurableTask]
+public sealed class OrderBatchOrchestration
+    : TaskOrchestrator<OrderBatchRequest, OrderBatchResult>
+{
+    private const int MaxConcurrency = 10;
+
+    public override async Task<OrderBatchResult> RunAsync(
+        TaskOrchestrationContext context, OrderBatchRequest input)
+    {
+        var results = new List<ProcessSingleOrderResult>();
+        var queue = new Queue<string>(input.OrderIds);
+
+        while (queue.Count > 0)
+        {
+            var window = Enumerable.Range(0, Math.Min(MaxConcurrency, queue.Count))
+                .Select(_ => queue.Dequeue())
+                .ToList();
+
+            var tasks = window.Select(orderId =>
+                context.CallActivityAsync<ProcessSingleOrderResult>(
+                    nameof(ProcessSingleOrderActivity),
+                    new ProcessSingleOrderInput(orderId)));
+
+            results.AddRange(await Task.WhenAll(tasks));
+        }
+
+        return new OrderBatchResult(
+            results.Count,
+            results.Count(x => x.Accepted),
+            results.Count(x => !x.Accepted));
+    }
+}
+```
+
+For very large batches, use sub-orchestrations to shard the work and keep orchestration histories compact.
+
+---
+
+## External Events and Human Approval
+
+An orchestration can pause and wait for an event raised by an external system or a human actor, then resume with the event payload.
+
+**Apply this when:**
+- A step requires human approval.
+- A third-party system responds asynchronously.
+- A timeout should trigger a compensating or fallback path.
+
+**Pattern:**
+
+```csharp
+[DurableTask]
+public sealed class OrderApprovalOrchestration
+    : TaskOrchestrator<OrderWorkflowRequest, OrderWorkflowResult>
+{
+    public override async Task<OrderWorkflowResult> RunAsync(
+        TaskOrchestrationContext context, OrderWorkflowRequest input)
+    {
+        await context.CallActivityAsync(nameof(NotifyApproverActivity), input);
+
+        using var timeoutCts = new CancellationTokenSource();
+        var approvalTask = context.WaitForExternalEvent<bool>("ApprovalDecision", timeoutCts.Token);
+        var timeoutTask = context.CreateTimer(context.CurrentUtcDateTime.AddHours(48), timeoutCts.Token);
+
+        var winner = await Task.WhenAny(approvalTask, timeoutTask);
+        timeoutCts.Cancel();
+
+        if (winner == timeoutTask || !approvalTask.Result)
+        {
+            await context.CallActivityAsync(nameof(CancelOrderActivity), input.OrderId);
+            return new OrderWorkflowResult(
+                input.OrderId,
+                false,
+                "Approval not received within deadline.",
+                context.CurrentUtcDateTime);
+        }
+
+        return await context.CallActivityAsync<OrderWorkflowResult>(
+            nameof(SubmitOrderActivity),
+            new SubmitOrderActivityInput(input.OrderId, input.Amount, input.Currency, input.RequestedBy));
+    }
+}
+```
+
+An external caller raises the event with `DurableTaskClient.RaiseEventAsync`.
+
+---
+
+## Auditability and Replay
+
+The Durable Task runtime stores execution history for every orchestration instance: inputs, outputs, activity timing, and status transitions. That gives a built-in audit trail and a basis for replay-aware diagnostics.
+
+**Querying instance status:**
+
+```csharp
+var metadata = await _orderWorkflowClient.GetMetadataAsync(instanceId, getInputsAndOutputs: true);
+```
+
+`OrchestrationMetadata` provides runtime status, timestamps, inputs, outputs, and failure details.
+
+Use a caller-supplied or business-key-derived instance ID when idempotent scheduling matters. That ties the durable history directly to the business entity and prevents duplicate scheduling.
+ +--- + +## DTS Dashboard for Observability and Management + +The DTS dashboard provides a unified operational view of orchestrations, activities, and entities. It is useful for both day-to-day observability and active management actions. + +### What it provides + +- Instance-level visibility: runtime status, duration, input and output payloads, and failure details. +- Execution flow insight: orchestration timelines including fan-out and fan-in activity branches. +- Operational controls: pause, terminate, and restart operations for orchestration instances. +- Query and filtering: locate instances by status, age, name, or identifier patterns. +- Troubleshooting support: correlate orchestration history with application logs and traces. + +### Why it matters locally + +The same dashboard experience is available when using the local emulator. That means developers can test workflows and inspect execution behavior on their workstation without deploying to Azure. + +In this repository, the emulator dashboard is exposed on `http://localhost:8082`. + +Typical local workflow: + +1. Start the emulator and run the worker host. +2. Schedule or trigger orchestration instances. +3. Open `http://localhost:8082` and select the task hub. +4. Inspect timelines, activity outcomes, and instance metadata to verify behavior. +5. Use management actions as needed during testing and debugging. + +### Relationship to OpenTelemetry + +The dashboard and OpenTelemetry traces are complementary: + +- DTS dashboard is orchestration-centric and state-history centric. +- OpenTelemetry is distributed-call centric across services and infrastructure. + +Using both gives complete coverage: orchestration state transitions plus end-to-end dependency traces. + +--- + +## Project Layout + +The sample separates concerns across three projects: + +| Project | Responsibility | +|---|---| +| `Contoso.Order.Workflow.Workflow` | Orchestrations, activities, and workflow contracts. 
| +| `Contoso.Order.Workflow.Worker` | Worker host that registers orchestration code with DTS. | +| `Contoso.Order.Workflow.Client` | Client library used by APIs or other callers to start and query workflows. | + +This keeps workflow logic portable and independent of the specific host process. + +--- + +## Worker Host Setup + +The worker host wires the DTS connection, registers orchestrators and activities, and configures telemetry: + +```csharp +builder.Services.AddDurableTaskWorker() + .AddTasks(registry => + { + registry.AddOrchestrator(); + registry.AddActivity(); + registry.AddActivity(); + }) + .UseDurableTaskScheduler(connectionString); +``` + +The sample worker resolves endpoint and task hub from configuration and uses `Authentication=None` for the local emulator or `Authentication=DefaultAzure` for managed DTS. + +--- + +## Client Registration + +Any host that needs to schedule or query orchestrations can register the client: + +```csharp +builder.Services.AddContosoOrderWorkflowClient(builder.Configuration); +``` + +The client registration falls back from `ConnectionStrings:DurableTaskScheduler` to `DurableTaskScheduler:Endpoint` and `DurableTaskScheduler:TaskHub`. + +--- + +## Running Locally + +This repository includes the DTS emulator in [docker-compose.yml](../../docker-compose.yml). Start it with container infrastructure: + +```bash +docker compose up -d dts-emulator +``` + +Then run the worker host: + +```bash +dotnet run --project samples/src/Contoso.Order.Workflow.Worker +``` + +By default the sample worker connects to `http://localhost:8080`, task hub `order`, with `Authentication=None`. 
diff --git a/docs/presentations/coreex-agentic-scaffolding-slides.md b/docs/presentations/coreex-agentic-scaffolding-slides.md new file mode 100644 index 00000000..745335d6 --- /dev/null +++ b/docs/presentations/coreex-agentic-scaffolding-slides.md @@ -0,0 +1,437 @@ +# CoreEx Agentic Scaffolding +## Skills + Prompts + Context-Aware Generation + +Audience: Engineering leadership, platform team, solution architects, implementation teams. +Duration: 20-30 minutes. + +--- + +## Slide ES1 - Executive Summary +### Why This Matters +- CoreEx scaffolding gives deterministic, repeatable delivery foundations. +- Agentic prompting adds speed and flexibility for domain-specific requirements. +- Teams move faster with less architecture and boilerplate friction. + +Speaker notes: +This section is designed for executive stakeholders. It focuses on outcomes, risk reduction, and delivery acceleration. + +--- + +## Slide ES2 - Business Value +### Impact on Delivery +- Faster time from idea to a buildable, reviewable solution baseline. +- Consistent architecture and coding conventions across teams. +- Less rework by reducing early structural and wiring defects. +- Higher engineering focus on business features instead of plumbing. +- Move fast early: discovery, prototyping, and proving value with stakeholders. + +Speaker notes: +CoreEx reduces setup variability and enables teams to start from proven implementation patterns. It is especially effective when teams need rapid evidence of value in the first phase of a delivery. + +--- + +## Slide ES3 - Deterministic Foundation + Agentic Speed +### Balanced Delivery Model +- Deterministic: templates, instructions, and skills enforce known-good structure. +- Agentic: prompts tailor scaffolding to domains, features, and constraints. +- Result: predictable governance with rapid customization. +- Converge from experimentation to determinism for repeatability, compliance, and supportability. 
+ +Speaker notes: +This is the key message: not deterministic versus agentic, but deterministic core plus agentic acceleration. Teams can begin with rapid exploration, then lock into repeatable patterns as delivery matures. + +--- + +## Slide ES4 - What Comes Out of the Box +### CoreEx Opinionated Acceleration +- Modern architecture patterns: microservices, eventing, and DDD-aligned domains. +- Enterprise capabilities ready to pull in as needed: validation, caching, outbox, observability. +- Integrated hosting and API conventions reduce framework integration overhead. + +Speaker notes: +CoreEx is not a generic framework toolkit that teams must assemble from scratch. It is an opinionated accelerator with practical defaults. + +--- + +## Slide ES5 - Platform Flexibility +### Enterprise Choice Without Chaos +- Supports multiple backend patterns and provider strategies. +- Messaging approach is not locked to one broker. +- Teams can select infrastructure choices while preserving a consistent architecture model. + +Speaker notes: +Executives get both standardization and optionality. + +--- + +## Slide ES6 - Decision and Rollout +### Recommended Path +- Adopt CoreEx agentic scaffolding as the default start for new services. +- Run a phased rollout with KPI tracking on speed, consistency, and rework reduction. +- Govern templates and instructions as platform assets. +- Ensure the delivered solution remains maintainable and operable over time, including after team transition. + +Speaker notes: +Treat scaffolding as a strategic product capability, not a one-off project artifact. This improves continuity when ownership transitions to long-term product teams. + +--- + +## Slide ES7 - Transition to Technical Deep Dive +### Section 2: Full Technical Detail +- Detailed architecture, capability inventory, platform options, and roadmap. +- Prompt, skill, and instruction workflow with enforcement guardrails. 
+ +Speaker notes: +The next section provides implementation depth for architects and engineering leads. + +--- + +## Slide 1 - Title +### The New Agentic Way to Scaffold CoreEx Solutions +- Generate production-aligned CoreEx domain scaffolding faster. +- Encode architecture and coding standards as reusable prompt and skill assets. +- Keep generated output aligned to CoreEx patterns and sample implementations. + +Speaker notes: +This deck explains how we move from manual scaffolding to an agentic, policy-driven workflow that is repeatable, governed, and practical for teams starting new CoreEx implementations. + +--- + +## Slide 2 - Why Change +### Pain in the Previous Approach +- Manual setup was slow and inconsistent across teams. +- Architecture intent was scattered across tribal knowledge and sample code. +- Early project output often drifted from CoreEx conventions. +- Review cycles focused on fixing structure and wiring instead of business value. + +Speaker notes: +We are not replacing engineering judgment. We are automating boilerplate and codifying known-good patterns so teams can spend time on domain behavior. + +--- + +## Slide 3 - What Is New +### Agentic Scaffolding Stack +- Prompt workflows define user intent capture and step sequencing. +- Skills define capability packs with selection logic and generation strategy. +- Instruction files enforce file-scope coding conventions. +- Templates and scripts provide repeatable project materialization. +- Sample implementations provide concrete behavioral and architectural examples. +- CoreEx scaffolding introduces deterministic outputs, reducing non-deterministic variance from pure free-form agentic generation. + +Speaker notes: +The key idea is layered context. Prompts ask for what we need, skills decide what to generate, instructions govern how to write code, and templates provide deterministic structure. 
+ +--- + +## Slide 4 - Core Building Blocks +### Assets in the Repository +- Bootstrap skill: coreex-project-bootstrap. +- Domain generation prompts: generate-domain, scaffold-domain-from-templates. +- Environment and startup prompts: init, setup. +- File-scoped instructions for controllers, services, repositories, validators, tests, and host setup. +- Starter docs for architecture, conventions, and domains. + +Speaker notes: +Everything needed is shipped as repository assets. Teams do not start from a blank prompt. + +--- + +## Slide 5 - Operating Model +### Human + Agent Collaboration +- Human provides bounded context and domain intent. +- Agent asks only minimal clarification questions for critical decisions. +- Agent scaffolds layered projects and hosts. +- Agent validates quality gates and resolves diagnostics. +- Human reviews domain correctness and business rules. +- Deterministic CoreEx scaffolding provides predictable baselines; agentic prompting provides fast customization on top. + +Speaker notes: +This is controlled autonomy. The agent handles deterministic work while still adapting to requested features and domains. Humans remain accountable for correctness, semantics, and trade-off decisions. + +--- + +## Slide 6 - End-to-End Flow +### From Prompt to Running Solution +1. Capture request: domains, hosts, persistence, behaviors. +2. Select skill and package set. +3. Materialize solution and project templates. +4. Generate layer artifacts: Contracts, Application, Infrastructure, API, Database. +5. Apply conventions from scoped instruction files. +6. Run diagnostics and fix generation errors. +7. Build and verify starter functionality. + +Speaker notes: +The flow is intentionally explicit so we can audit and improve each stage. + +--- + +## Slide 7 - Context Hierarchy +### How the Agent Stays Aligned to CoreEx +- Copilot instructions define global CoreEx-first behavior. +- Scoped instruction files enforce local patterns by file type. 
+- Skill logic maps requested capabilities to concrete package choices. +- Prompts capture required inputs and enforce completion checklists. +- Sample projects and docs provide reference implementations. + +Speaker notes: +This hierarchy reduces ambiguity. If a rule exists in multiple places, the narrower scope wins for generation behavior. + +--- + +## Slide 8 - Generated Architecture +### Standard Layered Output +- Contracts project for DTOs and source-generation annotations. +- Application project for services, validation, exceptions, and orchestration. +- Infrastructure project for repository and adapter implementations. +- API host with CoreEx middleware and OpenAPI conventions. +- Database project with migrations, outbox tables, and procedures. +- Optional subscribe and outbox relay hosts when integration requires them. +- Domain-first modeling aligned to DDD (aggregates, value objects, bounded contexts). +- Modern architecture support out of the box: eventing, microservice host separation, and integration patterns. + +Speaker notes: +The generated shape matches CoreEx reference architecture, keeps dependencies layered and predictable, and gives teams a ready-to-use microservices/event-driven baseline. + +--- + +## Slide 8A - Products and Shopping Sample Topology +### Architecture Shown in the Diagram +- Two bounded domains, each with its own API and Application layer. +- Each domain application encapsulates contracts and infrastructure concerns behind its service boundary. +- Unit-of-Work sits between application logic and persistence. +- Each domain has isolated Data and Outbox stores. +- Outbox Relay and Message Subscriber are deployed per domain. +- Domains communicate asynchronously through a shared queue or stream backbone. +- Cache is shown as a shared cross-domain optimization layer. + +Speaker notes: +This diagram demonstrates the CoreEx pattern in practice: independent domain services with local transaction boundaries and outbox-driven integration. 
Product and Shopping remain decoupled operationally, while events over queue or stream coordinate cross-domain workflows. + +--- + +## Slide 9 - Determinism vs Pure Agentic Variance +### Why CoreEx Scaffolding Matters +- Pure agent-only generation can vary between runs, prompts, and model behavior. +- CoreEx templates, instructions, and skills constrain generation into known-good patterns. +- Teams get deterministic project structure, wiring, and conventions from day one. +- Agentic prompting still adds flexibility for domain-specific features and behavior. + +Speaker notes: +This is not a choice between deterministic and agentic. CoreEx combines both: deterministic scaffolding for foundation, agentic adaptation for domain acceleration. + +--- + +## Slide 10 - Guardrails and Quality Gates +### Built-In Enforcement +- Constructor dependency null guards. +- ConfigureAwait(false) in async service and repository flows. +- Mutation operations wrapped in Unit of Work. +- Event emission inside mutation-aware blocks. +- CoreEx WebApi helper usage and response conventions. +- Validation before persistence using CoreEx validators. +- Diagnostics check before completion. + +Speaker notes: +Guardrails shift many common review comments from late discovery to immediate generation-time enforcement. + +--- + +## Slide 11 - Prompt Sequence for Teams +### Suggested Delivery Workflow +1. Run init prompt to verify machine prerequisites. +2. Run setup prompt to start dependencies and local runtime. +3. Run solution bootstrap skill for initial project layout. +4. Run domain generation prompt for new bounded contexts. +5. Run template-scaffold prompt for fast repeatable domain cloning. +6. Execute tests and E2E checks. + +Speaker notes: +Teams can adopt this as a standard onboarding and delivery playbook. + +--- + +## Slide 12 - Example Ask +### Example Prompt to Kick Off Scaffolding +Create a new CoreEx solution. +I need a Web API and Worker service. +Domains: Product and Shopping. 
+Include validation and behaviors. +Use SQL Server persistence. +Use Kafka as message broker. +Scaffold full repository structure with tests. + +Speaker notes: +This level of intent is enough for the agent to scaffold an entire repo, including domains, hosts, and tests, while honoring architecture constraints. + +--- + +## Slide 13 - Benefits Realized +### Expected Outcomes +- Faster time from idea to compilable baseline. +- Higher consistency with CoreEx coding and architecture conventions. +- Reduced architecture drift during early implementation. +- Lower cognitive load for new teams onboarding to CoreEx. +- Better review quality by focusing reviews on business semantics. +- Opinionated patterns out of the box reduce friction versus composing a generic framework from scratch. +- Teams pull in only needed capabilities (validation, events, caching, messaging) instead of hand-integrating foundations. +- Smoother transition from initial delivery teams to long-lived ownership with maintainable, standards-aligned code. + +Speaker notes: +CoreEx accelerates teams by providing a pre-wired, opinionated path that removes setup churn and allows focus on business capabilities. It also reduces operational risk by leaving behind predictable, supportable implementation assets. + +--- + +## Slide 14 - Platform Flexibility +### Not Locked to One Backend or Broker +- Data persistence can target multiple backend types depending on provider and project needs. +- Messaging integration is pluggable; Azure Service Bus is supported, but not mandatory. +- Agentic prompts can specify preferred database and broker choices per solution. +- Deterministic scaffolding still applies even when platform selections vary. + +Speaker notes: +CoreEx gives an opinionated architecture, not a hard platform lock. Teams can keep consistency while selecting infrastructure that fits enterprise constraints. 
+ +--- + +## Slide 15 - CoreEx Capability Inventory (General) +### Included Components and Features +- Error-based exceptions: NotFound, Validation, Concurrency, and related exception types. +- Dynamic dependency injection patterns. +- Entities: Identifier and CompositeKey support. +- ETag support for optimistic concurrency. +- Change log support for created and updated metadata. +- Deep-compare capabilities for entity state comparison. +- Roslyn source-generation support. +- Instrumentation and health checks. +- Hybrid cache (L1 and L2) with FusionCache, backplane support, and Redis integration. +- Hosted services for timer-driven and synchronized workloads. +- Reference data orchestration, including caching support. +- System.Text.Json support for filtering and merge-patch workflows. +- Validation pipeline as an alternative to FluentValidation. +- Mapping helpers with explicit mapping patterns (no AutoMapper). +- Globalization and localization primitives. +- Result-based railway-oriented programming composition. + +Speaker notes: +This slide is the CoreEx baseline inventory. It communicates the practical accelerator: teams pull in tested primitives instead of recreating them per project. + +--- + +## Slide 16 - ASP.NET Core and Data Capabilities +### Included Web, Data, and Database Features +- Web API styles: Http-style minimal APIs and MVC controller APIs. +- application/merge-patch+json support. +- Response JSON filtering support. +- Error handling middleware aligned with ProblemDetails. +- IF-MATCH ETag semantics for GET and PUT or PATCH workflows. +- Idempotency-Key support for POST. +- Health check endpoints. +- OpenAPI generation via NSwag. +- CQRS read and write separation support. +- Unit-of-Work with integrated Outbox. +- Paging support using skip/take with total count. +- Dynamic OData-like query support for filtering and ordering. +- Multi-tenancy behavior support. +- Type discriminator support where required. 
+- Database support: SQL Server and PostgreSQL (provider dependent). +- ADO.NET command, record, and parameter extensions. +- Entity Framework integration support. + +Speaker notes: +This is where friction drops versus generic frameworks: CoreEx packages wire these conventions directly so teams avoid repeated plumbing decisions. + +--- + +## Slide 17 - Messaging and Domain-Driven Capabilities +### Included Eventing and DDD Features +- EventData as an agnostic message representation. +- CloudEvent conversion and interoperability support. +- Publish and subscribe patterns with per-message subscription behavior from stream. +- Azure Service Bus integration patterns. +- Outbox relay support with partition-aware patterns. +- Domain-driven modeling support for aggregates and entities. +- ValueObject modeling using C# record class patterns. +- Integration-events only guidance. +- Explicitly no domain-events and no MediatR-based in-process orchestration by default. + +Speaker notes: +CoreEx aligns eventing and domain modeling so distributed services remain explicit, testable, and consistent across teams. + +--- + +## Slide 18 - Current Upgrade Status and Roadmap +### What Is Next +To be upgraded: +- Azure Functions. +- Cosmos (CRUD and Query). +- Dataverse. +- OData. +- Solace messaging integration. + +Roadmap: +- MongoDB. +- DocumentDB (new). +- Kafka. + +Aspire enabled (done): +- Leverages component runtime libraries. +- Sample uses console for logging, tracing, and metrics visualization. + +Speaker notes: +This makes current maturity and future direction explicit for stakeholders. It also reinforces that platform portability is planned and active, not theoretical. + +--- + +## Slide 19 - Risks and Mitigations +### What to Watch +- Risk: over-trusting generated output. + Mitigation: enforce mandatory human review and test gates. +- Risk: stale templates or instruction drift. + Mitigation: version and periodically validate scaffolding assets against samples. 
+- Risk: ambiguous prompts produce wrong shape. + Mitigation: require minimal clarifying questions for orchestration, CQRS, and integration topology. + +Speaker notes: +Agentic does not remove governance. It improves it when paired with clear controls. + +--- + +## Slide 20 - Adoption Plan +### 30-60-90 Day Rollout +- 30 days: pilot with one domain and baseline metrics. +- 60 days: codify team prompt playbooks and update templates from findings. +- 90 days: make agentic scaffolding the default start path for new CoreEx implementations. + +Suggested KPIs: +- Time to first successful build. +- Number of post-generation architecture corrections. +- Defect rate in generated boilerplate. +- Onboarding time for new engineers. + +Speaker notes: +Measure both speed and quality. The objective is not just faster generation, but better initial correctness. + +--- + +## Slide 21 - Evidence Pointers +### Source Anchors Used for This Deck +- .github prompts for init, setup, and domain generation. +- coreex-starter bootstrap skill and script. +- coreex-starter architecture, conventions, and domain guidance docs. +- scoped instruction files for controllers, application services, repositories, tests, validators, and host setup. + +Speaker notes: +These slides are grounded in repository assets and can be updated as those assets evolve. + +--- + +## Appendix - Presenter Q and A +### Likely Questions +- How much can we customize generated domains? +- How do we prevent template sprawl? +- Can this flow support non-SQL providers? +- How do we handle major CoreEx version upgrades? +- What is the approval model for changing instructions and skills? + +Speaker notes: +Use these as discussion prompts for architecture review and platform governance forums. 
diff --git a/docs/presentations/coreex-agentic-scaffolding-slides.pptx b/docs/presentations/coreex-agentic-scaffolding-slides.pptx new file mode 100644 index 00000000..c0b462ba Binary files /dev/null and b/docs/presentations/coreex-agentic-scaffolding-slides.pptx differ diff --git a/docs/presentations/export-markdown-to-pptx.ps1 b/docs/presentations/export-markdown-to-pptx.ps1 new file mode 100644 index 00000000..4a8d1199 --- /dev/null +++ b/docs/presentations/export-markdown-to-pptx.ps1 @@ -0,0 +1,261 @@ +[CmdletBinding()] +param( + [string]$MarkdownPath = "docs/presentations/coreex-agentic-scaffolding-slides.md", + [string]$OutputPath = "docs/presentations/coreex-agentic-scaffolding-slides.pptx", + [string]$TemplatePath +) + +Set-StrictMode -Version Latest +$ErrorActionPreference = 'Stop' + +function Get-SectionName { + param( + [string]$SlideId, + [string]$Title + ) + + if (-not [string]::IsNullOrWhiteSpace($SlideId) -and $SlideId.Trim().ToUpper().StartsWith('ES')) { + return 'Executive Summary' + } + + if (-not [string]::IsNullOrWhiteSpace($Title) -and $Title -match '^Appendix') { + return 'Appendix' + } + + return 'Technical Deep Dive' +} + +function New-SlideModel { + param([string[]]$Lines) + + $slideId = $null + $title = $null + $subtitle = $null + $bullets = New-Object System.Collections.Generic.List[string] + $notes = New-Object System.Collections.Generic.List[string] + + $inNotes = $false + + foreach ($raw in $Lines) { + $line = $raw.TrimEnd() + $trim = $line.Trim() + + if ([string]::IsNullOrWhiteSpace($trim)) { + if ($inNotes) { $notes.Add("") } + continue + } + + if ($trim -eq 'Speaker notes:') { + $inNotes = $true + continue + } + + if ($inNotes) { + $notes.Add($trim) + continue + } + + if (-not $title -and $trim -match '^##\s+Slide\s+([^-]+)-\s+(.+)$') { + $slideId = $Matches[1].Trim() + $title = $Matches[2].Trim() + continue + } + + if (-not $subtitle -and $trim -match '^###\s+(.+)$') { + $subtitle = $Matches[1].Trim() + continue + } + + if ($trim 
-match '^[-]\s+(.+)$') { + $bullets.Add($Matches[1].Trim()) + continue + } + + if ($trim -match '^\d+[.]\s+(.+)$') { + $bullets.Add($trim) + continue + } + + if ($trim -match '^(Audience|Duration):') { + $bullets.Add($trim) + continue + } + + if ($trim -notmatch '^#') { + $bullets.Add($trim) + } + } + + if (-not $title) { return $null } + + $section = Get-SectionName -SlideId $slideId -Title $title + + [PSCustomObject]@{ + SlideId = $slideId + Title = $title + Subtitle = $subtitle + Bullets = $bullets + Notes = $notes + Section = $section + } +} + +$repoRoot = Get-Location +$mdFullPath = if ([System.IO.Path]::IsPathRooted($MarkdownPath)) { $MarkdownPath } else { Join-Path $repoRoot $MarkdownPath } +$pptxFullPath = if ([System.IO.Path]::IsPathRooted($OutputPath)) { $OutputPath } else { Join-Path $repoRoot $OutputPath } +$templateFullPath = $null +$downloadedTemplatePath = $null + +if (-not [string]::IsNullOrWhiteSpace($TemplatePath)) { + if ($TemplatePath -match '^https?://') { + $downloadedTemplatePath = Join-Path ([System.IO.Path]::GetTempPath()) ("coreex-template-{0}.potx" -f ([System.Guid]::NewGuid().ToString('N'))) + try { + Invoke-WebRequest -Uri $TemplatePath -OutFile $downloadedTemplatePath | Out-Null + $templateFullPath = $downloadedTemplatePath + } + catch { + throw "Unable to download template from URL. Download it locally first and pass a file path. 
URL: $TemplatePath" + } + } + else { + $templateFullPath = if ([System.IO.Path]::IsPathRooted($TemplatePath)) { $TemplatePath } else { Join-Path $repoRoot $TemplatePath } + } +} + +if (-not (Test-Path -LiteralPath $mdFullPath)) { + throw "Markdown file not found: $mdFullPath" +} + +if ($templateFullPath -and -not (Test-Path -LiteralPath $templateFullPath)) { + throw "Template file not found: $templateFullPath" +} + +$allLines = Get-Content -LiteralPath $mdFullPath +$blocks = New-Object System.Collections.Generic.List[object] +$current = New-Object System.Collections.Generic.List[string] + +foreach ($line in $allLines) { + if ($line.Trim() -eq '---') { + if ($current.Count -gt 0) { + $blocks.Add(@($current)) + $current = New-Object System.Collections.Generic.List[string] + } + continue + } + + $current.Add($line) +} + +if ($current.Count -gt 0) { + $blocks.Add(@($current)) +} + +$slideModels = New-Object System.Collections.Generic.List[object] +foreach ($block in $blocks) { + $model = New-SlideModel -Lines $block + if ($null -ne $model) { $slideModels.Add($model) } +} + +if ($slideModels.Count -eq 0) { + throw "No slides were parsed from markdown." 
+} + +$parent = Split-Path -Parent $pptxFullPath +if (-not (Test-Path -LiteralPath $parent)) { + New-Item -ItemType Directory -Path $parent | Out-Null +} + +$ppSaveAsOpenXMLPresentation = 24 +$ppLayoutText = 2 +$readableTextColorRgb = 0 + +$powerPoint = $null +$presentation = $null + +try { + $powerPoint = New-Object -ComObject PowerPoint.Application + $powerPoint.Visible = $true + $presentation = $powerPoint.Presentations.Add() + + if ($templateFullPath) { + $presentation.ApplyTemplate($templateFullPath) + } + + while ($presentation.Slides.Count -gt 0) { + $presentation.Slides.Item(1).Delete() + } + + foreach ($m in $slideModels) { + $slide = $presentation.Slides.Add($presentation.Slides.Count + 1, $ppLayoutText) + + $titleText = if ([string]::IsNullOrWhiteSpace($m.Subtitle)) { $m.Title } else { "{0}`n{1}" -f $m.Title, $m.Subtitle } + $titleRange = $slide.Shapes.Title.TextFrame.TextRange + $titleRange.Text = $titleText + $titleRange.Font.Color.RGB = $readableTextColorRgb + + $content = if ($m.Bullets.Count -gt 0) { + ($m.Bullets | ForEach-Object { [string]::Format([char]0x2022 + ' {0}', $_) }) -join "`r`n" + } else { + "" + } + + $body = $slide.Shapes.Placeholders.Item(2).TextFrame.TextRange + $body.Text = $content + $body.Font.Color.RGB = $readableTextColorRgb + + if ($m.Notes.Count -gt 0) { + $notesText = ($m.Notes -join "`r`n") + $slide.NotesPage.Shapes.Placeholders.Item(2).TextFrame.TextRange.Text = $notesText + } + } + + # Create native PowerPoint sections from parsed markdown section metadata. + if ($slideModels.Count -gt 0) { + $sectionProperties = $presentation.SectionProperties + $firstSectionName = [string]$slideModels[0].Section + + try { + if ($sectionProperties.Count -ge 1) { + $sectionProperties.Rename(1, $firstSectionName) | Out-Null + } + else { + $sectionProperties.AddBeforeSlide(1, $firstSectionName) | Out-Null + } + } + catch { + # Ignore section rename/create issues and continue exporting slides. 
+ } + + $previousSectionName = $firstSectionName + for ($i = 2; $i -le $slideModels.Count; $i++) { + $currentSectionName = [string]$slideModels[$i - 1].Section + if ($currentSectionName -ne $previousSectionName) { + try { + $sectionProperties.AddBeforeSlide($i, $currentSectionName) | Out-Null + } + catch { + # Ignore section insertion issues and continue exporting slides. + } + } + + $previousSectionName = $currentSectionName + } + } + + $presentation.SaveAs($pptxFullPath, $ppSaveAsOpenXMLPresentation) + Write-Host "Created: $pptxFullPath" +} +finally { + if ($presentation) { $presentation.Close() } + if ($powerPoint) { $powerPoint.Quit() } + + if ($null -ne $presentation) { [System.Runtime.InteropServices.Marshal]::ReleaseComObject($presentation) | Out-Null } + if ($null -ne $powerPoint) { [System.Runtime.InteropServices.Marshal]::ReleaseComObject($powerPoint) | Out-Null } + + [GC]::Collect() + [GC]::WaitForPendingFinalizers() + + if ($downloadedTemplatePath -and (Test-Path -LiteralPath $downloadedTemplatePath)) { + Remove-Item -LiteralPath $downloadedTemplatePath -Force -ErrorAction SilentlyContinue + } +} diff --git a/gen/CoreEx.Gen/ContractGenerator.cs b/gen/CoreEx.Gen/ContractGenerator.cs new file mode 100644 index 00000000..cbe4e389 --- /dev/null +++ b/gen/CoreEx.Gen/ContractGenerator.cs @@ -0,0 +1,86 @@ +using CoreEx.Gen.Utility; +using Microsoft.CodeAnalysis; +using Microsoft.CodeAnalysis.CSharp.Syntax; +using Microsoft.CodeAnalysis.Text; +using System.Text; + +namespace CoreEx.Gen; + +/// +/// Provides the 'ContractAttribute' implementation. 
+/// +[Generator] +public class ContractGenerator : IIncrementalGenerator +{ + private const string _contractAttributeResourceName = "CoreEx.Gen.Templates.ContractAttribute.cs.hb"; + private const string _contractIgnoreAttributeResourceName = "CoreEx.Gen.Templates.ContractIgnoreAttribute.cs.hb"; + private const string _refDataAttributeResourceName = "CoreEx.Gen.Templates.ReferenceDataAttribute.cs.hb"; + private const string _refDataTAttributeResourceName = "CoreEx.Gen.Templates.ReferenceDataTAttribute.cs.hb"; + private const string _refDataCodeCollectionTAttributeResourceName = "CoreEx.Gen.Templates.ReferenceDataCodeCollectionTAttribute.cs.hb"; + private const string _stringAttributeResourceName = "CoreEx.Gen.Templates.StringAttribute.cs.hb"; + private const string _dateTimeAttributeResourceName = "CoreEx.Gen.Templates.DateTimeAttribute.cs.hb"; + private const string _cleanAttributeResourceName = "CoreEx.Gen.Templates.CleanAttribute.cs.hb"; + private const string _templateResourceName = "CoreEx.Gen.Templates.Contract.cs.hb"; + private readonly HandlebarsCodeGenerator _codeGenerator = HandlebarsCodeGenerator.Create(_templateResourceName); + + /// + /// Gets a that includes nullability. + /// + internal static SymbolDisplayFormat FullyQualifiedWithNullability = + new(globalNamespaceStyle: SymbolDisplayGlobalNamespaceStyle.Included, + typeQualificationStyle: SymbolDisplayTypeQualificationStyle.NameAndContainingTypesAndNamespaces, + genericsOptions: SymbolDisplayGenericsOptions.IncludeTypeParameters, + miscellaneousOptions: SymbolDisplayMiscellaneousOptions.UseSpecialTypes | SymbolDisplayMiscellaneousOptions.IncludeNullableReferenceTypeModifier); + + /// + public void Initialize(IncrementalGeneratorInitializationContext context) + { + // Register the requisite '*Attribute' classes. 
+ context.RegisterPostInitializationOutput(static ctx => + { + ctx.AddSource("contractattribute.g.cs", SourceText.From(HandlebarsCodeGenerator.Create(_contractAttributeResourceName).Generate(new CodeGenContext()), Encoding.UTF8)); + ctx.AddSource("contractignoreattribute.g.cs", SourceText.From(HandlebarsCodeGenerator.Create(_contractIgnoreAttributeResourceName).Generate(new CodeGenContext()), Encoding.UTF8)); + ctx.AddSource("referencedataattribute.g.cs", SourceText.From(HandlebarsCodeGenerator.Create(_refDataAttributeResourceName).Generate(new CodeGenContext()), Encoding.UTF8)); + ctx.AddSource("referencedatatattribute.g.cs", SourceText.From(HandlebarsCodeGenerator.Create(_refDataTAttributeResourceName).Generate(new CodeGenContext()), Encoding.UTF8)); + ctx.AddSource("referencedatacodecollectiontattribute.g.cs", SourceText.From(HandlebarsCodeGenerator.Create(_refDataCodeCollectionTAttributeResourceName).Generate(new CodeGenContext()), Encoding.UTF8)); + ctx.AddSource("stringattribute.g.cs", SourceText.From(HandlebarsCodeGenerator.Create(_stringAttributeResourceName).Generate(new CodeGenContext()), Encoding.UTF8)); + ctx.AddSource("datetimeattribute.g.cs", SourceText.From(HandlebarsCodeGenerator.Create(_dateTimeAttributeResourceName).Generate(new CodeGenContext()), Encoding.UTF8)); + ctx.AddSource("cleanattribute.g.cs", SourceText.From(HandlebarsCodeGenerator.Create(_cleanAttributeResourceName).Generate(new CodeGenContext()), Encoding.UTF8)); + }); + + // Register the source generator for the above 'ContractAttribute' class usage. 
+ var provider = context.SyntaxProvider.ForAttributeWithMetadataName( + fullyQualifiedMetadataName: "CoreEx.Entities.ContractAttribute", + predicate: static (syntaxNode, cancellationToken) => syntaxNode is ClassDeclarationSyntax || syntaxNode is RecordDeclarationSyntax, + transform: static (context, cancellationToken) => ContractModel.Create(context, cancellationToken) + ); + + // Register the source output to generate the resulting contract partial class contents. + context.RegisterSourceOutput(provider, (context, model) => + { + try + { + if (!model.ReportDiagnostics(context)) + return; // Do not generate as there are errors. + + if (model.IContract == GenApproach.Undetermined) + return; // No need to generate if IContract is already declared. + + var sourceText = SourceText.From(_codeGenerator.Generate(model), Encoding.UTF8); + context.AddSource($"{model.ClassName}.contract.g.cs", sourceText); + } + catch (System.Exception ex) + { + var descriptor = new DiagnosticDescriptor( + id: "CoreEx000", + title: "Contract generation error.", + messageFormat: "An error occurred while generating an 'ContractAttribute': {0}", + category: "CoreEx", + defaultSeverity: DiagnosticSeverity.Error, + isEnabledByDefault: true); + + context.ReportDiagnostic(Diagnostic.Create(descriptor, null, ex.Message)); + } + }); + } +} \ No newline at end of file diff --git a/gen/CoreEx.Gen/ContractModel.cs b/gen/CoreEx.Gen/ContractModel.cs new file mode 100644 index 00000000..5cf67569 --- /dev/null +++ b/gen/CoreEx.Gen/ContractModel.cs @@ -0,0 +1,657 @@ +using CoreEx.Gen.Utility; +using HandlebarsDotNet; +using Microsoft.CodeAnalysis; +using Microsoft.CodeAnalysis.CSharp.Syntax; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Threading; + +namespace CoreEx.Gen; + +/// +/// Represents the ContractAttribute class model configuration used to drive the underlying partial class source generation. 
+/// +internal class ContractModel : CodeGenContext +{ + /// + /// Gets the namespace of the contract. + /// + public string? Namespace { get; private set; } + + /// + /// Gets the class name of the contract. + /// + public string? ClassName { get; private set; } + + /// + /// Gets the containing type hierarchy of the contract. + /// + public List? ContainingTypeHierarchy { get; private set; } + + /// + /// Indicates whether the contract is a record; otherwise, indicates a class. + /// + public bool IsRecord { get; private set; } + + /// + /// Indicates whether the contract has a base type. + /// + public bool HasBaseType => BaseType is not null && !BaseType.Equals("object", System.StringComparison.OrdinalIgnoreCase); + + /// + /// Gets the base type of the contract. + /// + public string? BaseType { get; private set; } + + /// + /// Gets the for the contract. + /// + public GenApproach IContract { get; private set; } + + /// + /// Gets the list of properties for the contract. + /// + public List Properties { get; } = []; + + /// + /// Gets the list of properties that are to be code-generated as declared as partial. + /// + public IEnumerable PartialProperties => Properties.Where(p => p.IsPartial); + + /// + /// Create the from the . + /// + /// The . + /// The . + /// The . + public static ContractModel Create(GeneratorAttributeSyntaxContext context, CancellationToken cancellationToken) + { + try + { + return context.TargetNode is ClassDeclarationSyntax ? 
CreateForClass(context, cancellationToken) : CreateForRecord(context, cancellationToken); + } + catch (System.Exception ex) + { + var descriptor = new DiagnosticDescriptor( + id: "CoreEx000", + title: "Contract generation error.", + messageFormat: "An error occurred while generating an 'ContractAttribute': {0}", + category: "CoreEx", + defaultSeverity: DiagnosticSeverity.Error, + isEnabledByDefault: true); + + return new ContractModel { IContract = GenApproach.Undetermined, Diagnostics = { Diagnostic.Create(descriptor, null, ex.Message) } }; + } + } + + /// + /// Create the from the . + /// + private static ContractModel CreateForClass(GeneratorAttributeSyntaxContext context, CancellationToken cancellationToken) + { + var syntax = (ClassDeclarationSyntax)context.TargetNode; + var symbol = (INamedTypeSymbol)context.SemanticModel.GetDeclaredSymbol(syntax)!; + + var model = new ContractModel + { + Namespace = context.TargetSymbol.ContainingType is null + ? context.TargetSymbol.ContainingNamespace?.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat.WithGlobalNamespaceStyle(SymbolDisplayGlobalNamespaceStyle.Omitted)) + : context.TargetSymbol.ContainingType.ContainingNamespace?.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat.WithGlobalNamespaceStyle(SymbolDisplayGlobalNamespaceStyle.Omitted)), + ContainingTypeHierarchy = context.TargetSymbol.ContainingType is null ? [] : GetContainingTypeHierarchy(context.TargetSymbol.ContainingType), + ClassName = symbol.Name + }; + + return CreateForStandard(context, symbol, model, cancellationToken); + } + + /// + /// Create the from the . + /// + private static ContractModel CreateForRecord(GeneratorAttributeSyntaxContext context, CancellationToken cancellationToken) + { + var syntax = (RecordDeclarationSyntax)context.TargetNode; + var symbol = (INamedTypeSymbol)context.SemanticModel.GetDeclaredSymbol(syntax)!; + + var model = new ContractModel + { + Namespace = context.TargetSymbol.ContainingType is null + ? 
context.TargetSymbol.ContainingNamespace?.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat.WithGlobalNamespaceStyle(SymbolDisplayGlobalNamespaceStyle.Omitted)) + : context.TargetSymbol.ContainingType.ContainingNamespace?.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat.WithGlobalNamespaceStyle(SymbolDisplayGlobalNamespaceStyle.Omitted)), + ContainingTypeHierarchy = context.TargetSymbol.ContainingType is null ? [] : GetContainingTypeHierarchy(context.TargetSymbol.ContainingType), + ClassName = symbol.Name, + IsRecord = true + }; + + return CreateForStandard(context, symbol, model, cancellationToken); + } + + /// + /// Continues the create for the standardized behaviour. + /// + private static ContractModel CreateForStandard(GeneratorAttributeSyntaxContext context, INamedTypeSymbol symbol, ContractModel model, CancellationToken cancellationToken) + { + // Determine whether already implements IContract where T is itself. + var iContractSymbol = context.SemanticModel.Compilation.GetTypeByMetadataName("CoreEx.Entities.IContract`1"); + if (AlreadyImplementsIContractGenericSelf(symbol, iContractSymbol)) + return model; + + // Determine whether IContract is the base/interface implementation. + model.IContract = IsBaseInterfaceImplementation(symbol, iContractSymbol!) ? GenApproach.Declare : GenApproach.Override; + model.BaseType = symbol.BaseType?.ToDisplayString(); + + // Check the cancellation token. + cancellationToken.ThrowIfCancellationRequested(); + + // Get the symbol for IReferenceData. 
+ var iRefDataSymbol = context.SemanticModel.Compilation.GetTypeByMetadataName("CoreEx.RefData.Abstractions.IReferenceData"); + if (symbol.AllInterfaces.FirstOrDefault(x => SymbolEqualityComparer.Default.Equals(x.OriginalDefinition, iRefDataSymbol)) is not null) + { + var descriptor = new DiagnosticDescriptor( + id: "CoreEx010", + title: "ContractAttribute is not supported.", + messageFormat: "The ContractAttribute is not supported where the class/record implements CoreEx.RefData.Abstractions.IReferenceData; alternatively, consider using the ReferenceDataAttribute.", + category: "CoreEx", + defaultSeverity: DiagnosticSeverity.Error, + isEnabledByDefault: true); + + model.Diagnostics.Add(Diagnostic.Create(descriptor, symbol.Locations.FirstOrDefault(), symbol.Name)); + } + + // Get the list of properties which as a minimum do a get. + foreach (var p in symbol.GetMembers().OfType().Where(p => p.GetMethod is not null)) + { + if (p.GetAttributes().FirstOrDefault(a => a.AttributeClass?.ToDisplayString() == "CoreEx.Entities.ContractIgnoreAttribute") is not null) + continue; // Ignore properties with ContractIgnoreAttribute. + + var emp = new PropertyModel + { + Context = model, + Name = p.Name, + IsReadonly = p.SetMethod is null, + IsInitOnly = p.SetMethod?.IsInitOnly ?? 
false, + IsRequired = p.IsRequired, + Type = FormatTypeWithNullability(p.Type.ToDisplayString(ContractGenerator.FullyQualifiedWithNullability), p.NullableAnnotation), + JsonName = p.GetAttributes().FirstOrDefault(a => a.AttributeClass?.ToDisplayString() == "System.Text.Json.Serialization.JsonPropertyNameAttribute")?.ConstructorArguments.FirstOrDefault().Value as string, + FallbackText = GetDisplayAttributeName(p), + Default = p.DeclaringSyntaxReferences.Select(ds => ds.GetSyntax()).OfType().Select(ps => GetDefaultConstant(ps, context.SemanticModel)).FirstOrDefault(), + Format = GetDisplayFormatAttributeDataFormatString(p) + }; + + if (model.IsRecord && emp.Name == "EqualityContract") + continue; + + emp.KeyAndOrText = emp.HasFallbackText ? emp.Name : null; + + ManageLocalizationAttribute(p, emp); + ManageStringAttributeProperty(p, emp); + ManageDateTimeAttributeProperty(p, emp); + ManageCleanAttributeProperty(p, emp); + ManageReferenceDataAttributeProperty(p, emp); + ManageReferenceDataCodeCollectionAttributeProperty(p, emp); + + model.Properties.Add(emp); + } + + return model; + } + + /// + /// Formats the type with nullability. + /// + /// The type name. + /// The . + /// The type name with added as required. + internal static string FormatTypeWithNullability(string type, NullableAnnotation nullableAnnotation) => nullableAnnotation == NullableAnnotation.Annotated && !type.EndsWith("?") ? type + "?" : type; + + /// + /// Gets the name from the DisplayAttribute where defined. + /// + internal static string? GetDisplayAttributeName(IPropertySymbol propertySymbol) + { + var att = propertySymbol.GetAttributes().FirstOrDefault(a => a.AttributeClass?.OriginalDefinition.ToDisplayString() == "System.ComponentModel.DataAnnotations.DisplayAttribute"); + if (att is null) + return null; + + var na = att.NamedArguments.FirstOrDefault(na => na.Key == "Name"); + if (na.Key is not null && na.Value.Value is string name) + return string.IsNullOrEmpty(name) ? 
null : name; + + return null; + } + + /// + /// Gets the format from the DisplayFormatAttribute where defined. + /// + internal static string? GetDisplayFormatAttributeDataFormatString(IPropertySymbol propertySymbol) + { + var att = propertySymbol.GetAttributes().FirstOrDefault(a => a.AttributeClass?.OriginalDefinition.ToDisplayString() == "System.ComponentModel.DataAnnotations.DisplayFormatAttribute"); + if (att is null) + return null; + + var na = att.NamedArguments.FirstOrDefault(na => na.Key == "DataFormatString"); + if (na.Key is not null && na.Value.Value is string format) + return string.IsNullOrEmpty(format) ? null : format; + + return null; + } + + /// + /// Gets the default constant value from the property syntax. + /// + internal static string? GetDefaultConstant(PropertyDeclarationSyntax propertySyntax, SemanticModel semanticModel) + { + // Check if there's an initializer + var initializer = propertySyntax.Initializer?.Value; + if (initializer is null) + return null; + + // Try to get the constant value + var constantValue = semanticModel.GetConstantValue(initializer); + if (constantValue.HasValue) + return initializer.ToString(); + + // Where not constant then we can not reliably determine the value, so default for you! + return initializer.ToString(); + } + + /// + /// Determine type declaration hierarchy. + /// + /// The . + /// The resulting hierarchy list. + internal static List GetContainingTypeHierarchy(INamedTypeSymbol? type) + { + static void AddContainingTypeHierarchy(INamedTypeSymbol? type, List list) + { + if (type is null) + return; + + list.Insert(0, type.ToDisplayString(SymbolDisplayFormat.MinimallyQualifiedFormat)); + AddContainingTypeHierarchy(type.ContainingType, list); + } + + var list = new List(); + AddContainingTypeHierarchy(type, list); + return list; + } + + /// + /// Determines whether the already implements somewhere in its parent hierarchy. 
+ /// + private static bool AlreadyImplementsIContractGenericSelf(INamedTypeSymbol symbol, INamedTypeSymbol? interfaceSymbol) + { + foreach (var iface in symbol.AllInterfaces) + { + if (SymbolEqualityComparer.Default.Equals(iface.OriginalDefinition, interfaceSymbol) && iface.TypeArguments.Length == 1 && SymbolEqualityComparer.Default.Equals(iface.TypeArguments[0], symbol)) + return true; + } + + return false; + } + + /// + /// Check whether this is considered the base interface implementation. + /// + private static bool IsBaseInterfaceImplementation(INamedTypeSymbol symbol, INamedTypeSymbol interfaceSymbol) + { + if (symbol.BaseType is null || symbol.SpecialType == SpecialType.System_Object) + return true; + + if (!symbol.BaseType.Locations.Any(loc => loc.IsInSource)) + return !symbol.BaseType.AllInterfaces.Any(x => SymbolEqualityComparer.Default.Equals(x.OriginalDefinition, interfaceSymbol)); + + if (symbol.BaseType.GetAttributes().Any(a => a.AttributeClass?.ToDisplayString() == "CoreEx.Entities.ContractAttribute")) + return false; + + return IsBaseInterfaceImplementation(symbol.BaseType, interfaceSymbol); + } + + /// + /// Determines and manages the LocalizationAttribute property configuration. + /// + internal static void ManageLocalizationAttribute(IPropertySymbol propertySymbol, PropertyModel model) + { + var att = propertySymbol.GetAttributes().FirstOrDefault(a => a.AttributeClass?.OriginalDefinition.ToDisplayString() == "CoreEx.Localization.LocalizationAttribute"); + if (att is null) + return; + + var kt = att.ConstructorArguments.Length < 1 ? null : att.ConstructorArguments[0].Value as string; + var ft = att.ConstructorArguments.Length < 2 ? null : att.ConstructorArguments[1].Value as string; + + if (!string.IsNullOrEmpty(kt)) + { + model.KeyAndOrText = kt; + model.FallbackText = ft; + } + } + + /// + /// Determines and manages the property configuration. + /// + /// The . + /// The . 
+ internal static void ManageStringAttributeProperty(IPropertySymbol propertySymbol, PropertyModel model) + { + var att = propertySymbol.GetAttributes().FirstOrDefault(a => a.AttributeClass?.OriginalDefinition.ToDisplayString() == "CoreEx.Entities.StringAttribute"); + if (att is null) + return; + + if (propertySymbol.Type.SpecialType != SpecialType.System_String) + { + var descriptor = new DiagnosticDescriptor( + id: "CoreEx006", + title: "Property type invalid.", + messageFormat: "Property '{0}' must be declared with a type of 'string' to enable 'StringAttribute' capabilities.", + category: "CoreEx", + defaultSeverity: DiagnosticSeverity.Error, + isEnabledByDefault: true); + + model.Context!.Diagnostics.Add(Diagnostic.Create(descriptor, propertySymbol.Locations.FirstOrDefault(), propertySymbol.Name)); + return; + } + + if (!propertySymbol.IsPartialDefinition) + { + var descriptor = new DiagnosticDescriptor( + id: "CoreEx007", + title: "Property must be partial.", + messageFormat: "Property '{0}' must be declared as 'partial' to enable 'StringAttribute' capabilities.", + category: "CoreEx", + defaultSeverity: DiagnosticSeverity.Error, + isEnabledByDefault: true); + + model.Context!.Diagnostics.Add(Diagnostic.Create(descriptor, propertySymbol.Locations.FirstOrDefault(), propertySymbol.Name)); + return; + } + + if (model.IsReadonly) + { + var descriptor = new DiagnosticDescriptor( + id: "CoreEx008", + title: "Property must support get and set.", + messageFormat: "Property '{0}' must be declared with a 'get' and 'set' to enable 'StringAttribute' capabilities.", + category: "CoreEx", + defaultSeverity: DiagnosticSeverity.Error, + isEnabledByDefault: true); + + model.Context!.Diagnostics.Add(Diagnostic.Create(descriptor, propertySymbol.Locations.FirstOrDefault(), propertySymbol.Name)); + } + + model.IsPartial = true; + model.IsSelfCleanedString = true; + model.StringTrim = (att.ConstructorArguments.Length < 1 ? null : GetEnumFriendlyName(att.ConstructorArguments[0])) ?? 
"UseDefault"; + model.StringTransform = (att.ConstructorArguments.Length < 2 ? null : GetEnumFriendlyName(att.ConstructorArguments[1])) ?? "UseDefault"; + model.StringCase = (att.ConstructorArguments.Length < 3 ? null : GetEnumFriendlyName(att.ConstructorArguments[2])) ?? "UseDefault"; + } + + /// + /// Determines and manages the property configuration. + /// + /// The . + /// The . + internal static void ManageDateTimeAttributeProperty(IPropertySymbol propertySymbol, PropertyModel model) + { + var att = propertySymbol.GetAttributes().FirstOrDefault(a => a.AttributeClass?.OriginalDefinition.ToDisplayString() == "CoreEx.Entities.DateTimeAttribute"); + if (att is null) + return; + + var ds = propertySymbol.Type.ToDisplayString(); + if (ds != "System.DateTime" && ds != "System.DateTime?") + { + var descriptor = new DiagnosticDescriptor( + id: "CoreEx009", + title: "Property type invalid.", + messageFormat: "Property '{0}' must be declared with a type of 'DateTime' to enable 'DateTimeAttribute' capabilities.", + category: "CoreEx", + defaultSeverity: DiagnosticSeverity.Error, + isEnabledByDefault: true); + + model.Context!.Diagnostics.Add(Diagnostic.Create(descriptor, propertySymbol.Locations.FirstOrDefault(), propertySymbol.Name)); + return; + } + + if (!propertySymbol.IsPartialDefinition) + { + var descriptor = new DiagnosticDescriptor( + id: "CoreEx011", + title: "Property must be partial.", + messageFormat: "Property '{0}' must be declared as 'partial' to enable 'DateTimeAttribute' capabilities.", + category: "CoreEx", + defaultSeverity: DiagnosticSeverity.Error, + isEnabledByDefault: true); + + model.Context!.Diagnostics.Add(Diagnostic.Create(descriptor, propertySymbol.Locations.FirstOrDefault(), propertySymbol.Name)); + return; + } + + if (model.IsReadonly) + { + var descriptor = new DiagnosticDescriptor( + id: "CoreEx012", + title: "Property must support get and set.", + messageFormat: "Property '{0}' must be declared with a 'get' and 'set' to enable 
'DateTimeAttribute' capabilities.", + category: "CoreEx", + defaultSeverity: DiagnosticSeverity.Error, + isEnabledByDefault: true); + + model.Context!.Diagnostics.Add(Diagnostic.Create(descriptor, propertySymbol.Locations.FirstOrDefault(), propertySymbol.Name)); + } + + model.IsPartial = true; + model.IsSelfCleanedDateTime = true; + model.DateTimeTransform = (att.ConstructorArguments.Length < 1 ? null : GetEnumFriendlyName(att.ConstructorArguments[0])) ?? "UseDefault"; + } + + /// + /// Determines and manages the clean property configuration. + /// + /// The . + /// The . + internal static void ManageCleanAttributeProperty(IPropertySymbol propertySymbol, PropertyModel model) + { + var att = propertySymbol.GetAttributes().FirstOrDefault(a => a.AttributeClass?.OriginalDefinition.ToDisplayString() == "CoreEx.Entities.CleanAttribute"); + if (att is null) + return; + + model.IsCleanOption = true; + model.CleanOption = GetEnumFriendlyName(att.ConstructorArguments[0]) ?? "UseDefault"; + } + + /// + /// Gets the friendly name from the . + /// + private static string? GetEnumFriendlyName(TypedConstant arg) + { + if (arg.Kind != TypedConstantKind.Enum || arg.Value is not int ev) + return null; + + var enumType = (INamedTypeSymbol)arg.Type!; + var member = enumType + .GetMembers() + .OfType() + .FirstOrDefault(f => f.HasConstantValue && (int)f.ConstantValue! == ev); + + return member?.Name; + } + + /// + /// Determines and manages the reference data property configuration. + /// + /// The . + /// The . 
+ internal static void ManageReferenceDataAttributeProperty(IPropertySymbol propertySymbol, PropertyModel model) + { + var att = propertySymbol.GetAttributes().FirstOrDefault(a => a.AttributeClass?.OriginalDefinition.ToDisplayString() == "CoreEx.RefData.ReferenceDataAttribute"); + if (att is null || att.AttributeClass is null || !att.AttributeClass!.IsGenericType) + return; + + model.IsRefData = true; + model.RefDataType = FormatTypeWithNullability(att.AttributeClass.TypeArguments.FirstOrDefault()?.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat)!, propertySymbol.NullableAnnotation); + + if (propertySymbol.Type.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat) != "string" && propertySymbol.Type.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat) != "string?") + { + var descriptor = new DiagnosticDescriptor( + id: "CoreEx001", + title: "Reference Data property type invalid.", + messageFormat: "Reference Data property '{0}' must be declared with a type of 'string' to enable.", + category: "CoreEx", + defaultSeverity: DiagnosticSeverity.Error, + isEnabledByDefault: true); + + model.Context!.Diagnostics.Add(Diagnostic.Create(descriptor, propertySymbol.Locations.FirstOrDefault(), propertySymbol.Name)); + model.IsRefData = false; + } + + if (!propertySymbol.IsPartialDefinition) + { + var descriptor = new DiagnosticDescriptor( + id: "CoreEx002", + title: "Reference Data property must be partial.", + messageFormat: "Reference Data property '{0}' must be declared as 'partial' to enable.", + category: "CoreEx", + defaultSeverity: DiagnosticSeverity.Error, + isEnabledByDefault: true); + + model.Context!.Diagnostics.Add(Diagnostic.Create(descriptor, propertySymbol.Locations.FirstOrDefault(), propertySymbol.Name)); + model.IsRefData = false; + } + + if (!(propertySymbol.Name.Length >= 4 && propertySymbol.Name.EndsWith("Sid", System.StringComparison.OrdinalIgnoreCase)) + && !(propertySymbol.Name.Length >= 5 && propertySymbol.Name.EndsWith("Code", 
System.StringComparison.OrdinalIgnoreCase))) + { + var descriptor = new DiagnosticDescriptor( + id: "CoreEx004", + title: "Reference Data property name invalid.", + messageFormat: "Reference Data property '{0}' must be declared by convention with a name that ends with 'Sid' (Serializer Identifier) or 'Code'.", + category: "CoreEx", + defaultSeverity: DiagnosticSeverity.Error, + isEnabledByDefault: true); + + model.Context!.Diagnostics.Add(Diagnostic.Create(descriptor, propertySymbol.Locations.FirstOrDefault(), propertySymbol.Name)); + model.IsRefData = false; + } + + if (propertySymbol.DeclaredAccessibility != Accessibility.Public || (propertySymbol.GetMethod?.DeclaredAccessibility ?? Accessibility.Public) != Accessibility.Public || (propertySymbol.SetMethod?.DeclaredAccessibility ?? Accessibility.Public) != Accessibility.Public) + { + var descriptor = new DiagnosticDescriptor( + id: "CoreEx005", + title: "Reference Data property accessibility invalid.", + messageFormat: "Reference data property '{0}' must be declared with 'public' accessibility only.", + category: "CoreEx", + defaultSeverity: DiagnosticSeverity.Error, + isEnabledByDefault: true); + + model.Context!.Diagnostics.Add(Diagnostic.Create(descriptor, propertySymbol.Locations.FirstOrDefault(), propertySymbol.Name)); + model.IsRefData = false; + } + + if (!model.IsRefData) + return; + + model.IsPartial = true; + model.IsRefDataJson = !propertySymbol.GetAttributes().Any(a => a.AttributeClass?.ToDisplayString()?.StartsWith("System.Text.Json.Serialization.Json") ?? false); + + // Camelcase the property name for JSON serialization. + model.JsonName ??= model.RefDataName!.Length == 1 ? model.RefDataName.ToLowerInvariant() : char.ToLower(model.RefDataName[0], CultureInfo.InvariantCulture) + model.RefDataName.Substring(1); + } + + /// + /// Determines and manages the reference data code collection property configuration. + /// + /// The . + /// The . 
+ internal static void ManageReferenceDataCodeCollectionAttributeProperty(IPropertySymbol propertySymbol, PropertyModel model) + { + var att = propertySymbol.GetAttributes().FirstOrDefault(a => a.AttributeClass?.OriginalDefinition.ToDisplayString() == "CoreEx.RefData.ReferenceDataCodeCollectionAttribute"); + if (att is null || att.AttributeClass is null || !att.AttributeClass!.IsGenericType) + return; + + model.IsPartial = true; + model.IsRefDataCodeCollection = true; + model.RefDataType = att.AttributeClass.TypeArguments.FirstOrDefault()?.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat)!; + + if (propertySymbol.Type.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat) != "global::System.Collections.Generic.List") + { + var descriptor = new DiagnosticDescriptor( + id: "CoreEx002", + title: "Reference Data property type invalid.", + messageFormat: "Reference Data code collection property '{0}' must be declared with a type of 'List'/'List' to enable.", + category: "CoreEx", + defaultSeverity: DiagnosticSeverity.Error, + isEnabledByDefault: true); + + model.Context!.Diagnostics.Add(Diagnostic.Create(descriptor, propertySymbol.Locations.FirstOrDefault(), propertySymbol.Name)); + model.IsRefDataCodeCollection = false; + } + + if (!propertySymbol.IsPartialDefinition) + { + var descriptor = new DiagnosticDescriptor( + id: "CoreEx002", + title: "Reference Data property must be partial.", + messageFormat: "Reference Data code collection property '{0}' must be declared as 'partial' to enable.", + category: "CoreEx", + defaultSeverity: DiagnosticSeverity.Error, + isEnabledByDefault: true); + + model.Context!.Diagnostics.Add(Diagnostic.Create(descriptor, propertySymbol.Locations.FirstOrDefault(), propertySymbol.Name)); + model.IsRefDataCodeCollection = false; + } + + if (!(propertySymbol.Name.Length >= 5 && propertySymbol.Name.EndsWith("Sids", System.StringComparison.OrdinalIgnoreCase)) + && !(propertySymbol.Name.Length >= 6 && 
propertySymbol.Name.EndsWith("Codes", System.StringComparison.OrdinalIgnoreCase))) + { + var descriptor = new DiagnosticDescriptor( + id: "CoreEx004", + title: "Reference Data property name invalid.", + messageFormat: "Reference Data code collection property '{0}' must be declared by convention with a name that ends with 'Sids' (Serializer Identifiers) or 'Codes'.", + category: "CoreEx", + defaultSeverity: DiagnosticSeverity.Error, + isEnabledByDefault: true); + + model.Context!.Diagnostics.Add(Diagnostic.Create(descriptor, propertySymbol.Locations.FirstOrDefault(), propertySymbol.Name)); + model.IsRefDataCodeCollection = false; + } + } + + /// + public override bool Equals(object obj) + { + if (ReferenceEquals(this, obj)) + return true; + + if (obj is not ContractModel other) + return false; + + if (Namespace != other.Namespace + || ClassName != other.ClassName + || IsRecord != other.IsRecord + || IContract != other.IContract + || BaseType != other.BaseType) + return false; + + if (Enumerable.SequenceEqual(ContainingTypeHierarchy ?? [], other.ContainingTypeHierarchy ?? []) && Enumerable.SequenceEqual(Properties, other.Properties)) + return true; + + return false; + } + + /// + public override int GetHashCode() + { + var hash = (Namespace?.GetHashCode() ?? 0) + ^ (ClassName?.GetHashCode() ?? 0) + ^ IsRecord.GetHashCode() + ^ IContract.GetHashCode() + ^ (BaseType?.GetHashCode() ?? 
0); + + if (ContainingTypeHierarchy is not null) + hash ^= ContainingTypeHierarchy.Aggregate(0, (current, item) => current ^ item.GetHashCode()); + + if (Properties is not null) + hash ^= Properties.Aggregate(0, (current, item) => current ^ item.GetHashCode()); + + return hash; + } +} \ No newline at end of file diff --git a/gen/CoreEx.Gen/CoreEx.Gen.csproj b/gen/CoreEx.Gen/CoreEx.Gen.csproj new file mode 100644 index 00000000..c278ddc6 --- /dev/null +++ b/gen/CoreEx.Gen/CoreEx.Gen.csproj @@ -0,0 +1,64 @@ + + + + netstandard2.0 + enable + preview + true + true + Analyzer + + false + $(GetTargetPathDependsOn);GetDependencyTargetPaths + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + false + + + + + + + true + + + + + + <_AnalyzerDep Include="$(PkgPluralize_NET)\lib\netstandard2.0\Pluralize.NET.dll" /> + <_AnalyzerDep Include="$(PkgHandlebars_Net)\lib\netstandard2.0\Handlebars.dll" /> + + + + + + + diff --git a/gen/CoreEx.Gen/GenApproach.cs b/gen/CoreEx.Gen/GenApproach.cs new file mode 100644 index 00000000..f9c69e81 --- /dev/null +++ b/gen/CoreEx.Gen/GenApproach.cs @@ -0,0 +1,27 @@ +namespace CoreEx.Gen; + +/// +/// Defines the approach to code generation for a given scenario. +/// +internal enum GenApproach +{ + /// + /// No approach has been determined; i.e. initial state. + /// + Undetermined, + + /// + /// Declares new code (as virtual), but does not override any existing code. + /// + Declare, + + /// + /// Overrides existing code, also calling into the base implementation as appropriate. + /// + Override, + + /// + /// Bypasses code generation as determined as not required (i.e. manually implemented) or unable (i.e. sealed). 
+ /// + Bypass +} \ No newline at end of file diff --git a/gen/CoreEx.Gen/Properties/launchSettings.json b/gen/CoreEx.Gen/Properties/launchSettings.json new file mode 100644 index 00000000..5d4005ad --- /dev/null +++ b/gen/CoreEx.Gen/Properties/launchSettings.json @@ -0,0 +1,8 @@ +{ + "profiles": { + "Roslyn": { + "commandName": "DebugRoslynComponent", + "targetProject": "..\\..\\samples\\src\\Contoso.Products.Contracts\\Contoso.Products.Contracts.csproj" + } + } +} \ No newline at end of file diff --git a/gen/CoreEx.Gen/PropertyModel.cs b/gen/CoreEx.Gen/PropertyModel.cs new file mode 100644 index 00000000..81d05abe --- /dev/null +++ b/gen/CoreEx.Gen/PropertyModel.cs @@ -0,0 +1,255 @@ +using CoreEx.Gen.Utility; + +namespace CoreEx.Gen; + +/// +/// Represents the ContractAttribute class model's property configuration used to drive the underlying partial class source generation. +/// +internal class PropertyModel +{ + /// + /// Gets the owner of the property. + /// + public CodeGenContext? Context { get; set; } + + /// + /// Gets or sets the property name. + /// + public string? Name { get; set; } + + /// + /// Gets or sets the property type. + /// + public string? Type { get; set; } + + /// + /// Indicates whether the property type is a nullable value type. + /// + public bool IsNullableValueType { get; set; } + + /// + /// Indicates whether the property has been declared as partial. + /// + public bool IsPartial { get; set; } + + /// + /// Indicates whether the property is read-only (i.e. does not have a setter). + /// + public bool IsReadonly { get; set; } + + /// + /// Indicates whether the property is init-only (i.e. has an init setter syntax). + /// + public bool IsInitOnly { get; set; } + + /// + /// Indicates whether the property is settable (i.e. has a setter that is not init-only). + /// + public bool IsSettable => !IsReadonly && !IsInitOnly; + + /// + /// Indicates whether the property is required (i.e. has required syntax). 
+ /// + public bool IsRequired { get; set; } + + /// + /// Indicates whether the property has a . + /// + public bool HasJsonName => !string.IsNullOrEmpty(JsonName); + + /// + /// Gets or sets the JSON property name where different from the without the Code suffix for reference data serialization. + /// + public string? JsonName { get; set; } + + /// + /// Gets or sets the key and/or text for the property. + /// + public string? KeyAndOrText { get; set; } + + /// + /// Gets or sets the fallback text for the property. + /// + public string? FallbackText { get; set; } + + /// + /// Indicates whether the property has . + /// + public bool HasText => !string.IsNullOrEmpty(KeyAndOrText); + + /// + /// Indicates whether the property has . + /// + public bool HasFallbackText => !string.IsNullOrEmpty(FallbackText); + + /// + /// Gets or sets the default; being the corresponding c# code. + /// + public string? Default { get; set; } + + /// + /// Indicates whether the property has a . + /// + public bool HasDefault => !string.IsNullOrEmpty(Default); + + /// + /// Gets or sets the format string used when formatting the property value as a . + /// + public string? Format { get; set; } + + /// + /// Indicates whether the property has a . + /// + public bool HasFormat => Format is not null; + + /// + /// Indicates whether the property has been marked up with the ReferenceData<TRefData>. + /// + public bool IsRefData { get; set; } + + /// + /// Indicates whether the property name ends with the 'Sid' or 'Sids' suffix. + /// + public bool IsSuffixSid => IsRefDataCodeCollection + ? Name is not null && Name.EndsWith("Sids") + : Name is not null && Name.EndsWith("Sid"); + + /// + /// Gets the reference data name. + /// + public string? RefDataName => Name is not null && IsRefData + ? (IsSuffixSid ? Name.Substring(0, Name.Length - 3) : Name.Substring(0, Name.Length - 4)) + : Name is not null && IsRefDataCodeCollection + ? Pluralizer.Instance.Pluralize(IsSuffixSid ? 
Name.Substring(0, Name.Length - 4) : Name.Substring(0, Name.Length - 5)) : Name; + + /// + /// Gets or sets the reference data type where is . + /// + public string? RefDataType { get; set; } + + /// + /// Indicates whether the json serialization attribute is required. + /// + public bool IsRefDataJson { get; set; } + + /// + /// Indicates whether an additional Text property is required. + /// + public bool IsRefDataText { get; set; } + + /// + /// Gets or sets the JSON property name used for reference data text serialization. + /// + public string? RefDataTextJsonName { get; set; } + + /// + /// Indicates whether the property has been marked up with the ReferenceDataCodeCollection<TRefData>. + /// + public bool IsRefDataCodeCollection { get; set; } + + /// + /// Gets the corresponding backing field name for the property. + /// + public string RefDataCodeCollectionFieldName => $"_{char.ToLowerInvariant(Name![0])}{Name.Substring(1)}"; + + /// + /// Gets the JSON property name used to represent the reference data code collection. + /// + public string RefDataCodeCollectionJsonName => $"{char.ToLowerInvariant(RefDataName![0])}{RefDataName.Substring(1)}"; + + /// + /// Indicates whether the property is self-cleaned (i.e. has a StringAttribute or DataTimeAttribute declared). + /// + public bool IsSelfCleaned => IsSelfCleanedString || IsSelfCleanedDateTime || IsCleanOption; + + /// + /// Indicates whether the property is self-cleaned as a value (i.e. has a StringAttribute declared). + /// + public bool IsSelfCleanedString { get; set; } + + /// + /// Gets or sets the trim. + /// + public string? StringTrim { get; set; } + + /// + /// Gets or sets the transform. + /// + public string? StringTransform { get; set; } + + /// + /// Gets or sets the casing. + /// + public string? StringCase { get; set; } + + /// + /// Indicates whether the property is self-cleaned as a value (i.e. has a DateTimeAttribute declared). 
+ /// + public bool IsSelfCleanedDateTime { get; set; } + + /// + /// Gets or sets the transform. + /// + public string? DateTimeTransform { get; set; } + + /// + /// Indicates whether the property has a clean option specified. + /// + public bool IsCleanOption { get; set; } + + /// + /// Gets or sets the clean option. + /// + public string? CleanOption { get; set; } = "UseDefault"; + + /// + public override bool Equals(object obj) + { + if (ReferenceEquals(this, obj)) + return true; + + if (obj is not PropertyModel other) + return false; + + if (Name != other.Name || Type != other.Type || IsNullableValueType != other.IsNullableValueType || IsPartial != other.IsPartial || IsReadonly != other.IsReadonly || IsInitOnly != other.IsInitOnly || IsRequired != other.IsRequired + || KeyAndOrText != other.KeyAndOrText || FallbackText != other.FallbackText || JsonName != other.JsonName || Default != other.Default || Format != other.Format + || IsRefData != other.IsRefData || RefDataType != other.RefDataType + || IsRefDataJson != other.IsRefDataJson || IsRefDataText != other.IsRefDataText || RefDataTextJsonName != other.RefDataTextJsonName + || IsRefDataCodeCollection != other.IsRefDataCodeCollection + || IsSelfCleanedString != other.IsSelfCleanedString || StringTrim != other.StringTrim || StringTransform != other.StringTransform || StringCase != other.StringCase + || IsSelfCleanedDateTime != other.IsSelfCleanedDateTime || DateTimeTransform != other.DateTimeTransform + || IsCleanOption != other.IsCleanOption || CleanOption != other.CleanOption) + return false; + + return true; + } + + /// + public override int GetHashCode() + => Name?.GetHashCode() ?? 0 + ^ (Type?.GetHashCode() ?? 0) + ^ IsNullableValueType.GetHashCode() + ^ IsPartial.GetHashCode() + ^ IsReadonly.GetHashCode() + ^ IsInitOnly.GetHashCode() + ^ IsRequired.GetHashCode() + ^ (KeyAndOrText?.GetHashCode() ?? 0) + ^ (FallbackText?.GetHashCode() ?? 0) + ^ (JsonName?.GetHashCode() ?? 0) + ^ (Default?.GetHashCode() ?? 
0) + ^ (Format?.GetHashCode() ?? 0) + ^ IsRefData.GetHashCode() + ^ (RefDataType?.GetHashCode() ?? 0) + ^ IsRefDataJson.GetHashCode() + ^ IsRefDataText.GetHashCode() + ^ (RefDataTextJsonName?.GetHashCode() ?? 0) + ^ IsRefDataCodeCollection.GetHashCode() + ^ IsSelfCleanedString.GetHashCode() + ^ (StringTrim?.GetHashCode() ?? 0) + ^ (StringTransform?.GetHashCode() ?? 0) + ^ (StringCase?.GetHashCode() ?? 0) + ^ IsSelfCleanedDateTime.GetHashCode() + ^ (DateTimeTransform?.GetHashCode() ?? 0) + ^ IsCleanOption.GetHashCode() + ^ (CleanOption?.GetHashCode() ?? 0); +} \ No newline at end of file diff --git a/gen/CoreEx.Gen/ReferenceDataGenerator.cs b/gen/CoreEx.Gen/ReferenceDataGenerator.cs new file mode 100644 index 00000000..0e37ab0e --- /dev/null +++ b/gen/CoreEx.Gen/ReferenceDataGenerator.cs @@ -0,0 +1,58 @@ +using CoreEx.Gen.Utility; +using Microsoft.CodeAnalysis; +using Microsoft.CodeAnalysis.CSharp.Syntax; +using Microsoft.CodeAnalysis.Text; +using System.Text; + +namespace CoreEx.Gen; + +/// +/// Provides the 'ReferenceDataAttribute' implementation. +/// +[Generator] +public class ReferenceDataGenerator : IIncrementalGenerator +{ + private const string _templateResourceName = "CoreEx.Gen.Templates.ReferenceData.cs.hb"; + private readonly HandlebarsCodeGenerator _codeGenerator = HandlebarsCodeGenerator.Create(_templateResourceName); + + /// + public void Initialize(IncrementalGeneratorInitializationContext context) + { + // No RegisterPostInitializationOutput needed as handled by ContractGenerator (i.e. centralized singleton). + + // Register the source generator for the above 'ReferenceDataAttribute' class usage. 
+ var provider = context.SyntaxProvider.ForAttributeWithMetadataName( + fullyQualifiedMetadataName: "CoreEx.RefData.ReferenceDataAttribute", + predicate: static (syntaxNode, cancellationToken) => syntaxNode is ClassDeclarationSyntax || syntaxNode is RecordDeclarationSyntax, + transform: static (context, cancellationToken) => ReferenceDataModel.Create(context, cancellationToken) + ); + + // Register the source output to generate the resulting reference data partial class contents. + context.RegisterSourceOutput(provider, (context, model) => + { + try + { + if (!model.ReportDiagnostics(context)) + return; // Do not generate as there are errors. + + if (model.IReferenceData == GenApproach.Undetermined) + return; // No need to generate if IReferenceData is already declared. + + var sourceText = SourceText.From(_codeGenerator.Generate(model), Encoding.UTF8); + context.AddSource($"{model.ClassName}.refdata.g.cs", sourceText); + } + catch (System.Exception ex) + { + var descriptor = new DiagnosticDescriptor( + id: "CoreEx000", + title: "Reference data generation error.", + messageFormat: "An error occurred while generating a 'ReferenceDataAttribute': {0}", + category: "CoreEx", + defaultSeverity: DiagnosticSeverity.Error, + isEnabledByDefault: true); + + context.ReportDiagnostic(Diagnostic.Create(descriptor, null, ex.Message)); + } + }); + } +} \ No newline at end of file diff --git a/gen/CoreEx.Gen/ReferenceDataModel.cs b/gen/CoreEx.Gen/ReferenceDataModel.cs new file mode 100644 index 00000000..c3a7d3a2 --- /dev/null +++ b/gen/CoreEx.Gen/ReferenceDataModel.cs @@ -0,0 +1,210 @@ +using CoreEx.Gen.Utility; +using Microsoft.CodeAnalysis; +using Microsoft.CodeAnalysis.CSharp.Syntax; +using System.Collections.Generic; +using System.Linq; +using System.Threading; + +namespace CoreEx.Gen; + +/// +/// Represents the ReferenceDataAttribute class model configuration used to drive the underlying partial class source generation. 
+/// +internal class ReferenceDataModel : CodeGenContext +{ + /// + /// Gets the namespace of the contract. + /// + public string? Namespace { get; private set; } + + /// + /// Gets the class name of the contract. + /// + public string? ClassName { get; private set; } + + /// + /// Gets the containing type hierarchy of the contract. + /// + public List? ContainingTypeHierarchy { get; private set; } + + /// + /// Indicates whether the contract is a record; otherwise, indicates a class. + /// + public bool IsRecord { get; private set; } + + /// + /// Gets the for the reference data. + /// + public GenApproach IReferenceData { get; private set; } + + /// + /// Gets the list of properties for the contract. + /// + public List Properties { get; } = []; + + /// + /// Gets the list of properties that are to be code-generated as declared as partial. + /// + public IEnumerable PartialProperties => Properties.Where(p => p.IsRefData || p.IsSelfCleanedString || p.IsSelfCleanedDateTime); + + /// + /// Create the from the . + /// + /// The . + /// The . + /// The . + public static ReferenceDataModel Create(GeneratorAttributeSyntaxContext context, CancellationToken cancellationToken) + { + try + { + var model = context.TargetNode is ClassDeclarationSyntax ? CreateForClass(context, cancellationToken) : CreateForRecord(context, cancellationToken); + return model; + } + catch (System.Exception ex) + { + var descriptor = new DiagnosticDescriptor( + id: "CoreEx100", + title: "Reference data generation error.", + messageFormat: "An error occurred while generating an 'ReferenceDataAttribute': {0}", + category: "CoreEx", + defaultSeverity: DiagnosticSeverity.Error, + isEnabledByDefault: true); + + return new ReferenceDataModel { IReferenceData = GenApproach.Undetermined, Diagnostics = { Diagnostic.Create(descriptor, null, ex.Message) } }; + } + } + + /// + /// Create the from the . 
+ /// + private static ReferenceDataModel CreateForClass(GeneratorAttributeSyntaxContext context, CancellationToken cancellationToken) + { + var syntax = (ClassDeclarationSyntax)context.TargetNode; + var symbol = (INamedTypeSymbol)context.SemanticModel.GetDeclaredSymbol(syntax)!; + + var model = new ReferenceDataModel + { + Namespace = context.TargetSymbol.ContainingType is null + ? context.TargetSymbol.ContainingNamespace?.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat.WithGlobalNamespaceStyle(SymbolDisplayGlobalNamespaceStyle.Omitted)) + : context.TargetSymbol.ContainingType.ContainingNamespace?.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat.WithGlobalNamespaceStyle(SymbolDisplayGlobalNamespaceStyle.Omitted)), + ContainingTypeHierarchy = context.TargetSymbol.ContainingType is null ? [] : ContractModel.GetContainingTypeHierarchy(context.TargetSymbol.ContainingType), + ClassName = symbol.Name + }; + + return CreateForStandard(context, symbol, model, cancellationToken); + } + + /// + /// Create the from the . + /// + private static ReferenceDataModel CreateForRecord(GeneratorAttributeSyntaxContext context, CancellationToken cancellationToken) + { + var syntax = (RecordDeclarationSyntax)context.TargetNode; + var symbol = (INamedTypeSymbol)context.SemanticModel.GetDeclaredSymbol(syntax)!; + + var model = new ReferenceDataModel + { + Namespace = context.TargetSymbol.ContainingType is null + ? context.TargetSymbol.ContainingNamespace?.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat.WithGlobalNamespaceStyle(SymbolDisplayGlobalNamespaceStyle.Omitted)) + : context.TargetSymbol.ContainingType.ContainingNamespace?.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat.WithGlobalNamespaceStyle(SymbolDisplayGlobalNamespaceStyle.Omitted)), + ContainingTypeHierarchy = context.TargetSymbol.ContainingType is null ? 
[] : ContractModel.GetContainingTypeHierarchy(context.TargetSymbol.ContainingType), + ClassName = symbol.Name, + IsRecord = true + }; + + return CreateForStandard(context, symbol, model, cancellationToken); + } + + /// + /// Continues the create for the standardized behaviour. + /// + private static ReferenceDataModel CreateForStandard(GeneratorAttributeSyntaxContext context, INamedTypeSymbol symbol, ReferenceDataModel model, CancellationToken cancellationToken) + { + // Check the cancellation token. + cancellationToken.ThrowIfCancellationRequested(); + + // Get the symbol for IReferenceData. + var iRefDataSymbol = context.SemanticModel.Compilation.GetTypeByMetadataName("CoreEx.RefData.Abstractions.IReferenceData"); + if (symbol.AllInterfaces.FirstOrDefault(x => SymbolEqualityComparer.Default.Equals(x.OriginalDefinition, iRefDataSymbol)) is null) + { + var descriptor = new DiagnosticDescriptor( + id: "CoreEx101", + title: "ReferenceDataAttribute is not supported.", + messageFormat: "The ReferenceDataAttribute is not supported where the class/record does not implement CoreEx.RefData.Abstractions.IReferenceData; alternatively, consider using ContractAttribute.", + category: "CoreEx", + defaultSeverity: DiagnosticSeverity.Error, + isEnabledByDefault: true); + + model.Diagnostics.Add(Diagnostic.Create(descriptor, symbol.Locations.FirstOrDefault(), symbol.Name)); + } + + model.IReferenceData = GenApproach.Declare; + + // Get the list of get/set properties. + foreach (var p in symbol.GetMembers().OfType().Where(p => p.GetMethod is not null)) + { + if (p.GetAttributes().FirstOrDefault(a => a.AttributeClass?.ToDisplayString() == "CoreEx.Entities.ContractIgnoreAttribute") is not null) + continue; // Ignore properties with ContractIgnoreAttribute. + + var emp = new PropertyModel + { + Context = model, + Name = p.Name, + IsReadonly = p.SetMethod is null, + IsInitOnly = p.SetMethod?.IsInitOnly ?? 
false, + Type = ContractModel.FormatTypeWithNullability(p.Type.ToDisplayString(ContractGenerator.FullyQualifiedWithNullability), p.NullableAnnotation), + JsonName = p.GetAttributes().FirstOrDefault(a => a.AttributeClass?.ToDisplayString() == "System.Text.Json.Serialization.JsonPropertyNameAttribute")?.ConstructorArguments.FirstOrDefault().Value as string, + FallbackText = ContractModel.GetDisplayAttributeName(p), + Default = p.DeclaringSyntaxReferences.Select(ds => ds.GetSyntax()).OfType().Select(ps => ContractModel.GetDefaultConstant(ps, context.SemanticModel)).FirstOrDefault(), + Format = ContractModel.GetDisplayFormatAttributeDataFormatString(p) + }; + + if (model.IsRecord && emp.Name == "EqualityContract") + continue; + + emp.KeyAndOrText = emp.HasFallbackText ? emp.Name : null; + + ContractModel.ManageLocalizationAttribute(p, emp); + ContractModel.ManageStringAttributeProperty(p, emp); + ContractModel.ManageDateTimeAttributeProperty(p, emp); + ContractModel.ManageCleanAttributeProperty(p, emp); + ContractModel.ManageReferenceDataAttributeProperty(p, emp); + ContractModel.ManageReferenceDataCodeCollectionAttributeProperty(p, emp); + + model.Properties.Add(emp); + } + + return model; + } + + /// + public override bool Equals(object obj) + { + if (ReferenceEquals(this, obj)) + return true; + + if (obj is not ReferenceDataModel other) + return false; + + if (Namespace != other.Namespace || ClassName != other.ClassName || IsRecord != other.IsRecord || IReferenceData != other.IReferenceData) + return false; + + if (Enumerable.SequenceEqual(ContainingTypeHierarchy ?? [], other.ContainingTypeHierarchy ?? []) && Enumerable.SequenceEqual(Properties, other.Properties)) + return true; + + return false; + } + + /// + public override int GetHashCode() + { + var hash = (Namespace?.GetHashCode() ?? 0) ^ (ClassName?.GetHashCode() ?? 
0) ^ IsRecord.GetHashCode() ^ IReferenceData.GetHashCode(); + if (ContainingTypeHierarchy is not null) + hash ^= ContainingTypeHierarchy.Aggregate(0, (current, item) => current ^ item.GetHashCode()); + + if (Properties is not null) + hash ^= Properties.Aggregate(0, (current, item) => current ^ item.GetHashCode()); + + return hash; + } +} \ No newline at end of file diff --git a/gen/CoreEx.Gen/Templates/CleanAttribute.cs.hb b/gen/CoreEx.Gen/Templates/CleanAttribute.cs.hb new file mode 100644 index 00000000..5b4f2c83 --- /dev/null +++ b/gen/CoreEx.Gen/Templates/CleanAttribute.cs.hb @@ -0,0 +1,23 @@ +// +#nullable enable +#pragma warning disable + +namespace CoreEx.Entities; + +/// +/// Indicates that the corresponding property should be extended (source generation) to include capabilities. +/// +/// This is dependent on either or usage. +/// This is used for the generation of functionality; this otherwise does not get used for the implementation of the property itself. +[global::System.AttributeUsage(global::System.AttributeTargets.Property, AllowMultiple = false)] +[global::System.CodeDom.Compiler.GeneratedCodeAttribute("CoreEx.Gen.ContractGenerator", "1.0.0.0")] +internal class CleanAttribute(CleanOption option = CleanOption.UseDefault) : global::System.Attribute +{ + /// + /// Gets the . 
+ /// + public CleanOption CleanOption { get; } = option; +} + +#pragma warning restore +#nullable restore \ No newline at end of file diff --git a/gen/CoreEx.Gen/Templates/Contract.cs.hb b/gen/CoreEx.Gen/Templates/Contract.cs.hb new file mode 100644 index 00000000..7ad02f4d --- /dev/null +++ b/gen/CoreEx.Gen/Templates/Contract.cs.hb @@ -0,0 +1,102 @@ +// +#nullable enable +#pragma warning disable + +namespace {{Namespace}}; + +{{#each ContainingTypeHierarchy}} +{{indent}}partial class {{.}} +{{indent}}{{bo}}{{indent++}} +{{/each}} +{{indent}}partial {{#if IsRecord}}record{{else}}class{{/if}} {{ClassName}} : global::CoreEx.Entities.IContract<{{ClassName}}> +{{indent}}{{bo}} +{{indent}} /// +{{indent}} public static {{#ifne IContract 'Declare'}}new {{/ifne}}global::System.Collections.Generic.IEnumerable GetStaticPropertyRuntimeMetadata() +{{indent}} { + {{#if HasBaseType}} +{{indent}} foreach (var p in global::CoreEx.Metadata.RuntimeMetadata.GetPropertyRuntimeMetadata<{{BaseType}}>()) +{{indent}} yield return p; + + {{/if}} + {{#each Properties}} +{{indent}} yield return new global::CoreEx.Metadata.PropertyRuntimeMetadata<{{../ClassName}}, {{Type}}>(nameof({{Name}}), static e => e.{{Name}}{{#unless IsReadOnly}}{{#unless IsInitOnly}}, static (e, v) => e.{{Name}} = v{{/unless}}{{/unless}}{{#if HasDefault}}, defaultValue: {{Default}}{{/if}}{{#if HasText}}, text: static () => new global::CoreEx.Localization.LText("{{KeyAndOrText}}"{{#if HasFallbackText}}, "{{FallbackText}}"{{/if}}){{/if}}{{#ifne CleanOption 'UseDefault'}}, clean: global::CoreEx.Entities.CleanOption.{{CleanOption}}{{/ifne}}{{#if HasJsonName}}, jsonName: "{{JsonName}}"{{/if}}{{#if HasFormat}}, format: "{{Format}}"{{/if}}); + {{/each}} +{{indent}} } + +{{indent}} /// +{{indent}} public {{#ifeq IContract 'Declare'}}virtual{{else}}override{{/ifeq}} global::System.Collections.Generic.IEnumerable GetPropertyRuntimeMetadata() +{{indent}} { +{{indent}} foreach (var p in GetStaticPropertyRuntimeMetadata()) 
+{{indent}} yield return p; +{{indent}} } +{{#ifeq IContract 'Declare'}} + +{{indent}} /// +{{indent}} public virtual bool IsDefault() => global::CoreEx.Metadata.RuntimeMetadata.IsDefault(this); + +{{indent}} /// +{{indent}} public virtual void CopyFrom(TFrom from) where TFrom : class => global::CoreEx.Metadata.RuntimeMetadata.CopyInto(from, this); +{{/ifeq}} +{{#unless IsRecord}} + +{{indent}} /// +{{indent}} public override int GetHashCode() => global::CoreEx.Metadata.RuntimeMetadata.GetHashCode(this); + +{{indent}} /// +{{indent}} public override bool Equals(object? other) => global::CoreEx.Metadata.RuntimeMetadata.AreEqual(this, other); + +{{indent}} /// +{{indent}} public virtual bool Equals({{ClassName}}? other) => global::CoreEx.Metadata.RuntimeMetadata.AreEqual(this, other); +{{/unless}} +{{#each PartialProperties}} + + {{#if IsRefData}} + {{#if IsRefDataJson}} +{{indent}} [global::System.Text.Json.Serialization.JsonPropertyName("{{JsonName}}")] + {{/if}} +{{indent}} public partial {{Type}} {{Name}} { get => field;{{#unless IsReadOnly}} {{#if IsInitOnly}}init{{else}}set{{/if}} => field = value; {{/unless}} } + +{{indent}} /// +{{indent}} /// Gets the corresponding value as per the related . +{{indent}} /// +{{indent}} [global::System.Diagnostics.DebuggerBrowsable(global::System.Diagnostics.DebuggerBrowsableState.Never)] +{{indent}} [global::System.Text.Json.Serialization.JsonIgnore] +{{indent}} [global::System.Text.Json.Serialization.JsonPropertyName("{{JsonName}}")] + {{#if HasText}} +{{indent}} [global::CoreEx.Localization.Localization("{{KeyAndOrText}}"{{#if HasFallbackText}}, "{{FallbackText}}"{{/if}})] + {{/if}} +{{indent}} public {{RefDataType}} {{RefDataName}} { get => ({{RefDataType}}){{Name}}; {{#if IsSettable}}set => {{Name}} = value; {{/if}} } + +{{indent}} /// +{{indent}} /// Gets the related text where explicitly requested. 
+{{indent}} /// +{{indent}} /// Generally, the guidance (by design) is that the text should not be initialized/set directly; only offered to support advanced, serialization, and testing scenarios. +{{indent}} [global::System.Diagnostics.DebuggerBrowsable(global::System.Diagnostics.DebuggerBrowsableState.Never)] +{{indent}} [global::System.ComponentModel.ReadOnly(true)] +{{indent}} public string? {{RefDataName}}Text { get => field ?? global::CoreEx.ExecutionContext.GetRelatedText(() => {{Name}} is null ? null : {{RefDataName}}?.Text); init => field = value; } + {{else}} + {{#if IsRefDataCodeCollection}} +{{indent}} private {{Type}} {{RefDataCodeCollectionFieldName}}; + + {{#unless HasJsonName}} +{{indent}} [global::System.Text.Json.Serialization.JsonPropertyName("{{RefDataCodeCollectionJsonName}}")] + {{/unless}} +{{indent}} public partial {{Type}} {{Name}} { get => {{RefDataCodeCollectionFieldName}}; set => {{RefDataCodeCollectionFieldName}} = value; } + +{{indent}} /// +{{indent}} /// Gets or sets the () encapsulation of the underlying . 
+{{indent}} /// +{{indent}} [global::System.Text.Json.Serialization.JsonIgnore] +{{indent}} public global::CoreEx.RefData.ReferenceDataCodeCollection<{{RefDataType}}> {{RefDataName}} { get => new(ref {{RefDataCodeCollectionFieldName}}); set => value?.ToCodeList(); } + {{else}} +{{indent}} public {{#if IsRequired}}required {{/if}}partial {{Type}} {{Name}} { get => field;{{#unless IsReadOnly}} {{#if IsInitOnly}}init{{else}}set{{/if}} => field = global::CoreEx.Entities.Cleaner.Clean(value, {{#if IsSelfCleanedString}}global::CoreEx.Entities.StringTrim.{{StringTrim}}, global::CoreEx.Entities.StringTransform.{{StringTransform}}, global::CoreEx.Entities.StringCase.{{StringCase}}{{else}}global::CoreEx.Entities.DateTimeTransform.{{DateTimeTransform}}{{/if}});{{/unless}} } + {{/if}} + {{/if}} +{{/each}} +{{indent}}{{bc}} +{{#each ContainingTypeHierarchy}} +{{indent--}}{{Indent}}{{bc}} +{{/each}} + +#pragma warning restore +#nullable restore \ No newline at end of file diff --git a/gen/CoreEx.Gen/Templates/ContractAttribute.cs.hb b/gen/CoreEx.Gen/Templates/ContractAttribute.cs.hb new file mode 100644 index 00000000..68102eaf --- /dev/null +++ b/gen/CoreEx.Gen/Templates/ContractAttribute.cs.hb @@ -0,0 +1,18 @@ +// +#nullable enable +#pragma warning disable + +namespace CoreEx.Entities; + +/// +/// Enables source generation of the CoreEx extended contract functionality; specifically the implementation of . +/// +/// Supports both and types; however, the equality for a is not implemented as the native compiler generated equality must be used instead. +/// The equality is implemented using the functionality which supports deep equality (where possible). +/// Usage is all or nothing, no partial implementation of the is supported. 
+[global::System.AttributeUsage(global::System.AttributeTargets.Class, AllowMultiple = false)] +[global::System.CodeDom.Compiler.GeneratedCodeAttribute("CoreEx.Gen.ContractGenerator", "1.0.0.0")] +internal class ContractAttribute : global::System.Attribute { } + +#pragma warning restore +#nullable restore \ No newline at end of file diff --git a/gen/CoreEx.Gen/Templates/ContractIgnoreAttribute.cs.hb b/gen/CoreEx.Gen/Templates/ContractIgnoreAttribute.cs.hb new file mode 100644 index 00000000..07851d40 --- /dev/null +++ b/gen/CoreEx.Gen/Templates/ContractIgnoreAttribute.cs.hb @@ -0,0 +1,17 @@ +// +#nullable enable +#pragma warning disable + +namespace CoreEx.Entities; + +/// +/// Indicates that the corresponding property should not be included in the source generation of the CoreEx extended contract functionality. +/// +/// This is dependent on either or usage. +/// This is used for the generation of functionality; this otherwise does not get used for the implementation of the property itself. +[global::System.AttributeUsage(global::System.AttributeTargets.Property, AllowMultiple = false)] +[global::System.CodeDom.Compiler.GeneratedCodeAttribute("CoreEx.Gen.ContractGenerator", "1.0.0.0")] +internal class ContractIgnoreAttribute : global::System.Attribute { } + +#pragma warning restore +#nullable restore \ No newline at end of file diff --git a/gen/CoreEx.Gen/Templates/DateTimeAttribute.cs.hb b/gen/CoreEx.Gen/Templates/DateTimeAttribute.cs.hb new file mode 100644 index 00000000..cf14bdc9 --- /dev/null +++ b/gen/CoreEx.Gen/Templates/DateTimeAttribute.cs.hb @@ -0,0 +1,24 @@ +// +#nullable enable +#pragma warning disable + +namespace CoreEx.Entities; + +/// +/// Indicates that the corresponding property should be extended (source generation) to include capabilities. +/// +/// The (defaults to ). +/// This is dependent on either or usage. +/// The property must be declared as for this to be generated correctly. 
+[global::System.AttributeUsage(global::System.AttributeTargets.Property, AllowMultiple = false)] +[global::System.CodeDom.Compiler.GeneratedCodeAttribute("CoreEx.Gen.ContractGenerator", "1.0.0.0")] +internal class DateTimeAttribute(DateTimeTransform transform = DateTimeTransform.UseDefault) : global::System.Attribute +{ + /// + /// Gets the . + /// + public DateTimeTransform Transform { get; } = transform; +} + +#pragma warning restore +#nullable restore \ No newline at end of file diff --git a/gen/CoreEx.Gen/Templates/ReferenceData.cs.hb b/gen/CoreEx.Gen/Templates/ReferenceData.cs.hb new file mode 100644 index 00000000..20533b88 --- /dev/null +++ b/gen/CoreEx.Gen/Templates/ReferenceData.cs.hb @@ -0,0 +1,52 @@ +// +#nullable enable +#pragma warning disable + +namespace {{Namespace}}; + +{{#each ContainingTypeHierarchy}} +{{indent}}partial class {{.}} +{{indent}}{{bo}}{{indent++}} +{{/each}} +{{indent}}partial {{#if IsRecord}}record{{else}}class{{/if}} {{ClassName}} +{{indent}}{{bo}} +{{#each PartialProperties}} + {{#if IsRefData}} + {{#if IsRefDataJson}} +{{indent}} [global::System.Text.Json.Serialization.JsonPropertyName("{{JsonName}}")] + {{/if}} +{{indent}} public partial {{Type}} {{Name}} { get => field;{{#unless IsReadOnly}} {{#if IsInitOnly}}init{{else}}set{{/if}} => field = value; {{/unless}} } + +{{indent}} /// +{{indent}} /// Gets the corresponding value as per the related . +{{indent}} /// +{{indent}} [global::System.Diagnostics.DebuggerBrowsable(global::System.Diagnostics.DebuggerBrowsableState.Never)] +{{indent}} [global::System.Text.Json.Serialization.JsonIgnore] +{{indent}} public {{RefDataType}} {{RefDataName}} { get => ({{RefDataType}}){{Name}}; {{#if IsSettable}}set => {{Name}} = value; {{/if}} } + +{{indent}} /// +{{indent}} /// Gets the related text where explicitly requested. 
+{{indent}} /// +{{indent}} /// Generally, the guidance (by design) is that the text should not be initialized/set directly; only offered to support advanced, serialization, and testing scenarios. +{{indent}} [global::System.Diagnostics.DebuggerBrowsable(global::System.Diagnostics.DebuggerBrowsableState.Never)] +{{indent}} [global::System.ComponentModel.ReadOnly(true)] +{{indent}} public string? {{RefDataName}}Text { get => field ?? global::CoreEx.ExecutionContext.GetRelatedText(() => {{Name}} is null ? null : {{RefDataName}}?.Text); init => field = value; } + {{else}} +{{indent}} public partial {{Type}} {{Name}} { get => field;{{#unless IsReadOnly}} {{#if IsInitOnly}}init{{else}}set{{/if}} => field = global::CoreEx.Entities.Cleaner.Clean(value, {{#if IsSelfCleanedString}}global::CoreEx.Entities.StringTrim.{{StringTrim}}, global::CoreEx.Entities.StringTransform.{{StringTransform}}, global::CoreEx.Entities.StringCase.{{StringCase}}{{else}}global::CoreEx.Entities.DateTimeTransform.{{DateTimeTransform}}{{/if}});{{/unless}} } + {{/if}} + +{{/each}} +{{indent}} /// +{{indent}} /// An implicit cast operator that converts a to a instance. +{{indent}} /// +{{indent}} /// The . +{{indent}} /// The corresponding instance where is not ; otherwise, . +{{indent}} [return: global::System.Diagnostics.CodeAnalysis.NotNullIfNotNull(nameof(code))] +{{indent}} public static implicit operator {{ClassName}}?(string? code) => code is null ? null : global::CoreEx.RefData.ReferenceDataOrchestrator.TryGetByCode<{{ClassName}}>(code, out var item) ? 
item : item; +{{indent}}{{bc}} +{{#each ContainingTypeHierarchy}} +{{indent--}}{{Indent}}{{bc}} +{{/each}} + +#pragma warning restore +#nullable restore \ No newline at end of file diff --git a/gen/CoreEx.Gen/Templates/ReferenceDataAttribute.cs.hb b/gen/CoreEx.Gen/Templates/ReferenceDataAttribute.cs.hb new file mode 100644 index 00000000..684c75f2 --- /dev/null +++ b/gen/CoreEx.Gen/Templates/ReferenceDataAttribute.cs.hb @@ -0,0 +1,15 @@ +// +#nullable enable +#pragma warning disable + +namespace CoreEx.RefData; + +/// +/// Enables source generation of the CoreEx reference data functionality; specifically the implementation of . +/// +[global::System.AttributeUsage(global::System.AttributeTargets.Class, AllowMultiple = false)] +[global::System.CodeDom.Compiler.GeneratedCodeAttribute("CoreEx.Gen.ContractGenerator", "1.0.0.0")] +internal class ReferenceDataAttribute : global::System.Attribute { } + +#pragma warning restore +#nullable restore \ No newline at end of file diff --git a/gen/CoreEx.Gen/Templates/ReferenceDataCodeCollectionTAttribute.cs.hb b/gen/CoreEx.Gen/Templates/ReferenceDataCodeCollectionTAttribute.cs.hb new file mode 100644 index 00000000..3c2e2aa1 --- /dev/null +++ b/gen/CoreEx.Gen/Templates/ReferenceDataCodeCollectionTAttribute.cs.hb @@ -0,0 +1,18 @@ +// +#nullable enable +#pragma warning disable + +namespace CoreEx.RefData; + +/// +/// Indicates that the corresponding property should be extended (source generation) to include the full suite of reference data code collection properties/capabilities. +/// +/// This is dependent on either or usage. +/// Primarily, a corresponding property will be created (and linked) for non-serialized usage. +/// The property must be declared as for this to be generated correctly. 
+[global::System.AttributeUsage(global::System.AttributeTargets.Property, AllowMultiple = false)] +[global::System.CodeDom.Compiler.GeneratedCodeAttribute("CoreEx.Gen.ContractGenerator", "1.0.0.0")] +internal class ReferenceDataCodeCollectionAttribute : global::System.Attribute where TReferenceData : class, CoreEx.RefData.Abstractions.IReferenceData { } + +#pragma warning restore +#nullable restore \ No newline at end of file diff --git a/gen/CoreEx.Gen/Templates/ReferenceDataTAttribute.cs.hb b/gen/CoreEx.Gen/Templates/ReferenceDataTAttribute.cs.hb new file mode 100644 index 00000000..858eb14b --- /dev/null +++ b/gen/CoreEx.Gen/Templates/ReferenceDataTAttribute.cs.hb @@ -0,0 +1,30 @@ +// +#nullable enable +#pragma warning disable + +namespace CoreEx.RefData; + +/// +/// Indicates that the corresponding property should be extended (source generation) to include the full suite of reference data properties/capabilities. +/// +/// +/// This is dependent on either or usage. +/// Primarily, a corresponding property will be created (and linked) for non-serialized usage. +/// The property must be declared as for this to be generated correctly. +[global::System.AttributeUsage(global::System.AttributeTargets.Property, AllowMultiple = false)] +[global::System.CodeDom.Compiler.GeneratedCodeAttribute("CoreEx.Gen.ContractGenerator", "1.0.0.0")] +internal class ReferenceDataAttribute : global::System.Attribute where TReferenceData : class, CoreEx.RefData.Abstractions.IReferenceData +{ + /// + /// Indicates whether the CoreEx.RefData.Abstractions.IReferenceData.Text read-only property should be included in the generated source. + /// + public bool Text { get; set; } = true; + + /// + /// Gets or sets the JSON name override to use for the CoreEx.RefData.Abstractions.IReferenceData.Text property when serializing to JSON. + /// + public string? 
TextJsonName { get; set; } +} + +#pragma warning restore +#nullable restore \ No newline at end of file diff --git a/gen/CoreEx.Gen/Templates/StringAttribute.cs.hb b/gen/CoreEx.Gen/Templates/StringAttribute.cs.hb new file mode 100644 index 00000000..273f2c76 --- /dev/null +++ b/gen/CoreEx.Gen/Templates/StringAttribute.cs.hb @@ -0,0 +1,36 @@ +// +#nullable enable +#pragma warning disable + +namespace CoreEx.Entities; + +/// +/// Indicates that the corresponding property should be extended (source generation) to include capabilities. +/// +/// The (defaults to ). +/// The (defaults to ). +/// The (defaults to ). +/// This is dependent on either or usage. +/// The property must be declared as for this to be generated correctly. +[global::System.AttributeUsage(global::System.AttributeTargets.Property, AllowMultiple = false)] +[global::System.CodeDom.Compiler.GeneratedCodeAttribute("CoreEx.Gen.ContractGenerator", "1.0.0.0")] +internal class StringAttribute(StringTrim trim = StringTrim.UseDefault, StringTransform transform = StringTransform.UseDefault, StringCase casing = StringCase.UseDefault) : global::System.Attribute +{ + /// + /// Gets the . + /// + public StringTrim Trim { get; } = trim; + + /// + /// Gets the . + /// + public StringTransform Transform { get; } = transform; + + /// + /// Gets the . + /// + public StringCase Casing { get; } = casing; +} + +#pragma warning restore +#nullable restore \ No newline at end of file diff --git a/gen/CoreEx.Gen/Utility/CodeGenContext.cs b/gen/CoreEx.Gen/Utility/CodeGenContext.cs new file mode 100644 index 00000000..7a6e711d --- /dev/null +++ b/gen/CoreEx.Gen/Utility/CodeGenContext.cs @@ -0,0 +1,54 @@ +using Microsoft.CodeAnalysis; +using System.Collections.Generic; +using System.Linq; + +namespace CoreEx.Gen.Utility; + +/// +/// Provides context for code generation, allowing for customization of the generated code. 
+/// +public class CodeGenContext +{ + /// + /// Gets or sets the indent level to be used for the generated code. + /// + public int Indent { get; set; } = 0; + + /// + /// Gets or sets the number of spaces used for each indentation level. + /// + public int IndentSize { get; set; } = 4; + + /// + /// Increments the indent level by one. + /// + public void IncrementIndent() => Indent++; + + /// + /// Decreases the current indentation level by one. + /// + public void DecrementIndent() => Indent--; + + /// + /// Gets the string used for indentation, consisting of spaces. + /// + public string GetIndentString() => new(' ', Indent * IndentSize); + + /// + /// Gets the list to be reported. + /// + public List Diagnostics { get; } = []; + + /// + /// Reports the accumulated to the provided . + /// + /// The . + /// indicates there are no in and source production should occur; otherwise, indicates that no source production should occur. + public bool ReportDiagnostics(SourceProductionContext context) + { + foreach (var d in Diagnostics) + context.ReportDiagnostic(d); + + return !Diagnostics.Any(d => d.Severity == DiagnosticSeverity.Error); + } +} \ No newline at end of file diff --git a/gen/CoreEx.Gen/Utility/HandlebarsCodeGenerator.cs b/gen/CoreEx.Gen/Utility/HandlebarsCodeGenerator.cs new file mode 100644 index 00000000..433a61ca --- /dev/null +++ b/gen/CoreEx.Gen/Utility/HandlebarsCodeGenerator.cs @@ -0,0 +1,65 @@ +using HandlebarsDotNet; +using System; +using System.IO; + +namespace CoreEx.Gen.Utility; + +/// +/// The core code generator that manages the Handlebars compilation (cached for performance) and enables the corresponding (one or more invocations). +/// +public class HandlebarsCodeGenerator +{ + private readonly HandlebarsTemplate _template; + + /// + /// Static constructor. 
+ /// + static HandlebarsCodeGenerator() + { + HandlebarsHelpers.RegisterHelpers(); + Handlebars.Configuration.TextEncoder = null; + } + + /// + /// Creates a new instance of the from the specified . + /// + /// The fully qualified embedded resource name for the code template. + /// The . + public static HandlebarsCodeGenerator Create(string resourceName) + { + using var s = typeof(HandlebarsCodeGenerator).Assembly.GetManifestResourceStream(resourceName); + using var sr = new StreamReader(s); + return new HandlebarsCodeGenerator(sr); + } + + /// + /// Creates a new instance of the from the specified . + /// + /// The template . + /// + public static HandlebarsCodeGenerator Create(Stream stream) + { + using var sr = new StreamReader(stream); + return new HandlebarsCodeGenerator(sr); + } + + /// + /// Initializes a new instance of the from the . + /// + /// The template . + public HandlebarsCodeGenerator(StreamReader sr) + { + if (sr is null) + throw new ArgumentNullException(nameof(sr)); + + _template = Handlebars.Compile(sr.ReadToEnd()); + } + + /// + /// Generate content from the template using the and optional secondary . + /// + /// The primary context value referenced within the template. + /// The optional secondary data. + /// The resulting generated output. + public string Generate(CodeGenContext context, object? data = null) => _template(context, data); +} \ No newline at end of file diff --git a/gen/CoreEx.Gen/Utility/HandlebarsHelpers.cs b/gen/CoreEx.Gen/Utility/HandlebarsHelpers.cs new file mode 100644 index 00000000..258df491 --- /dev/null +++ b/gen/CoreEx.Gen/Utility/HandlebarsHelpers.cs @@ -0,0 +1,119 @@ +using HandlebarsDotNet; + +namespace CoreEx.Gen.Utility; + +/// +/// Provides the Handlebars.Net capability. +/// +public static class HandlebarsHelpers +{ + private static readonly object _lock = new(); + private static bool _areRegistered = false; + + /// + /// Registers all of the required Handlebars helpers. 
+ /// + public static void RegisterHelpers() + { + if (_areRegistered) + return; + + lock (_lock) + { + if (_areRegistered) + return; + + _areRegistered = true; + + // Increments indent only! + Handlebars.RegisterHelper("indent++", (in w, in options, in context, in args) => + { + var hc = (CodeGenContext)options.Data["Root"]; + hc.IncrementIndent(); + }); + + // Decrements indent only! + Handlebars.RegisterHelper("indent--", (in w, in options, in context, in args) => + { + var hc = (CodeGenContext)options.Data["Root"]; + hc.DecrementIndent(); + }); + + // Writes the current indent string. + Handlebars.RegisterHelper("indent", (in w, in options, in context, in args) => + { + var hc = (CodeGenContext)options.Data["Root"]; + w.WriteSafeString(hc.GetIndentString()); + }); + + Handlebars.RegisterHelper("bo", (w, _, __) => w.WriteSafeString("{")); + Handlebars.RegisterHelper("bc", (w, _, __) => w.WriteSafeString("}")); + + // Will check that the first argument equals at least one of the subsequent arguments. + Handlebars.RegisterHelper("ifeq", (writer, options, context, args) => + { + if (IfEq(args)) + options.Template(writer, context); + else + options.Inverse(writer, context); + }); + + // Will check that the first argument does not equal any of the subsequent arguments. + Handlebars.RegisterHelper("ifne", (writer, options, context, args) => + { + if (IfEq(args)) + options.Inverse(writer, context); + else + options.Template(writer, context); + }); + } + } + + /// + /// Perform the actual IfEq equality check. + /// + private static bool IfEq(Arguments args) + { + bool func() + { + for (int i = 1; i < args.Length; i++) + { + if (Compare(args[0], args[i])) + return true; + } + + return false; + } + + return args.Length switch + { + 0 => true, + 1 => args[0] is null, + 2 => Compare(args[0], args[1]), + _ => func() + }; + } + + /// + /// Compare the two values for equality. + /// + private static bool Compare(object? lval, object? 
rval) + { + if (lval is null && rval is null) + return true; + + if (lval is null || rval is null) + return false; + + if (lval is string ls && rval is string rs) + return ls == rs; + + if (lval is bool lb && rval is bool rb) + return lb == rb; + + if (lval is int li && rval is int ri) + return li == ri; + + return lval.ToString() == rval.ToString(); + } +} \ No newline at end of file diff --git a/gen/CoreEx.Gen/Utility/Pluralizer.cs b/gen/CoreEx.Gen/Utility/Pluralizer.cs new file mode 100644 index 00000000..a7881b9c --- /dev/null +++ b/gen/CoreEx.Gen/Utility/Pluralizer.cs @@ -0,0 +1,12 @@ +namespace CoreEx.Gen.Utility; + +/// +/// Enables access to the pluralization services. +/// +internal static class Pluralizer +{ + /// + /// Gets the singleton instance. + /// + public static Pluralize.NET.IPluralize Instance { get; } = new Pluralize.NET.Pluralizer(); +} \ No newline at end of file diff --git a/samples/Directory.Build.props b/samples/Directory.Build.props index 233eeabe..45fb7544 100644 --- a/samples/Directory.Build.props +++ b/samples/Directory.Build.props @@ -1,6 +1,6 @@ - net8.0;net9.0;net10.0 + net10.0 enable enable preview diff --git a/samples/README.md b/samples/README.md new file mode 100644 index 00000000..f8ea36b8 --- /dev/null +++ b/samples/README.md @@ -0,0 +1,394 @@ +# Contoso Samples + +The `samples` folder contains reference implementations of three domain microservices built with CoreEx. + +- Products. +- Shopping. +- Orders. + +Additional sample areas are currently work in progress: + +- Order.Workflow. + +Each domain includes three runnable hosts: + +- API host (`*.Api`). +- Outbox Relay host (`*.Outbox.Relay`). +- Event Subscriber host (`*.Subscribe`). + +The sample also includes supporting projects for contracts, application, infrastructure, domain/data, and test coverage. + +## Architecture + +The two domains are hosted as independent microservices that communicate via synchronous HTTP and asynchronous messaging over Azure Service Bus. 
+ +```mermaid +graph TB + subgraph INFRA["Shared Infrastructure"] + direction LR + SQLSERVER[("SQL Server\n:1433")] + REDIS[("Redis\n:6379")] + ASB[["Azure Service Bus\nEmulator\n:5672"]] + ASPIRE["Aspire Dashboard\n:18888"] + end + + subgraph PRODUCTS["Contoso.Products Domain"] + direction TB + PAPI["Products API\n─────────────────\nGET/POST/PUT/PATCH/DELETE /products\nPOST /inventory/reserve\nPOST /inventory/adjust\nGET /products/{id}/on-hand\nGET /refdata"] + PAPP["Products Application\n─────────────────\nProductService\nMovementService\nInventoryService"] + PINFRA["Products Infrastructure\n─────────────────\nProductRepository\nMovementRepository\nInventoryRepository\nProductsOutboxPublisher"] + PSUBSCRIBE["Products.Subscribe\n─────────────────\nReservationConfirmSubscriber\nReservationCancelSubscriber"] + POUTBOX["Products.Outbox.Relay\n─────────────────\nOutbox to Service Bus\nPartitioned relay"] + PDB[("SQL Server\n[Products] schema\n─────────────\nProduct\nInventory\nMovement\nOutbox / OutboxLease\nRef data")] + end + + subgraph SHOPPING["Contoso.Shopping Domain"] + direction TB + SAPI["Shopping API\n─────────────────\nPOST /customers/{id}/baskets\nPOST /{id}/checkout\nPUT /{id}/apply-discount\nPOST/PUT/DELETE /{id}/items\nGET /baskets"] + SAPP["Shopping Application\n─────────────────\nBasketService\nBasketReadService"] + SDOMAIN["Shopping Domain\n─────────────────\nBasket (Aggregate Root)\nBasketItem (Entity)\nItemPricing (Value Object)"] + SINFRA["Shopping Infrastructure\n─────────────────\nBasketRepository\nShoppingOutboxPublisher\nProductAdapter (ACL)\nProductsHttpClient\nProductSyncAdapter"] + SSUBSCRIBE["Shopping.Subscribe\n─────────────────\nProductModifySubscriber\nProductDeleteSubscriber"] + SOUTBOX["Shopping.Outbox.Relay\n─────────────────\nOutbox to Service Bus\nPartitioned relay"] + SDB[("SQL Server\n[Shopping] schema\n─────────────\nBasket\nBasketItem\nProduct (replica)\nOutbox / OutboxLease\nRef data")] + end + + PAPI --> PAPP + PAPP --> PINFRA 
+ PINFRA --> PDB + POUTBOX -->|"Poll Outbox table"| PDB + + SAPI --> SAPP + SAPP --> SDOMAIN + SAPP --> SINFRA + SINFRA --> SDB + SOUTBOX -->|"Poll Outbox table"| SDB + SINFRA -->|"L1/L2 Hybrid Cache"| REDIS + + SINFRA -->|"① HTTP POST /api/inventory/reserve\nReserve inventory at checkout"| PAPI + + POUTBOX -->|"② Publish product.created/updated/deleted"| ASB + ASB -->|"③ Consume product events (replication)"| SSUBSCRIBE + SSUBSCRIBE -->|"④ Sync product replica"| SDB + + SOUTBOX -->|"⑤ Publish reservation.confirm (on checkout success)"| ASB + SINFRA -->|"⑥ Publish reservation.cancel (on checkout failure, direct)"| ASB + ASB -->|"⑦ Consume reservation commands"| PSUBSCRIBE + PSUBSCRIBE --> PAPP + + PDB --- SQLSERVER + SDB --- SQLSERVER + + PAPI -.->|"OpenTelemetry"| ASPIRE + SAPI -.->|"OpenTelemetry"| ASPIRE +``` + +### Inter-Domain Communication + +**① Synchronous HTTP — Shopping → Products** + +During basket checkout, Shopping calls `POST /api/inventory/reserve` on the Products API directly via `ProductAdapter` (anti-corruption layer) to validate and reserve stock in real time. + +**② – ④ Async event replication — Products → Shopping** + +Products publishes `product.created`, `product.updated`, and `product.deleted` events through its Outbox → Relay → Service Bus. Shopping.Subscribe consumes these and keeps a local `[Shopping].[Product]` replica in sync for offline queries. + +**⑤ – ⑦ Async reservation commands — Shopping → Products** + +- On checkout **success**: Shopping enqueues a `reservation.confirm` command via its Outbox → Relay → Service Bus → `Products.Subscribe`, which confirms the pending inventory movement. +- On checkout **failure**: Shopping publishes `reservation.cancel` directly to Service Bus (bypassing the Outbox, since the database transaction has been rolled back) so the pending reservation is released. 
+ +### Key Patterns + +| Pattern | Where Used | +|---|---| +| Transactional Outbox | Both domains — atomic event publishing with DB writes | +| Anti-Corruption Layer | `ProductAdapter` / `ProductsHttpClient` in Shopping | +| DDD Aggregate | `Basket` aggregate root with `BasketItem` and `ItemPricing` | +| Hybrid Cache (L1 + L2) | Shopping API — FusionCache with Redis backplane | +| Outbox Relay (partitioned) | Both domains — dedicated relay host per domain | +| Railway-Oriented Programming | `Result` flow control throughout | +| ETag / Optimistic Concurrency | `Basket` implements `IETag` | + +## What This Demonstrates + +These samples are intended to show practical CoreEx usage across: + +- API composition and HTTP behaviors. +- Data access and persistence workflows. +- Outbox and event publishing/subscribing patterns. +- End-to-end host orchestration with Aspire. +- Unit and integration testing for service behaviors. + +## Project Layout + +- `samples/src/Contoso.Products.*` for the Products domain services and supporting projects. +- `samples/src/Contoso.Shopping.*` for the Shopping domain services and supporting projects. +- `samples/aspire/Contoso.Aspire` to orchestrate both domains and view logs, traces, and metrics. +- `samples/tests` for unit, integration, host-level tests, and the E2E runner. + +## Prerequisites + +- .NET SDK (matching the repo requirements). +- A container runtime (Docker or Podman). +- Aspire CLI — install once: + + ```bash + # Linux/macOS + curl -sSL https://aspire.dev/install.sh | bash + + # Windows (PowerShell) + iwr -useb https://aspire.dev/install.ps1 | iex + ``` + + Verify with `aspire --version`. See [aspire.dev/get-started/install-cli](https://aspire.dev/get-started/install-cli/) for details. 
+ +## Start Infrastructure + +Start sample dependencies using the root compose file: + +```bash +podman compose -f docker-compose.yml up -d +``` + +Stop dependencies when finished: + +```bash +podman compose -f docker-compose.yml down +``` + +## Initialize Sample Databases + +From the repository root: + +```bash +dotnet run --project samples/src/Contoso.Products.Database -- Migrate +dotnet run --project samples/src/Contoso.Products.Database -- Data + +dotnet run --project samples/src/Contoso.Shopping.Database -- Migrate +dotnet run --project samples/src/Contoso.Shopping.Database -- Data + +dotnet run --project samples/src/Contoso.Orders.Database -- Migrate +dotnet run --project samples/src/Contoso.Orders.Database -- Data +``` + +The E2E runner's `Database-Migration` scenario now applies schema and base data for Products, Shopping, and Orders. + +## Run With Aspire + +From the repository root: + +```bash +aspire run +``` + +Or using `dotnet run`: + +```bash +dotnet run --project samples/aspire/Contoso.Aspire +``` + +This is the easiest way to run both domains and inspect runtime behavior through centralized logs, traces, and metrics. + +## Run Tests + +The `samples/tests` folder contains unit and integration-style tests that exercise API, relay, and subscriber functionality. + +```bash +dotnet test samples/tests/Contoso.Products.Test.Unit +dotnet test samples/tests/Contoso.Products.Test.Api +dotnet test samples/tests/Contoso.Products.Test.Outbox.Relay +dotnet test samples/tests/Contoso.Products.Test.Subscribe +dotnet test samples/tests/Contoso.Shopping.Test.Api +``` + +### Unit Tests + +`Contoso.Products.Test.Unit` contains isolated component tests (e.g. validators). These use `WithGenericTester` from `CoreEx.UnitTesting` and do not require a running database, cache, or message broker. + +### Integration Tests (API) + +`Contoso.Products.Test.Api` and `Contoso.Shopping.Test.Api` are full integration tests that spin up the real API under test via `WithApiTester`. 
They require infrastructure (SQL Server, Redis, Service Bus emulator) to be running. + +#### Data Seeding and One-Time Setup + +Each integration test project has a `[OneTimeSetUp]` method that runs once before the test suite starts: + +1. **Migrate and seed the database** — `Test.MigrateSqlServerDataAsync(DbMigration.ConfigureMigrationArgs)` applies pending schema migrations and resets all rows in the domain's schema to the contents of `Data/data.yaml` defined in the corresponding `*.Test.Common` project. The migration args include a `DataResetFilterPredicate` scoped to only the domain's SQL schema so test runs for one domain cannot affect the other. + +2. **Clear the hybrid cache** — `Test.ClearFusionCacheAsync()` flushes both the in-process L1 cache and the Redis L2 cache to ensure tests start from a known state. + +3. **Set up event capture** — `Test.UseExpectedSqlServerOutboxPublisher()` and (where relevant) `Test.UseExpectedAzureServiceBusPublisher()` wrap the event publishers with decorators so tests can assert which events were published to the outbox or Service Bus. + +4. **Mock downstream HTTP clients** — Shopping tests replace the `IHttpClientFactory` with a `MockHttpClientFactory` that intercepts calls to the Products API (e.g. `POST api/inventory/reserve`) so the Shopping API can be tested in isolation without a running Products API. 
+ +```csharp +[OneTimeSetUp] +public async Task OneTimeSetUpAsync() +{ + await Test.MigrateSqlServerDataAsync(DbMigration.ConfigureMigrationArgs).ConfigureAwait(false); + await Test.ClearFusionCacheAsync().ConfigureAwait(false); + + Test.UseExpectedSqlServerOutboxPublisher(); + Test.UseExpectedAzureServiceBusPublisher(); + + var mcf = MockHttpClientFactory.Create(); + _mockHttpReserveRequest = mcf.CreateClient("ProductsApi").Request(HttpMethod.Post, "api/inventory/reserve"); + Test.ReplaceHttpClientFactory(mcf); +} +``` + +#### Test Data (data.yaml) + +Test data is defined in YAML in each domain's `*.Test.Common` project under `Data/data.yaml`. The `TestData` class in that project is used as an assembly marker so the framework can locate the file. Data includes products, inventory levels, movements, and (for Shopping) pre-existing baskets. + +IDs are expressed as small integers in YAML and converted to GUIDs at load time using `n.ToGuid()` helpers (e.g. `1.ToGuid()`), keeping data files human-readable while still using GUID primary keys at the database level. + +#### Resource-Based Response Expectations + +The `Resources/` folder in each test project contains JSON files used for two purposes: + +- **Expected responses** — compared against the actual API response body (specified paths such as `etag` or `changelog` are excluded from comparison to avoid timestamp sensitivity). +- **Mock request/response bodies** — used to set up what the `MockHttpClientRequest` expects to receive and what it should return. + +#### Fluent Test Pattern + +Tests use a fluent expectation chain from `CoreEx.UnitTesting`: + +```csharp +// Assert a successful POST that publishes an outbox event. 
+var created = Test.Http() + .ExpectIdentifier() + .ExpectETag() + .ExpectChangeLogCreated() + .ExpectJsonFromResource("ProductMutateTests.Create_Success.res.json") + .ExpectSqlServerOutboxEvents(e => e.AssertWithValue("contoso", "contoso.products.product.created.v1")) + .Run(HttpMethod.Post, "/api/products", product) + .AssertCreated() + .Value!; + +// Assert a validation failure. +Test.Http() + .Run(HttpMethod.Post, "/api/products", invalidProduct) + .AssertBadRequest() + .AssertErrors("Text is required.", "Price must be greater than or equal to zero."); + +// Assert a checkout that calls the Products API mock and publishes an outbox event. +_mockHttpReserveRequest + .WithJsonResourceBody("Basket_Checkout_Success.products.req.json") + .Respond.With(HttpStatusCode.OK); + +Test.Http() + .ExpectSqlServerOutboxEvents(e => e.AssertWithValue("contoso", "contoso.shopping.basket.checkedout.v1")) + .Run(HttpMethod.Post, $"/api/baskets/{basketId}/checkout") + .AssertOK(); + +_mockHttpReserveRequest.Verify(); // Confirms the mock was actually called. +``` + +## E2E Runner + +`Contoso.E2E.Runner` is an interactive console application for running end-to-end scenarios against both running APIs. It supports one-shot scenario execution, full basket lifecycle tests, and a parallel load simulation mode with live statistics. + +### Prerequisites + +Both domain APIs (and their relay/subscriber hosts) must be running before using the E2E runner. The easiest way to achieve this is via Aspire (see [Run With Aspire](#run-with-aspire) above). + +### Start the Runner + +From the repository root: + +```bash +dotnet run --project samples/tests/Contoso.E2E.Runner +``` + +On startup the runner checks `/health/ready` on both APIs. Press `ESC` to skip the health check if needed. + +### Configuration + +Default endpoints are defined in [samples/tests/Contoso.E2E.Runner/appsettings.json](tests/Contoso.E2E.Runner/appsettings.json). 
Override any value with a matching environment variable using `__` as a separator: + +```bash +E2E__Products__BaseAddress=https://localhost:7200 +E2E__Shopping__BaseAddress=https://localhost:7219 +``` + +### Interactive Menu + +After the health check passes the runner displays a menu with three groups. + +**Set-Up** (run once before testing): + +| Choice | What it does | +|---|---| +| Database Migration and Base Data Refresh | Runs DbEx migrations for both databases and resets base reference data. | +| Data Seeding for E2E Testing | Calls `POST /api/inventory/adjust` to set on-hand quantity to 1000 units for every active, stocked product. Run this before Shopping basket tests to ensure stock is available. | + +**Scenarios** (repeatable, APIs must be healthy): + +| Choice | What it does | +|---|---| +| Product Query Lifecycle | Queries products with randomised category, brand, and text filters. | +| Product Update Lifecycle | Selects a random product, toggles a description suffix, and PUTs the update. | +| Product Quantity Lifecycle | Queries the on-hand inventory for a random product. | +| Shopping Basket Lifecycle | Creates a basket, adds 1–4 random items, optionally applies the `SAVE10` coupon, checks out, and verifies the resulting basket state. Exercises the full cross-domain flow including inventory reservation and reservation confirmation. | + +**Other:** + +| Choice | What it does | +|---|---| +| Run all scenarios as simulation | Runs all four scenarios in parallel workers (configurable parallelism and delay per scenario). Displays a live dashboard with per-scenario iteration counts, success rates, and throughput. Press `ESC` to stop gracefully. Errors are written to `logs/load-simulation-errors.log`. | +| Retry APIs | Re-runs the health check without restarting the runner. | +| Exit | Exits the runner. | + +### Recommended First-Run Order + +1. Start infrastructure: `podman compose -f docker-compose.yml up -d`. +2. 
Migrate and seed databases (see [Initialize Sample Databases](#initialize-sample-databases)). +3. Start all hosts via Aspire: `aspire run`. +4. Start the runner: `dotnet run --project samples/tests/Contoso.E2E.Runner`. +5. Select **Database Migration and Base Data Refresh** to apply any pending migrations. +6. Select **Data Seeding for E2E Testing** to stock inventory. +7. Run individual scenarios or select **Run all scenarios as simulation** for load testing. + +## Troubleshooting + +### Outbox Relay Does Not Pick Up Messages + +Symptom: + +- API operations succeed but published events are not processed by the relay/subscriber as expected. + +Likely cause: + +- Local machine UTC time is skewed relative to message timestamps (for example, clock drift where local UTC is ahead of expected outbox processing windows). + +What to try: + +1. Verify and correct system date/time and time zone settings. +2. Restart the outbox relay host (or restart all sample hosts from Aspire). +3. If behavior persists, restart the machine to force a clean time sync and host restart state (this resolved the issue in local testing). + +### Dependencies Not Healthy + +Symptom: + +- Hosts fail on startup or repeatedly log dependency connectivity errors. + +What to try: + +1. Ensure infrastructure containers are running via `podman compose -f docker-compose.yml up -d`. +2. Check container health/status with `docker ps` or `podman ps`. +3. Restart with `podman compose -f docker-compose.yml down && podman compose -f docker-compose.yml up -d`. + +### Database Errors On Startup + +Symptom: + +- API or relay hosts fail with database/schema-related errors. + +What to try: + +1. Re-run migrations for both databases. +2. Re-run sample data seeding for both domains. +3. Confirm the SQL dependency container is healthy before restarting hosts. 
diff --git a/samples/aspire.config.json b/samples/aspire.config.json new file mode 100644 index 00000000..e97bd8dd --- /dev/null +++ b/samples/aspire.config.json @@ -0,0 +1,5 @@ +{ + "appHost": { + "path": "aspire/Contoso.Aspire/Contoso.Aspire.csproj" + } +} \ No newline at end of file diff --git a/samples/aspire/Contoso.Aspire/AppHost.cs b/samples/aspire/Contoso.Aspire/AppHost.cs index e00ea1b3..5caf019e 100644 --- a/samples/aspire/Contoso.Aspire/AppHost.cs +++ b/samples/aspire/Contoso.Aspire/AppHost.cs @@ -8,6 +8,12 @@ builder.AddProject("shopping-outbox-relay").AddEndpoints("/health/ready/detailed").AddHostedServiceSupport(); builder.AddProject("shopping-subscribe").AddEndpoints("/health/ready/detailed").AddHostedServiceSupport(); +var orderWorkflowWorker = builder.AddProject("order-workflow-worker").AddEndpoints("/health"); + +builder.AddProject("orders-api") + .WaitFor(orderWorkflowWorker) + .AddEndpoints("/health/ready/detailed"); + builder.Build().Run(); diff --git a/samples/aspire/Contoso.Aspire/Contoso.Aspire.csproj b/samples/aspire/Contoso.Aspire/Contoso.Aspire.csproj index 9313825f..a57ba2a4 100644 --- a/samples/aspire/Contoso.Aspire/Contoso.Aspire.csproj +++ b/samples/aspire/Contoso.Aspire/Contoso.Aspire.csproj @@ -1,8 +1,8 @@ - + Exe - net8.0;net9.0;net10.0 + net10.0 enable enable false @@ -11,6 +11,8 @@ + + diff --git a/samples/aspire/Contoso.Aspire/Properties/launchSettings.json b/samples/aspire/Contoso.Aspire/Properties/launchSettings.json index 9bc93679..b2be8848 100644 --- a/samples/aspire/Contoso.Aspire/Properties/launchSettings.json +++ b/samples/aspire/Contoso.Aspire/Properties/launchSettings.json @@ -22,6 +22,7 @@ "environmentVariables": { "ASPNETCORE_ENVIRONMENT": "Development", "DOTNET_ENVIRONMENT": "Development", + "ASPIRE_ALLOW_UNSECURED_TRANSPORT": "true", "ASPIRE_DASHBOARD_OTLP_ENDPOINT_URL": "http://localhost:19016", "ASPIRE_DASHBOARD_MCP_ENDPOINT_URL": "http://localhost:18153", "ASPIRE_RESOURCE_SERVICE_ENDPOINT_URL": 
"http://localhost:20286" diff --git a/samples/src/Contoso.Order.Workflow.Client/Contoso.Order.Workflow.Client.csproj b/samples/src/Contoso.Order.Workflow.Client/Contoso.Order.Workflow.Client.csproj new file mode 100644 index 00000000..a3a173ec --- /dev/null +++ b/samples/src/Contoso.Order.Workflow.Client/Contoso.Order.Workflow.Client.csproj @@ -0,0 +1,15 @@ + + + + Contoso.Order.Workflow.Client + + + + + + + + + + + \ No newline at end of file diff --git a/samples/src/Contoso.Order.Workflow.Client/DurableTaskConnectionStringFactory.cs b/samples/src/Contoso.Order.Workflow.Client/DurableTaskConnectionStringFactory.cs new file mode 100644 index 00000000..e8a49e90 --- /dev/null +++ b/samples/src/Contoso.Order.Workflow.Client/DurableTaskConnectionStringFactory.cs @@ -0,0 +1,16 @@ +namespace Contoso.Order.Workflow.Client; + +internal static class DurableTaskConnectionStringFactory +{ + public static string Create(DurableTaskSchedulerOptions options) + { + var endpoint = options.Endpoint; + var hostAddress = endpoint.Contains(';', StringComparison.Ordinal) ? endpoint.Split(';', StringSplitOptions.TrimEntries)[0] : endpoint; + var taskHubName = string.IsNullOrWhiteSpace(options.TaskHub) ? "order" : options.TaskHub; + var isLocalEmulator = hostAddress.StartsWith("http://localhost:8080", StringComparison.OrdinalIgnoreCase) + || hostAddress.StartsWith("http://localhost:8081", StringComparison.OrdinalIgnoreCase); + + var authentication = isLocalEmulator ? 
"None" : "DefaultAzure"; + return $"Endpoint={hostAddress};TaskHub={taskHubName};Authentication={authentication}"; + } +} \ No newline at end of file diff --git a/samples/src/Contoso.Order.Workflow.Client/DurableTaskSchedulerOptions.cs b/samples/src/Contoso.Order.Workflow.Client/DurableTaskSchedulerOptions.cs new file mode 100644 index 00000000..505a7833 --- /dev/null +++ b/samples/src/Contoso.Order.Workflow.Client/DurableTaskSchedulerOptions.cs @@ -0,0 +1,10 @@ +namespace Contoso.Order.Workflow.Client; + +public sealed class DurableTaskSchedulerOptions +{ + public const string SectionName = "DurableTaskScheduler"; + + public string Endpoint { get; set; } = "http://localhost:8080"; + + public string TaskHub { get; set; } = "order"; +} \ No newline at end of file diff --git a/samples/src/Contoso.Order.Workflow.Client/OrderWorkflowClient.cs b/samples/src/Contoso.Order.Workflow.Client/OrderWorkflowClient.cs new file mode 100644 index 00000000..8e8381f0 --- /dev/null +++ b/samples/src/Contoso.Order.Workflow.Client/OrderWorkflowClient.cs @@ -0,0 +1,38 @@ +using Contoso.Order.Workflow.Workflow; +using Contoso.Order.Workflow.Workflow.Contracts; +using Microsoft.DurableTask; +using Microsoft.DurableTask.Client; +using Microsoft.Extensions.Logging; + +namespace Contoso.Order.Workflow.Client; + +public sealed class OrderWorkflowClient +{ + private readonly DurableTaskClient _durableTaskClient; + private readonly ILogger _logger; + + public OrderWorkflowClient(DurableTaskClient durableTaskClient, ILogger logger) + { + _durableTaskClient = durableTaskClient; + _logger = logger; + } + + public async Task StartAsync(OrderWorkflowRequest request, string? instanceId = null, CancellationToken cancellationToken = default) + { + var startOptions = string.IsNullOrWhiteSpace(instanceId) + ? 
null + : new StartOrchestrationOptions(instanceId); + + var orchestrationInstanceId = await _durableTaskClient.ScheduleNewOrchestrationInstanceAsync( + nameof(OrderWorkflowOrchestration), + request, + startOptions, + cancellationToken).ConfigureAwait(false); + + _logger.LogInformation("Scheduled {OrchestrationName} with instance id {InstanceId}.", nameof(OrderWorkflowOrchestration), orchestrationInstanceId); + return orchestrationInstanceId; + } + + public Task GetMetadataAsync(string instanceId, bool getInputsAndOutputs = false, CancellationToken cancellationToken = default) + => _durableTaskClient.GetInstanceAsync(instanceId, getInputsAndOutputs, cancellationToken); +} \ No newline at end of file diff --git a/samples/src/Contoso.Order.Workflow.Client/ServiceCollectionExtensions.cs b/samples/src/Contoso.Order.Workflow.Client/ServiceCollectionExtensions.cs new file mode 100644 index 00000000..9696e9bb --- /dev/null +++ b/samples/src/Contoso.Order.Workflow.Client/ServiceCollectionExtensions.cs @@ -0,0 +1,30 @@ +using Microsoft.DurableTask.Client; +using Microsoft.DurableTask.Client.AzureManaged; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; + +namespace Contoso.Order.Workflow.Client; + +public static class ServiceCollectionExtensions +{ + public static IServiceCollection AddContosoOrderWorkflowClient(this IServiceCollection services, IConfiguration configuration) + { + var connectionString = configuration.GetConnectionString("DurableTaskScheduler"); + + var options = new DurableTaskSchedulerOptions + { + Endpoint = configuration[$"{DurableTaskSchedulerOptions.SectionName}:Endpoint"] ?? "http://localhost:8080", + TaskHub = configuration[$"{DurableTaskSchedulerOptions.SectionName}:TaskHub"] ?? "order" + }; + + services.AddDurableTaskClient(durableTaskBuilder => + { + durableTaskBuilder.UseDurableTaskScheduler(string.IsNullOrWhiteSpace(connectionString) + ? 
DurableTaskConnectionStringFactory.Create(options) + : connectionString); + }); + + services.AddScoped(); + return services; + } +} \ No newline at end of file diff --git a/samples/src/Contoso.Order.Workflow.Worker/Contoso.Order.Workflow.Worker.csproj b/samples/src/Contoso.Order.Workflow.Worker/Contoso.Order.Workflow.Worker.csproj new file mode 100644 index 00000000..60edd4df --- /dev/null +++ b/samples/src/Contoso.Order.Workflow.Worker/Contoso.Order.Workflow.Worker.csproj @@ -0,0 +1,17 @@ + + + + Contoso.Order.Workflow.Worker + + + + + + + + + + + + + \ No newline at end of file diff --git a/samples/src/Contoso.Order.Workflow.Worker/Program.cs b/samples/src/Contoso.Order.Workflow.Worker/Program.cs new file mode 100644 index 00000000..b60818d2 --- /dev/null +++ b/samples/src/Contoso.Order.Workflow.Worker/Program.cs @@ -0,0 +1,65 @@ +using Contoso.Order.Workflow.Workflow; +using Contoso.Order.Workflow.Workflow.Activities; +using Microsoft.DurableTask.Worker; +using Microsoft.DurableTask.Worker.AzureManaged; +using OpenTelemetry.Trace; + +namespace Contoso.Order.Workflow.Worker; + +public class Program +{ + private static void Main(string[] args) + { + var builder = WebApplication.CreateBuilder(args); + + builder.Logging.AddConsole(); + builder.Logging.SetMinimumLevel(LogLevel.Information); + + var endpoint = Environment.GetEnvironmentVariable("dts-endpoint") + ?? builder.Configuration["DurableTaskScheduler:Endpoint"] + ?? "http://localhost:8080"; + + var taskHubName = Environment.GetEnvironmentVariable("TASKHUB") + ?? builder.Configuration["DurableTaskScheduler:TaskHub"] + ?? "order"; + + var hostAddress = endpoint.Contains(';', StringComparison.Ordinal) ? endpoint.Split(';', StringSplitOptions.TrimEntries)[0] : endpoint; + var isLocalEmulator = hostAddress.StartsWith("http://localhost:8080", StringComparison.OrdinalIgnoreCase) + || hostAddress.StartsWith("http://localhost:8081", StringComparison.OrdinalIgnoreCase); + + var connectionString = isLocalEmulator + ? 
$"Endpoint={hostAddress};TaskHub={taskHubName};Authentication=None" + : $"Endpoint={hostAddress};TaskHub={taskHubName};Authentication=DefaultAzure"; + + builder.Services.AddDurableTaskWorker() + .AddTasks(registry => + { + registry.AddOrchestrator(); + registry.AddActivity(); + registry.AddActivity(); + }) + .UseDurableTaskScheduler(connectionString); + + builder.Services.AddOpenTelemetry() + .WithTracing(tracing => + { + // The DurableTask SDK registers its own ActivitySource internally for orchestration and activity tracing. + tracing.AddOtlpExporter(); + }); + + builder.Services.AddHealthChecks(); + + var app = builder.Build(); + + app.MapHealthChecks("/health"); + + var logger = app.Services.GetRequiredService>(); + logger.LogInformation( + "Order workflow worker started with endpoint: {Endpoint}, task hub: {TaskHub}, local emulator: {IsLocalEmulator}.", + hostAddress, + taskHubName, + isLocalEmulator); + + app.Run(); + } +} \ No newline at end of file diff --git a/samples/src/Contoso.Order.Workflow.Worker/Properties/launchSettings.json b/samples/src/Contoso.Order.Workflow.Worker/Properties/launchSettings.json new file mode 100644 index 00000000..2cf083f9 --- /dev/null +++ b/samples/src/Contoso.Order.Workflow.Worker/Properties/launchSettings.json @@ -0,0 +1,12 @@ +{ + "profiles": { + "Contoso.Order.Workflow.Worker": { + "commandName": "Project", + "launchBrowser": true, + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + }, + "applicationUrl": "https://localhost:49680;http://localhost:49681" + } + } +} \ No newline at end of file diff --git a/samples/src/Contoso.Order.Workflow.Worker/appsettings.Development.json b/samples/src/Contoso.Order.Workflow.Worker/appsettings.Development.json new file mode 100644 index 00000000..c86fcf0f --- /dev/null +++ b/samples/src/Contoso.Order.Workflow.Worker/appsettings.Development.json @@ -0,0 +1,6 @@ +{ + "DurableTaskScheduler": { + "Endpoint": "http://localhost:8080", + "TaskHub": "order" + } +} \ No 
newline at end of file diff --git a/samples/src/Contoso.Order.Workflow.Worker/appsettings.json b/samples/src/Contoso.Order.Workflow.Worker/appsettings.json new file mode 100644 index 00000000..7ed4ca62 --- /dev/null +++ b/samples/src/Contoso.Order.Workflow.Worker/appsettings.json @@ -0,0 +1,12 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + }, + "DurableTaskScheduler": { + "Endpoint": "http://localhost:8080", + "TaskHub": "order" + } +} \ No newline at end of file diff --git a/samples/src/Contoso.Order.Workflow.Workflow/Activities/SubmitOrderActivity.cs b/samples/src/Contoso.Order.Workflow.Workflow/Activities/SubmitOrderActivity.cs new file mode 100644 index 00000000..019cca51 --- /dev/null +++ b/samples/src/Contoso.Order.Workflow.Workflow/Activities/SubmitOrderActivity.cs @@ -0,0 +1,15 @@ +using Contoso.Order.Workflow.Workflow.Contracts; +using Microsoft.DurableTask; + +namespace Contoso.Order.Workflow.Workflow.Activities; + +[DurableTask] +public sealed class SubmitOrderActivity : TaskActivity +{ + public override Task RunAsync(TaskActivityContext context, SubmitOrderActivityInput input) + { + var message = $"Order '{input.OrderId}' accepted for {input.Amount:0.00} {input.Currency}."; + var result = new OrderWorkflowResult(input.OrderId, true, message, DateTimeOffset.UtcNow); + return Task.FromResult(result); + } +} \ No newline at end of file diff --git a/samples/src/Contoso.Order.Workflow.Workflow/Activities/ValidateOrderActivity.cs b/samples/src/Contoso.Order.Workflow.Workflow/Activities/ValidateOrderActivity.cs new file mode 100644 index 00000000..796b998c --- /dev/null +++ b/samples/src/Contoso.Order.Workflow.Workflow/Activities/ValidateOrderActivity.cs @@ -0,0 +1,17 @@ +using Contoso.Order.Workflow.Workflow.Contracts; +using Microsoft.DurableTask; + +namespace Contoso.Order.Workflow.Workflow.Activities; + +[DurableTask] +public sealed class ValidateOrderActivity : TaskActivity +{ + public override 
Task RunAsync(TaskActivityContext context, ValidateOrderActivityInput input) + { + var hasOrderId = !string.IsNullOrWhiteSpace(input.OrderId); + var hasCurrency = !string.IsNullOrWhiteSpace(input.Currency); + var hasPositiveAmount = input.Amount > 0; + + return Task.FromResult(hasOrderId && hasCurrency && hasPositiveAmount); + } +} \ No newline at end of file diff --git a/samples/src/Contoso.Order.Workflow.Workflow/Contoso.Order.Workflow.Workflow.csproj b/samples/src/Contoso.Order.Workflow.Workflow/Contoso.Order.Workflow.Workflow.csproj new file mode 100644 index 00000000..7785e2fe --- /dev/null +++ b/samples/src/Contoso.Order.Workflow.Workflow/Contoso.Order.Workflow.Workflow.csproj @@ -0,0 +1,11 @@ + + + + Contoso.Order.Workflow.Workflow + + + + + + + \ No newline at end of file diff --git a/samples/src/Contoso.Order.Workflow.Workflow/Contracts/OrderWorkflowContracts.cs b/samples/src/Contoso.Order.Workflow.Workflow/Contracts/OrderWorkflowContracts.cs new file mode 100644 index 00000000..c137ea35 --- /dev/null +++ b/samples/src/Contoso.Order.Workflow.Workflow/Contracts/OrderWorkflowContracts.cs @@ -0,0 +1,9 @@ +namespace Contoso.Order.Workflow.Workflow.Contracts; + +public record OrderWorkflowRequest(string OrderId, decimal Amount, string Currency, string? RequestedBy = null); + +public record OrderWorkflowResult(string OrderId, bool Accepted, string Message, DateTimeOffset ProcessedAt); + +public record ValidateOrderActivityInput(string OrderId, decimal Amount, string Currency); + +public record SubmitOrderActivityInput(string OrderId, decimal Amount, string Currency, string? 
RequestedBy); \ No newline at end of file diff --git a/samples/src/Contoso.Order.Workflow.Workflow/OrderWorkflowOrchestration.cs b/samples/src/Contoso.Order.Workflow.Workflow/OrderWorkflowOrchestration.cs new file mode 100644 index 00000000..932f2160 --- /dev/null +++ b/samples/src/Contoso.Order.Workflow.Workflow/OrderWorkflowOrchestration.cs @@ -0,0 +1,27 @@ +using Contoso.Order.Workflow.Workflow.Activities; +using Contoso.Order.Workflow.Workflow.Contracts; +using Microsoft.DurableTask; + +namespace Contoso.Order.Workflow.Workflow; + +[DurableTask] +public sealed class OrderWorkflowOrchestration : TaskOrchestrator +{ + public override async Task RunAsync(TaskOrchestrationContext context, OrderWorkflowRequest input) + { + var validation = await context.CallActivityAsync( + nameof(ValidateOrderActivity), + new ValidateOrderActivityInput(input.OrderId, input.Amount, input.Currency)); + + if (!validation) + { + return new OrderWorkflowResult(input.OrderId, false, "Order request failed validation.", context.CurrentUtcDateTime); + } + + var submission = await context.CallActivityAsync( + nameof(SubmitOrderActivity), + new SubmitOrderActivityInput(input.OrderId, input.Amount, input.Currency, input.RequestedBy)); + + return submission; + } +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Api/Contoso.Orders.Api.csproj b/samples/src/Contoso.Orders.Api/Contoso.Orders.Api.csproj new file mode 100644 index 00000000..62f64966 --- /dev/null +++ b/samples/src/Contoso.Orders.Api/Contoso.Orders.Api.csproj @@ -0,0 +1,26 @@ + + + + + + + + + + + + + + + + + + + + + + PreserveNewest + + + + \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Api/Controllers/OrderController.cs b/samples/src/Contoso.Orders.Api/Controllers/OrderController.cs new file mode 100644 index 00000000..e0fa0d6a --- /dev/null +++ b/samples/src/Contoso.Orders.Api/Controllers/OrderController.cs @@ -0,0 +1,55 @@ +namespace Contoso.Orders.Api.Controllers; + +using 
Contoso.Order.Workflow.Client; +using Contoso.Order.Workflow.Workflow.Contracts; +using OrderContract = Contoso.Orders.Contracts.Order; + +[ApiController, Route("/api/orders"), OpenApiTag("Orders")] +public class OrderController(WebApi webApi, IOrderService service, OrderWorkflowClient orderWorkflowClient) : ControllerBase +{ + private readonly WebApi _webApi = webApi.ThrowIfNull(); + private readonly IOrderService _service = service.ThrowIfNull(); + private readonly OrderWorkflowClient _orderWorkflowClient = orderWorkflowClient.ThrowIfNull(); + + [HttpPost] + [Accepts] + [ProducesResponseType(StatusCodes.Status201Created)] + [IdempotencyKey] + public Task PostAsync() => _webApi.PostAsync(Request, (ro, _) => + { + ro.WithLocationUri(o => new Uri($"/api/orders/{o.Id}", UriKind.Relative)); + return _service.CreateAsync(ro.Value); + }); + + [HttpPut("{id}")] + [Accepts] + [ProducesResponseType(typeof(OrderContract), StatusCodes.Status200OK)] + [ProducesNotFoundProblem()] + public Task PutAsync(string id) => _webApi.PutAsync(Request, (ro, _) + => _service.UpdateAsync(ro.Value.Adjust(o => o.Id = id.Required()))); + + [HttpPatch("{id}")] + [Accepts(HttpNames.MergePatchJsonMediaTypeName)] + [ProducesResponseType(typeof(OrderContract), StatusCodes.Status200OK)] + [ProducesNotFoundProblem()] + public Task PatchAsync(string id) => _webApi.PatchAsync(Request, + get: (ro, _) => _service.GetAsync(id.Required()), + put: (ro, _) => _service.UpdateAsync(ro.Value.Adjust(o => o.Id = id))); + + [HttpDelete("{id}")] + [ProducesResponseType(StatusCodes.Status204NoContent)] + public Task DeleteAsync(string id) => _webApi.DeleteAsync(Request, (_, _) + => _service.DeleteAsync(id.Required())); + + [HttpPost("orchestrate")] + [Accepts] + [ProducesResponseType(typeof(OrchestrateOrderResponse), StatusCodes.Status202Accepted)] + [IdempotencyKey] + public Task OrchestrateOrderAsync() => _webApi.PostAsync(Request, async (ro, ct) => + { + var instanceId = await 
_orderWorkflowClient.StartAsync(ro.Value, cancellationToken: ct).ConfigureAwait(false); + return new OrchestrateOrderResponse(instanceId); + }, HttpStatusCode.Accepted); +} + +public sealed record OrchestrateOrderResponse(string InstanceId); \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Api/Controllers/OrderReadController.cs b/samples/src/Contoso.Orders.Api/Controllers/OrderReadController.cs new file mode 100644 index 00000000..31183dea --- /dev/null +++ b/samples/src/Contoso.Orders.Api/Controllers/OrderReadController.cs @@ -0,0 +1,22 @@ +namespace Contoso.Orders.Api.Controllers; + +using OrderContract = Contoso.Orders.Contracts.Order; + +[ApiController, Route("/api/orders"), OpenApiTag("Orders")] +public class OrderReadController(WebApi webApi, IOrderReadService service) : ControllerBase +{ + private readonly WebApi _webApi = webApi.ThrowIfNull(); + private readonly IOrderReadService _service = service.ThrowIfNull(); + + [HttpGet("{id}"), HttpHead("{id}")] + [ProducesResponseType(typeof(OrderContract), StatusCodes.Status200OK)] + [ProducesNotFoundProblem()] + public Task GetAsync(string id) => _webApi.GetAsync(Request, (_, _) + => _service.GetAsync(id.Required())); + + [HttpGet] + [ProducesResponseType(typeof(OrderLite[]), StatusCodes.Status200OK)] + [Query(supportsOrderBy: true), Paging(supportsCount: true)] + public Task QueryAsync() => _webApi.GetAsync(Request, (ro, _) + => _service.QueryAsync(ro.QueryArgs, ro.PagingArgs)); +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Api/Controllers/ReferenceDataController.cs b/samples/src/Contoso.Orders.Api/Controllers/ReferenceDataController.cs new file mode 100644 index 00000000..a7677951 --- /dev/null +++ b/samples/src/Contoso.Orders.Api/Controllers/ReferenceDataController.cs @@ -0,0 +1,12 @@ +namespace Contoso.Orders.Api.Controllers; + +[ApiController, Route("/api/refdata")] +public class ReferenceDataController(WebApi webApi) : ControllerBase +{ + private readonly WebApi 
_webApi = webApi.ThrowIfNull(); + + [HttpGet("order-statuses"), HttpHead("order-statuses")] + [ProducesResponseType(typeof(OrderStatus[]), StatusCodes.Status200OK)] + public Task GetOrderStatusesAsync([FromQuery] IEnumerable? codes = default, string? text = default) + => _webApi.GetAsync(Request, (ro, ct) => ReferenceDataOrchestrator.Current.GetWithFilterAsync(codes, text, ro.IsIncludeInactive, ct)); +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Api/GlobalUsing.cs b/samples/src/Contoso.Orders.Api/GlobalUsing.cs new file mode 100644 index 00000000..e912609f --- /dev/null +++ b/samples/src/Contoso.Orders.Api/GlobalUsing.cs @@ -0,0 +1,14 @@ +global using Contoso.Orders.Application; +global using Contoso.Orders.Application.Interfaces; +global using Contoso.Orders.Contracts; +global using CoreEx; +global using CoreEx.AspNetCore.Mvc; +global using CoreEx.Entities; +global using CoreEx.Http; +global using CoreEx.Json; +global using CoreEx.RefData; +global using CoreEx.Validation; +global using Microsoft.AspNetCore.Mvc; +global using NSwag.Annotations; +global using System.Net; +global using System.Text.Json; \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Api/Program.cs b/samples/src/Contoso.Orders.Api/Program.cs new file mode 100644 index 00000000..ebfc1191 --- /dev/null +++ b/samples/src/Contoso.Orders.Api/Program.cs @@ -0,0 +1,82 @@ +using Contoso.Orders.Infrastructure.Repositories; +using Contoso.Order.Workflow.Client; +using Microsoft.Extensions.Options; +using OpenTelemetry; +using OpenTelemetry.Trace; +using StackExchange.Redis; +using ZiggyCreatures.Caching.Fusion; +using ZiggyCreatures.Caching.Fusion.Backplane.StackExchangeRedis; + +namespace Contoso.Orders.Api; + +public class Program +{ + private static void Main(string[] args) + { + var builder = WebApplication.CreateBuilder(args); + + builder.AddHostSettings(); + + builder.Services + .AddExecutionContext() + .AddReferenceDataOrchestrator() + .AddMvcWebApi() + 
.AddHttpWebApi(); + + builder.Services.AddContosoOrderWorkflowClient(builder.Configuration); + + builder.Services.AddDynamicServicesUsing(); + + builder.Services.AddMemoryCache(); + builder.AddRedisDistributedCache("redis"); + + builder.Services.AddFusionCache() + .WithRegisteredMemoryCache() + .WithRegisteredDistributedCache() + .WithBackplane(sp => new RedisBackplane(new RedisBackplaneOptions { Configuration = sp.GetRequiredService>().Value.ToString() })) + .WithSystemTextJsonSerializer(JsonDefaults.SerializerOptions); + + builder.Services + .AddFusionHybridCache() + .AddDefaultCacheKeyProvider() + .AddHybridCacheIdempotencyProvider(); + + builder.AddSqlServerClient("SqlServer"); + builder.Services + .AddSqlServerDatabase() + .AddSqlServerUnitOfWork() + .AddEventFormatter() + .AddSqlServerOutboxPublisher() + .AddDbContext() + .AddEfDb(); + + builder.Services.PostConfigureAllHealthChecks(); + + builder.Services.AddControllers(); + + builder.Services.AddOpenApiDocument(s => + { + s.Title = builder.Environment.ApplicationName; + s.AddCoreExConfiguration(); + }); + + builder.WithCoreExTelemetry() + .WithCoreExSqlServerTelemetry() + .UseOtlpExporter(); + + var app = builder.Build(); + + app.UseCoreExExceptionHandler(); + app.UseHttpsRedirection(); + app.UseAuthorization(); + app.UseExecutionContext(); + app.UseIdempotencyKey(); + app.MapControllers(); + + app.UseOpenApi(); + app.UseSwaggerUi(); + app.MapHealthChecks(); + + app.Run(); + } +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Api/Properties/launchSettings.json b/samples/src/Contoso.Orders.Api/Properties/launchSettings.json new file mode 100644 index 00000000..0898c92e --- /dev/null +++ b/samples/src/Contoso.Orders.Api/Properties/launchSettings.json @@ -0,0 +1,13 @@ +{ + "profiles": { + "Contoso.Orders.Api": { + "commandName": "Project", + "launchUrl": "https://localhost:62023/swagger", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + }, + "dotnetRunMessages": 
true, + "applicationUrl": "https://localhost:62023;http://localhost:62024" + } + } +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Api/appsettings.Development.json b/samples/src/Contoso.Orders.Api/appsettings.Development.json new file mode 100644 index 00000000..ad8dee3b --- /dev/null +++ b/samples/src/Contoso.Orders.Api/appsettings.Development.json @@ -0,0 +1,36 @@ +{ + "ConnectionStrings": { + "DurableTaskScheduler": "Endpoint=http://localhost:8080;TaskHub=order;Authentication=None" + }, + "Logging": { + "LogLevel": { + "Default": "Information", + "Azure": "Warning", + "Microsoft": "Warning", + "ZiggyCreature": "Warning", + "StackExchange": "Warning" + } + }, + "Aspire": { + "Microsoft": { + "Data": { + "SqlClient": { + "ConnectionString": "Data Source=127.0.0.1,1433;Initial Catalog=Contoso;User id=sa;Password=yourStrong(!)Password;TrustServerCertificate=true" + } + } + }, + "StackExchange": { + "Redis": { + "ConnectionString": "localhost:6379", + "ConfigurationOptions": { + "ConnectTimeout": 3000, + "ConnectRetry": 2 + } + } + } + }, + "DurableTaskScheduler": { + "Endpoint": "http://localhost:8080", + "TaskHub": "order" + } +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Api/appsettings.json b/samples/src/Contoso.Orders.Api/appsettings.json new file mode 100644 index 00000000..8946e119 --- /dev/null +++ b/samples/src/Contoso.Orders.Api/appsettings.json @@ -0,0 +1,20 @@ +{ + "CoreEx": { + "Host": { + "SolutionName": "Contoso", + "DomainName": "Orders" + }, + "Events": { + "Destination": "contoso" + } + }, + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning", + "Microsoft.EntityFrameworkCore": "Warning", + "Microsoft.EntityFrameworkCore.Update": "None" + } + }, + "AllowedHosts": "*" +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Application/Contoso.Orders.Application.csproj b/samples/src/Contoso.Orders.Application/Contoso.Orders.Application.csproj new 
file mode 100644 index 00000000..7a63b0d5 --- /dev/null +++ b/samples/src/Contoso.Orders.Application/Contoso.Orders.Application.csproj @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Application/GlobalUsing.cs b/samples/src/Contoso.Orders.Application/GlobalUsing.cs new file mode 100644 index 00000000..e6597fe2 --- /dev/null +++ b/samples/src/Contoso.Orders.Application/GlobalUsing.cs @@ -0,0 +1,14 @@ +global using Contoso.Orders.Application.Interfaces; +global using Contoso.Orders.Application.Repositories; +global using Contoso.Orders.Application.Validators; +global using Contoso.Orders.Contracts; +global using CoreEx; +global using CoreEx.Data; +global using CoreEx.DependencyInjection; +global using CoreEx.Events; +global using CoreEx.Localization; +global using CoreEx.RefData; +global using CoreEx.RefData.Abstractions; +global using CoreEx.Results; +global using CoreEx.Validation; +global using System.Text.Json; \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Application/Interfaces/IOrderReadService.cs b/samples/src/Contoso.Orders.Application/Interfaces/IOrderReadService.cs new file mode 100644 index 00000000..b4d1fc0c --- /dev/null +++ b/samples/src/Contoso.Orders.Application/Interfaces/IOrderReadService.cs @@ -0,0 +1,8 @@ +namespace Contoso.Orders.Application.Interfaces; + +public interface IOrderReadService +{ + Task GetAsync(string id); + + Task> QueryAsync(QueryArgs? query, PagingArgs? 
paging); +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Application/Interfaces/IOrderService.cs b/samples/src/Contoso.Orders.Application/Interfaces/IOrderService.cs new file mode 100644 index 00000000..ceacfc72 --- /dev/null +++ b/samples/src/Contoso.Orders.Application/Interfaces/IOrderService.cs @@ -0,0 +1,12 @@ +namespace Contoso.Orders.Application.Interfaces; + +public interface IOrderService +{ + Task GetAsync(string id); + + Task CreateAsync(Contracts.Order order); + + Task UpdateAsync(Contracts.Order order); + + Task DeleteAsync(string id); +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Application/OrderReadService.cs b/samples/src/Contoso.Orders.Application/OrderReadService.cs new file mode 100644 index 00000000..4d632199 --- /dev/null +++ b/samples/src/Contoso.Orders.Application/OrderReadService.cs @@ -0,0 +1,12 @@ +namespace Contoso.Orders.Application; + +[ScopedService] +public class OrderReadService(IOrderRepository repository) : IOrderReadService +{ + private readonly IOrderRepository _repository = repository.ThrowIfNull(); + + public Task GetAsync(string id) => _repository.GetAsync(id); + + public Task> QueryAsync(QueryArgs? query, PagingArgs? 
paging) + => _repository.QueryAsync(query, paging); +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Application/OrderService.cs b/samples/src/Contoso.Orders.Application/OrderService.cs new file mode 100644 index 00000000..561ed359 --- /dev/null +++ b/samples/src/Contoso.Orders.Application/OrderService.cs @@ -0,0 +1,56 @@ +namespace Contoso.Orders.Application; + +[ScopedService] +public class OrderService(IUnitOfWork unitOfWork, IOrderRepository repository) : IOrderService +{ + private readonly IUnitOfWork _unitOfWork = unitOfWork.ThrowIfNull(); + private readonly IOrderRepository _repository = repository.ThrowIfNull(); + + public Task GetAsync(string id) => _repository.GetAsync(id); + + public async Task CreateAsync(Order order) + { + order.ThrowIfNull(); + + await OrderValidator.Default.ValidateAndThrowAsync(order).ConfigureAwait(false); + + order.Id = Runtime.NewId(); + order.StatusCode ??= "P"; + + return await _unitOfWork.ExecuteAsync(async () => + { + var dr = await _repository.CreateAsync(order).ConfigureAwait(false); + return dr.WhereMutated(v => _unitOfWork.Events.Add(EventData.CreateEventWith(v, EventAction.Created))); + }).ConfigureAwait(false); + } + + public async Task UpdateAsync(Order order) + { + order.ThrowIfNull(); + order.Id.ThrowIfNullOrEmpty(); + + await OrderValidator.Default.ValidateAndThrowAsync(order).ConfigureAwait(false); + + var current = await _repository.GetAsync(order.Id).ConfigureAwait(false); + NotFoundException.ThrowIfDefault(current); + + return await _unitOfWork.ExecuteAsync(async () => + { + var dr = await _repository.UpdateAsync(order).ConfigureAwait(false); + return dr.WhereMutated(v => _unitOfWork.Events.Add(EventData.CreateEventWith(v, EventAction.Updated))); + }).ConfigureAwait(false); + } + + public async Task DeleteAsync(string id) + { + var order = await _repository.GetAsync(id).ConfigureAwait(false); + if (order is null) + return; + + await _unitOfWork.ExecuteAsync(async () => + { + var dr = await 
_repository.DeleteAsync(id).ConfigureAwait(false); + dr.WhereMutated(() => _unitOfWork.Events.Add(EventData.CreateEventWith(default, EventAction.Deleted).WithKey(id))); + }).ConfigureAwait(false); + } +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Application/ReferenceDataService.cs b/samples/src/Contoso.Orders.Application/ReferenceDataService.cs new file mode 100644 index 00000000..a2503d0f --- /dev/null +++ b/samples/src/Contoso.Orders.Application/ReferenceDataService.cs @@ -0,0 +1,18 @@ +namespace Contoso.Orders.Application; + +[ScopedService] +public class ReferenceDataService(IReferenceDataRepository repository) : IReferenceDataProvider +{ + private readonly IReferenceDataRepository _repository = repository.ThrowIfNull(); + + public IEnumerable<(Type, Type)> Types => + [ + (typeof(OrderStatus), typeof(OrderStatusCollection)), + ]; + + public async Task GetAsync(Type type, CancellationToken cancellationToken = default) => type switch + { + _ when type == typeof(OrderStatus) => await _repository.GetAllOrderStatusesAsync().ConfigureAwait(false), + _ => throw new InvalidOperationException($"Type {type.FullName} is not a known {nameof(IReferenceData)}.") + }; +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Application/Repositories/IOrderRepository.cs b/samples/src/Contoso.Orders.Application/Repositories/IOrderRepository.cs new file mode 100644 index 00000000..dd65ab9f --- /dev/null +++ b/samples/src/Contoso.Orders.Application/Repositories/IOrderRepository.cs @@ -0,0 +1,14 @@ +namespace Contoso.Orders.Application.Repositories; + +public interface IOrderRepository +{ + Task GetAsync(string id); + + Task> CreateAsync(Contracts.Order order); + + Task> UpdateAsync(Contracts.Order order); + + Task DeleteAsync(string id); + + Task> QueryAsync(QueryArgs? query, PagingArgs? 
paging); +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Application/Repositories/IReferenceDataRepository.cs b/samples/src/Contoso.Orders.Application/Repositories/IReferenceDataRepository.cs new file mode 100644 index 00000000..7a7bea08 --- /dev/null +++ b/samples/src/Contoso.Orders.Application/Repositories/IReferenceDataRepository.cs @@ -0,0 +1,6 @@ +namespace Contoso.Orders.Application.Repositories; + +public interface IReferenceDataRepository +{ + Task GetAllOrderStatusesAsync(); +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Application/Validators/OrderValidator.cs b/samples/src/Contoso.Orders.Application/Validators/OrderValidator.cs new file mode 100644 index 00000000..e8e52b13 --- /dev/null +++ b/samples/src/Contoso.Orders.Application/Validators/OrderValidator.cs @@ -0,0 +1,16 @@ +namespace Contoso.Orders.Application.Validators; + +public class OrderValidator : Validator +{ + private static readonly Validator _itemValidator = Validator.Create() + .HasProperty(x => x.ProductId, p => p.Mandatory().MaximumLength(100)) + .HasProperty(x => x.Quantity, p => p.GreaterThanOrEqualTo(0m).PrecisionScale(null, 4)) + .HasProperty(x => x.UnitPrice, p => p.GreaterThanOrEqualTo(0m).PrecisionScale(null, 4)); + + public OrderValidator() + { + Property(o => o.CustomerId).Mandatory().MaximumLength(100); + Property(o => o.Status).Mandatory().IsValid(); + Property(o => o.Items).Collection(with => with.WithItemValidator(_itemValidator)); + } +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Contracts/Contoso.Orders.Contracts.csproj b/samples/src/Contoso.Orders.Contracts/Contoso.Orders.Contracts.csproj new file mode 100644 index 00000000..3cebd766 --- /dev/null +++ b/samples/src/Contoso.Orders.Contracts/Contoso.Orders.Contracts.csproj @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Contracts/GlobalUsing.cs b/samples/src/Contoso.Orders.Contracts/GlobalUsing.cs 
new file mode 100644 index 00000000..e5b13f15 --- /dev/null +++ b/samples/src/Contoso.Orders.Contracts/GlobalUsing.cs @@ -0,0 +1,5 @@ +global using CoreEx.Entities; +global using CoreEx.Localization; +global using CoreEx.RefData; +global using System.ComponentModel; +global using System.Text.Json.Serialization; \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Contracts/Order.cs b/samples/src/Contoso.Orders.Contracts/Order.cs new file mode 100644 index 00000000..7d6ab4a8 --- /dev/null +++ b/samples/src/Contoso.Orders.Contracts/Order.cs @@ -0,0 +1,11 @@ +namespace Contoso.Orders.Contracts; + +[Contract] +public partial class Order : OrderBase, IETag, IChangeLog +{ + [ReadOnly(true)] + public ChangeLog? ChangeLog { get; set; } + + [ReadOnly(true)] + public string? ETag { get; set; } +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Contracts/OrderBase.cs b/samples/src/Contoso.Orders.Contracts/OrderBase.cs new file mode 100644 index 00000000..98ec1a6c --- /dev/null +++ b/samples/src/Contoso.Orders.Contracts/OrderBase.cs @@ -0,0 +1,16 @@ +namespace Contoso.Orders.Contracts; + +[Contract] +public abstract partial class OrderBase : IIdentifier +{ + [ReadOnly(true)] + public string? Id { get; set; } + + public string? CustomerId { get; set; } + + [ReferenceData] + [Localization("Order status")] + public partial string? StatusCode { get; set; } + + public List? Items { get; set; } +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Contracts/OrderItem.cs b/samples/src/Contoso.Orders.Contracts/OrderItem.cs new file mode 100644 index 00000000..ea92c967 --- /dev/null +++ b/samples/src/Contoso.Orders.Contracts/OrderItem.cs @@ -0,0 +1,14 @@ +namespace Contoso.Orders.Contracts; + +[Contract] +public partial class OrderItem : IIdentifier +{ + [ReadOnly(true)] + public string? Id { get; set; } + + public string? 
ProductId { get; set; } + + public decimal Quantity { get; set; } + + public decimal UnitPrice { get; set; } +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Contracts/OrderLite.cs b/samples/src/Contoso.Orders.Contracts/OrderLite.cs new file mode 100644 index 00000000..ee1bdca3 --- /dev/null +++ b/samples/src/Contoso.Orders.Contracts/OrderLite.cs @@ -0,0 +1,17 @@ +namespace Contoso.Orders.Contracts; + +[Contract] +public partial class OrderLite : IIdentifier +{ + [ReadOnly(true)] + public string? Id { get; set; } + + public string? CustomerId { get; set; } + + [ReferenceData] + [Localization("Order status")] + public partial string? StatusCode { get; set; } + + [ReadOnly(true)] + public ChangeLog? ChangeLog { get; set; } +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Contracts/OrderStatus.cs b/samples/src/Contoso.Orders.Contracts/OrderStatus.cs new file mode 100644 index 00000000..855973d6 --- /dev/null +++ b/samples/src/Contoso.Orders.Contracts/OrderStatus.cs @@ -0,0 +1,6 @@ +namespace Contoso.Orders.Contracts; + +[ReferenceData] +public partial class OrderStatus : ReferenceData { } + +public class OrderStatusCollection() : ReferenceDataCollection(ReferenceDataSortOrder.Code) { } \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Database/Contoso.Orders.Database.csproj b/samples/src/Contoso.Orders.Database/Contoso.Orders.Database.csproj new file mode 100644 index 00000000..fe22e9e9 --- /dev/null +++ b/samples/src/Contoso.Orders.Database/Contoso.Orders.Database.csproj @@ -0,0 +1,21 @@ + + + + Exe + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Database/Data/ref-data.yaml b/samples/src/Contoso.Orders.Database/Data/ref-data.yaml new file mode 100644 index 00000000..d5b6547b --- /dev/null +++ b/samples/src/Contoso.Orders.Database/Data/ref-data.yaml @@ -0,0 +1,5 @@ +Orders: + - $^OrderStatus: + - P: Pending + - C: Confirmed + - X: Cancelled \ No newline 
at end of file diff --git a/samples/src/Contoso.Orders.Database/Migrations/20260101-000001-create-orders-schema.sql b/samples/src/Contoso.Orders.Database/Migrations/20260101-000001-create-orders-schema.sql new file mode 100644 index 00000000..6b581a3b --- /dev/null +++ b/samples/src/Contoso.Orders.Database/Migrations/20260101-000001-create-orders-schema.sql @@ -0,0 +1 @@ +CREATE SCHEMA [Orders] \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Database/Migrations/20260101-000101-create-orders-orderstatus.sql b/samples/src/Contoso.Orders.Database/Migrations/20260101-000101-create-orders-orderstatus.sql new file mode 100644 index 00000000..85e4cdd8 --- /dev/null +++ b/samples/src/Contoso.Orders.Database/Migrations/20260101-000101-create-orders-orderstatus.sql @@ -0,0 +1,18 @@ +-- Migration Script + +BEGIN TRANSACTION + +CREATE TABLE [Orders].[OrderStatus] ( + [OrderStatusId] NVARCHAR(50) NOT NULL PRIMARY KEY, + [Code] NVARCHAR(50) NOT NULL UNIQUE, + [Text] NVARCHAR(250) NULL, + [IsActive] BIT NULL, + [SortOrder] INT NULL, + [RowVersion] TIMESTAMP NOT NULL, + [CreatedBy] NVARCHAR(250) NULL, + [CreatedOn] DATETIMEOFFSET NULL, + [UpdatedBy] NVARCHAR(250) NULL, + [UpdatedOn] DATETIMEOFFSET NULL +); + +COMMIT TRANSACTION \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Database/Migrations/20260101-000201-create-orders-order.sql b/samples/src/Contoso.Orders.Database/Migrations/20260101-000201-create-orders-order.sql new file mode 100644 index 00000000..226a5af3 --- /dev/null +++ b/samples/src/Contoso.Orders.Database/Migrations/20260101-000201-create-orders-order.sql @@ -0,0 +1,16 @@ +-- Migration Script + +BEGIN TRANSACTION + +CREATE TABLE [Orders].[Order] ( + [OrderId] NVARCHAR(50) NOT NULL PRIMARY KEY, + [CustomerId] NVARCHAR(100) NOT NULL, + [StatusCode] NVARCHAR(50) NOT NULL, + [CreatedBy] NVARCHAR(250) NULL, + [CreatedOn] DATETIMEOFFSET NULL, + [UpdatedBy] NVARCHAR(250) NULL, + [UpdatedOn] DATETIMEOFFSET NULL, + [RowVersion] 
TIMESTAMP NOT NULL +); + +COMMIT TRANSACTION \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Database/Migrations/20260101-000202-create-orders-orderitem.sql b/samples/src/Contoso.Orders.Database/Migrations/20260101-000202-create-orders-orderitem.sql new file mode 100644 index 00000000..28c1bb48 --- /dev/null +++ b/samples/src/Contoso.Orders.Database/Migrations/20260101-000202-create-orders-orderitem.sql @@ -0,0 +1,17 @@ +-- Migration Script + +BEGIN TRANSACTION + +CREATE TABLE [Orders].[OrderItem] ( + [OrderItemId] NVARCHAR(50) NOT NULL PRIMARY KEY, + [OrderId] NVARCHAR(50) NOT NULL FOREIGN KEY REFERENCES [Orders].[Order]([OrderId]), + [ProductId] NVARCHAR(100) NOT NULL, + [Quantity] DECIMAL(18, 4) NOT NULL DEFAULT 0, + [UnitPrice] DECIMAL(18, 4) NOT NULL DEFAULT 0, + [CreatedBy] NVARCHAR(250) NULL, + [CreatedOn] DATETIMEOFFSET NULL, + [UpdatedBy] NVARCHAR(250) NULL, + [UpdatedOn] DATETIMEOFFSET NULL +); + +COMMIT TRANSACTION \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Database/Migrations/20260101-000301-create-orders-outbox-tables.sql b/samples/src/Contoso.Orders.Database/Migrations/20260101-000301-create-orders-outbox-tables.sql new file mode 100644 index 00000000..80696224 --- /dev/null +++ b/samples/src/Contoso.Orders.Database/Migrations/20260101-000301-create-orders-outbox-tables.sql @@ -0,0 +1,33 @@ +-- Create table: [Orders].[Outbox] and [Orders].[OutboxLease] + +BEGIN TRANSACTION + +CREATE TABLE [Orders].[Outbox] ( + [OutboxId] BIGINT IDENTITY (1, 1) NOT NULL PRIMARY KEY, + [TenantId] NVARCHAR(255) NOT NULL, + [PartitionId] INT NOT NULL, + [Status] TINYINT NOT NULL DEFAULT 0, + [EnqueuedUtc] DATETIME2 NOT NULL, + [AvailableUtc] DATETIME2 NOT NULL, + [DequeuedUtc] DATETIME2 NULL, + [Attempts] INT NOT NULL DEFAULT 0, + [Destination] NVARCHAR(255) NULL, + [Event] NVARCHAR(MAX) NOT NULL, + [LeaseId] UNIQUEIDENTIFIER NULL, + [LeaseUntilUtc] DATETIME2 NULL, + + INDEX [IX_Orders_Outbox_PartitionOrder] ([TenantId], 
[PartitionId], [OutboxId]) INCLUDE ([Status], [AvailableUtc], [LeaseUntilUtc], [Destination], [Event], [Attempts]), + INDEX [IX_Orders_Outbox_WorkerPull] ([TenantId], [PartitionId], [Status]) INCLUDE ([OutboxId], [AvailableUtc]), + INDEX [IX_Orders_Outbox_CleanUp] ([OutboxId]) INCLUDE ([DequeuedUtc]) WHERE [Status] = 2 +); + +CREATE TABLE [Orders].[OutboxLease] ( + [TenantId] NVARCHAR(255) NOT NULL, + [PartitionId] INT NOT NULL, + [LeaseId] UNIQUEIDENTIFIER NULL, + [LeaseUntilUtc] DATETIME2 NULL, + + CONSTRAINT PK_Orders_OutboxLease PRIMARY KEY (TenantId, PartitionId) +); + +COMMIT TRANSACTION \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Database/Program.cs b/samples/src/Contoso.Orders.Database/Program.cs new file mode 100644 index 00000000..14d86fbb --- /dev/null +++ b/samples/src/Contoso.Orders.Database/Program.cs @@ -0,0 +1,40 @@ +using CoreEx.Database; +using DbEx.Migration; +using DbEx.SqlServer.Console; + +namespace Contoso.Orders.Database; + +/// +/// Represents the database utilities program (capability). +/// +public class Program +{ + /// + /// Main startup. + /// + /// The startup arguments. + /// The status code whereby zero indicates success. + public static Task Main(string[] args) => SqlServerMigrationConsole + .Create("Data Source=127.0.0.1,1433;Initial Catalog=Contoso;User id=sa;Password=yourStrong(!)Password;TrustServerCertificate=true") + .Configure(c => ConfigureMigrationArgs(c.Args)) + .RunAsync(args); + + /// + /// Configure the . + /// + /// The . + /// The . + public static MigrationArgs ConfigureMigrationArgs(MigrationArgs args) + { + args.AddAssembly().AddAssembly() + .IncludeExtendedSchemaScripts() + .DataParserArgs + .RefDataColumnDefault("SortOrder", _ => 0) + .RefDataColumnDefault("Scale", _ => 0); + + // Only reset data for the Orders schema. 
+ args.DataResetFilterPredicate = ts => ts.Schema == "Orders"; + + return args; + } +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Database/Schema/Stored Procedures/spOutboxBatchCancel.g.sql b/samples/src/Contoso.Orders.Database/Schema/Stored Procedures/spOutboxBatchCancel.g.sql new file mode 100644 index 00000000..5730db0d --- /dev/null +++ b/samples/src/Contoso.Orders.Database/Schema/Stored Procedures/spOutboxBatchCancel.g.sql @@ -0,0 +1,50 @@ +CREATE OR ALTER PROCEDURE [Orders].[spOutboxBatchCancel] + @LeaseId UNIQUEIDENTIFIER, + @BackoffSeconds INT +AS +BEGIN + /* + * This is automatically generated; any changes will be lost. + */ + + SET NOCOUNT ON; + SET XACT_ABORT ON; + SET LOCK_TIMEOUT 5000; + SET TRANSACTION ISOLATION LEVEL READ COMMITTED; + + DECLARE @Now DATETIME2 = SYSUTCDATETIME(); + + BEGIN TRY + BEGIN TRAN; + + UPDATE o + SET o.[Status] = 0, + o.[Attempts] = o.[Attempts] + 1, + o.[AvailableUtc] = DATEADD(SECOND, @BackoffSeconds, @Now), + o.[LeaseId] = NULL, + o.[LeaseUntilUtc] = NULL + FROM [Orders].[Outbox] AS o WITH (UPDLOCK, ROWLOCK) + WHERE o.[LeaseId] = @LeaseId + AND o.[Status] = 1; + + IF (@@ROWCOUNT = 0) + BEGIN + COMMIT; + RETURN -1; + END + + COMMIT; + + BEGIN TRY + EXEC [Orders].[spOutboxLeaseRelease] @LeaseId; + END TRY + BEGIN CATCH + END CATCH + + RETURN 0; + END TRY + BEGIN CATCH + IF (XACT_STATE() <> 0) ROLLBACK; + THROW; + END CATCH +END \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Database/Schema/Stored Procedures/spOutboxBatchClaim.g.sql b/samples/src/Contoso.Orders.Database/Schema/Stored Procedures/spOutboxBatchClaim.g.sql new file mode 100644 index 00000000..423dcb11 --- /dev/null +++ b/samples/src/Contoso.Orders.Database/Schema/Stored Procedures/spOutboxBatchClaim.g.sql @@ -0,0 +1,100 @@ +CREATE OR ALTER PROCEDURE [Orders].[spOutboxBatchClaim] + @TenantId NVARCHAR(255) = NULL, + @PartitionId INT, + @BatchSize INT, + @LeaseId UNIQUEIDENTIFIER, + @LeaseSeconds INT +AS +BEGIN + /* + * 
This is automatically generated; any changes will be lost. + */ + + SET NOCOUNT ON; + SET XACT_ABORT ON; + + DECLARE @Now DATETIME2 = SYSUTCDATETIME(); + DECLARE @LeaseUntilUtc DATETIME2; + DECLARE @EffectiveTenantId NVARCHAR(255) = COALESCE(@TenantId, '(none)'); + + SET TRANSACTION ISOLATION LEVEL READ COMMITTED; + SET LOCK_TIMEOUT 5000; + + DECLARE @RC INT; + EXEC @RC = [Orders].[spOutboxLeaseAcquire] @EffectiveTenantId, @PartitionId, @LeaseId, @LeaseSeconds, @LeaseUntilUtc OUTPUT; + IF (@RC < 0) RETURN -3; + + BEGIN TRY + BEGIN TRAN; + + DECLARE @HeadId BIGINT; + DECLARE @BlockerId BIGINT; + + SELECT @HeadId = MIN(o.OutboxId) + FROM [Orders].[Outbox] o WITH (UPDLOCK) + WHERE o.[TenantId] = @EffectiveTenantId + AND o.[PartitionId] = @PartitionId + AND o.[Status] IN (0, 1) + OPTION (RECOMPILE); + + IF @HeadId IS NULL + BEGIN + COMMIT; + EXEC [Orders].[spOutboxLeaseRelease] @LeaseId; + RETURN -2; + END + + SELECT @BlockerId = MIN(o.OutboxId) + FROM [Orders].[Outbox] o WITH (READPAST, UPDLOCK) + WHERE o.[TenantId] = @EffectiveTenantId + AND o.[PartitionId] = @PartitionId + AND o.[OutboxId] >= @HeadId + AND ((o.Status = 1 AND o.[LeaseUntilUtc] IS NOT NULL AND o.[LeaseUntilUtc] > @Now) + OR (o.Status = 0 AND o.[AvailableUtc] > @Now)) + OPTION (RECOMPILE); + + ;WITH claim AS + ( + SELECT TOP (@BatchSize) + o.[OutboxId], o.[TenantId], o.[Status], o.[PartitionId], o.[Destination], o.[Event], + o.[Attempts], o.[EnqueuedUtc], o.[AvailableUtc], o.[LeaseId], o.[LeaseUntilUtc] + FROM [Orders].[Outbox] o WITH (READPAST, UPDLOCK, ROWLOCK) + WHERE o.[TenantId] = @EffectiveTenantId + AND o.[PartitionId] = @PartitionId + AND o.[OutboxId] >= @HeadId + AND (@BlockerId IS NULL OR o.[OutboxId] < @BlockerId) + AND ((o.[Status] = 0 AND o.[AvailableUtc] <= @Now) + OR (o.[Status] = 1 AND (o.[LeaseUntilUtc] IS NULL OR o.[LeaseUntilUtc] <= @Now))) + ORDER BY o.OutboxId + ) + UPDATE claim + SET [Status] = 1, + [LeaseId] = @LeaseId, + [LeaseUntilUtc] = @LeaseUntilUtc + OUTPUT + 
inserted.[OutboxId], + inserted.[TenantId], + inserted.[Status], + inserted.[PartitionId], + inserted.[Destination], + inserted.[Event], + inserted.[Attempts], + inserted.[EnqueuedUtc], + inserted.[AvailableUtc], + inserted.[LeaseUntilUtc]; + + IF (@@ROWCOUNT = 0) + BEGIN + COMMIT; + EXEC [Orders].[spOutboxLeaseRelease] @LeaseId; + RETURN -1; + END + + COMMIT; + RETURN 0; + END TRY + BEGIN CATCH + IF (XACT_STATE() <> 0) ROLLBACK; + THROW; + END CATCH +END \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Database/Schema/Stored Procedures/spOutboxBatchComplete.g.sql b/samples/src/Contoso.Orders.Database/Schema/Stored Procedures/spOutboxBatchComplete.g.sql new file mode 100644 index 00000000..cf5a1612 --- /dev/null +++ b/samples/src/Contoso.Orders.Database/Schema/Stored Procedures/spOutboxBatchComplete.g.sql @@ -0,0 +1,54 @@ +CREATE OR ALTER PROCEDURE [Orders].[spOutboxBatchComplete] + @LeaseId UNIQUEIDENTIFIER, + @DequeuedUtc DATETIME2 NULL +AS +BEGIN + /* + * This is automatically generated; any changes will be lost. 
+ */ + + SET NOCOUNT ON; + SET XACT_ABORT ON; + SET LOCK_TIMEOUT 5000; + SET TRANSACTION ISOLATION LEVEL READ COMMITTED; + + DECLARE @Now DATETIME2 = SYSUTCDATETIME(); + DECLARE @Completed TABLE (TenantId NVARCHAR(255), PartitionId INT); + + BEGIN TRY + BEGIN TRAN; + + UPDATE o + SET o.[Status] = 2, + o.[LeaseId] = NULL, + o.[LeaseUntilUtc] = NULL, + o.[DequeuedUtc] = COALESCE(@DequeuedUtc, @Now) + OUTPUT + deleted.[TenantId], + deleted.[PartitionId] + INTO @Completed + FROM [Orders].[Outbox] AS o WITH (UPDLOCK, ROWLOCK) + WHERE o.[LeaseId] = @LeaseId + AND o.[Status] = 1; + + IF (@@ROWCOUNT = 0) + BEGIN + COMMIT; + RETURN -1; + END + + COMMIT; + + BEGIN TRY + EXEC [Orders].[spOutboxLeaseRelease] @LeaseId; + END TRY + BEGIN CATCH + END CATCH + + RETURN 0; + END TRY + BEGIN CATCH + IF (XACT_STATE() <> 0) ROLLBACK; + THROW; + END CATCH +END \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Database/Schema/Stored Procedures/spOutboxEnqueue.g.sql b/samples/src/Contoso.Orders.Database/Schema/Stored Procedures/spOutboxEnqueue.g.sql new file mode 100644 index 00000000..4a9fb6d8 --- /dev/null +++ b/samples/src/Contoso.Orders.Database/Schema/Stored Procedures/spOutboxEnqueue.g.sql @@ -0,0 +1,36 @@ +CREATE OR ALTER PROCEDURE [Orders].[spOutboxEnqueue] + @TenantId AS NVARCHAR(255) = NULL, + @PartitionId AS INT, + @Destination AS NVARCHAR(255), + @Event AS NVARCHAR(MAX), + @EnqueuedUtc AS DATETIME2 = NULL, + @AvailableUtc AS DATETIME2 = NULL +AS +BEGIN + /* + * This file is automatically generated; any changes will be lost. 
+ */ + + SET NOCOUNT ON; + SET XACT_ABORT ON; + + DECLARE @Now DATETIME2 = SYSUTCDATETIME(); + DECLARE @EffectiveTenantId NVARCHAR(255) = COALESCE(@TenantId, '(none)'); + + INSERT INTO [Orders].[Outbox] ( + [TenantId], + [PartitionId], + [Destination], + [Event], + [EnqueuedUtc], + [AvailableUtc] + ) + VALUES ( + @EffectiveTenantId, + @PartitionId, + @Destination, + @Event, + COALESCE(@EnqueuedUtc, @Now), + COALESCE(@AvailableUtc, COALESCE(@EnqueuedUtc, @Now)) + ) +END \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Database/Schema/Stored Procedures/spOutboxLeaseAcquire.g.sql b/samples/src/Contoso.Orders.Database/Schema/Stored Procedures/spOutboxLeaseAcquire.g.sql new file mode 100644 index 00000000..46c8b9aa --- /dev/null +++ b/samples/src/Contoso.Orders.Database/Schema/Stored Procedures/spOutboxLeaseAcquire.g.sql @@ -0,0 +1,60 @@ +CREATE OR ALTER PROCEDURE [Orders].[spOutboxLeaseAcquire] + @TenantId NVARCHAR(255) = NULL, + @PartitionId INT, + @LeaseId UNIQUEIDENTIFIER, + @LeaseSeconds INT, + @LeaseUntilUtc DATETIME2 OUTPUT +AS +BEGIN + /* + * This is automatically generated; any changes will be lost. 
+ */ + + SET NOCOUNT ON; + SET XACT_ABORT ON; + SET LOCK_TIMEOUT 5000; + SET TRANSACTION ISOLATION LEVEL READ COMMITTED; + + DECLARE @Now DATETIME2 = SYSUTCDATETIME(); + DECLARE @Until DATETIME2 = DATEADD(SECOND, @LeaseSeconds, @Now); + DECLARE @EffectiveTenantId NVARCHAR(255) = COALESCE(@TenantId, '(none)'); + + BEGIN TRY + BEGIN TRAN; + + IF NOT EXISTS ( + SELECT 1 + FROM [Orders].[OutboxLease] WITH (UPDLOCK, HOLDLOCK) + WHERE [TenantId] = @EffectiveTenantId AND [PartitionId] = @PartitionId + ) + BEGIN + INSERT INTO [Orders].[OutboxLease] ([TenantId], [PartitionId]) + VALUES (@EffectiveTenantId, @PartitionId); + END + + UPDATE ol + SET ol.[LeaseId] = @LeaseId, + ol.[LeaseUntilUtc] = @Until + FROM [Orders].[OutboxLease] AS ol WITH (UPDLOCK, ROWLOCK) + WHERE ol.[PartitionId] = @PartitionId + AND ol.[TenantId] = @EffectiveTenantId + AND (ol.[LeaseUntilUtc] IS NULL OR ol.[LeaseUntilUtc] <= @Now) + OPTION (RECOMPILE); + + DECLARE @Rows INT = @@ROWCOUNT; + COMMIT; + + IF @Rows = 1 + BEGIN + SET @LeaseUntilUtc = @Until; + RETURN 0; + END + + SET @LeaseUntilUtc = NULL; + RETURN -1; + END TRY + BEGIN CATCH + IF (XACT_STATE() <> 0) ROLLBACK; + THROW; + END CATCH +END \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Database/Schema/Stored Procedures/spOutboxLeaseRelease.g.sql b/samples/src/Contoso.Orders.Database/Schema/Stored Procedures/spOutboxLeaseRelease.g.sql new file mode 100644 index 00000000..c24e5dd0 --- /dev/null +++ b/samples/src/Contoso.Orders.Database/Schema/Stored Procedures/spOutboxLeaseRelease.g.sql @@ -0,0 +1,33 @@ +CREATE OR ALTER PROCEDURE [Orders].[spOutboxLeaseRelease] + @LeaseId UNIQUEIDENTIFIER +AS +BEGIN + /* + * This is automatically generated; any changes will be lost. 
+ */ + + SET NOCOUNT ON; + SET XACT_ABORT ON; + SET LOCK_TIMEOUT 5000; + SET TRANSACTION ISOLATION LEVEL READ COMMITTED; + + BEGIN TRY + BEGIN TRAN; + + UPDATE ol + SET ol.[LeaseId] = NULL, + ol.[LeaseUntilUtc] = NULL + FROM [Orders].[OutboxLease] AS ol WITH (UPDLOCK, ROWLOCK) + WHERE ol.[LeaseId] = @LeaseId; + + DECLARE @Rows INT = @@ROWCOUNT; + COMMIT; + + IF @Rows = 1 RETURN 0; + RETURN -1; + END TRY + BEGIN CATCH + IF (XACT_STATE() <> 0) ROLLBACK; + THROW; + END CATCH +END \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Database/dbex.yaml b/samples/src/Contoso.Orders.Database/dbex.yaml new file mode 100644 index 00000000..1d3ceb3d --- /dev/null +++ b/samples/src/Contoso.Orders.Database/dbex.yaml @@ -0,0 +1,8 @@ +outbox: true +tables: +# Reference-data +- name: OrderStatus + +# Transactional-data +- name: Order +- name: OrderItem \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Infrastructure/Contoso.Orders.Infrastructure.csproj b/samples/src/Contoso.Orders.Infrastructure/Contoso.Orders.Infrastructure.csproj new file mode 100644 index 00000000..e8e6a3c8 --- /dev/null +++ b/samples/src/Contoso.Orders.Infrastructure/Contoso.Orders.Infrastructure.csproj @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Infrastructure/GlobalUsing.cs b/samples/src/Contoso.Orders.Infrastructure/GlobalUsing.cs new file mode 100644 index 00000000..f5d9b216 --- /dev/null +++ b/samples/src/Contoso.Orders.Infrastructure/GlobalUsing.cs @@ -0,0 +1,19 @@ +global using Contoso.Orders.Application.Repositories; +global using Contoso.Orders.Infrastructure.Mapping; +global using CoreEx; +global using CoreEx.Data; +global using CoreEx.Data.Models; +global using CoreEx.Data.Querying; +global using CoreEx.Database; +global using CoreEx.Database.SqlServer; +global using CoreEx.Database.SqlServer.Outbox; +global using CoreEx.DependencyInjection; +global using CoreEx.Entities; +global using 
CoreEx.EntityFrameworkCore; +global using CoreEx.EntityFrameworkCore.Converters; +global using CoreEx.Events; +global using CoreEx.Events.Publishing; +global using CoreEx.Mapping; +global using Microsoft.EntityFrameworkCore; +global using Microsoft.Extensions.Logging; +global using System.Text.Json; \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Infrastructure/Mapping/OrderMapper.cs b/samples/src/Contoso.Orders.Infrastructure/Mapping/OrderMapper.cs new file mode 100644 index 00000000..c466f6d1 --- /dev/null +++ b/samples/src/Contoso.Orders.Infrastructure/Mapping/OrderMapper.cs @@ -0,0 +1,33 @@ +namespace Contoso.Orders.Infrastructure.Mapping; + +public class OrderMapper : BiDirectionMapper +{ + protected override Persistence.Order OnMap(Contracts.Order source) => new() + { + Id = source.Id, + CustomerId = source.CustomerId, + StatusCode = source.Status?.Code, + Items = source.Items?.Select(i => new Persistence.OrderItem + { + Id = i.Id, + OrderId = source.Id, + ProductId = i.ProductId, + Quantity = i.Quantity, + UnitPrice = i.UnitPrice + }).ToList() ?? [] + }; + + protected override Contracts.Order OnMap(Persistence.Order source) => new() + { + Id = source.Id, + CustomerId = source.CustomerId, + StatusCode = source.StatusCode, + Items = source.Items?.Select(i => new Contracts.OrderItem + { + Id = i.Id, + ProductId = i.ProductId, + Quantity = i.Quantity, + UnitPrice = i.UnitPrice + }).ToList() ?? 
[] + }; +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Infrastructure/Mapping/OrderStatusMapper.cs b/samples/src/Contoso.Orders.Infrastructure/Mapping/OrderStatusMapper.cs new file mode 100644 index 00000000..5de65875 --- /dev/null +++ b/samples/src/Contoso.Orders.Infrastructure/Mapping/OrderStatusMapper.cs @@ -0,0 +1,16 @@ +namespace Contoso.Orders.Infrastructure.Mapping; + +internal class OrderStatusMapper : BiDirectionMapper +{ + protected override Persistence.OrderStatus OnMap(Contracts.OrderStatus source) => throw new NotImplementedException(); + + protected override Contracts.OrderStatus OnMap(Persistence.OrderStatus source) => new() + { + Id = source.Id!, + Code = source.Code, + Text = source.Text, + SortOrder = source.SortOrder, + IsInactive = !source.IsActive, + ETag = source.ETag + }; +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Infrastructure/Persistence/Order.cs b/samples/src/Contoso.Orders.Infrastructure/Persistence/Order.cs new file mode 100644 index 00000000..3be7c9aa --- /dev/null +++ b/samples/src/Contoso.Orders.Infrastructure/Persistence/Order.cs @@ -0,0 +1,10 @@ +namespace Contoso.Orders.Infrastructure.Persistence; + +public partial class Order : ModelBase +{ + public string? CustomerId { get; set; } + + public string? StatusCode { get; set; } + + public virtual ICollection Items { get; set; } = []; +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Infrastructure/Persistence/OrderItem.cs b/samples/src/Contoso.Orders.Infrastructure/Persistence/OrderItem.cs new file mode 100644 index 00000000..0454e675 --- /dev/null +++ b/samples/src/Contoso.Orders.Infrastructure/Persistence/OrderItem.cs @@ -0,0 +1,12 @@ +namespace Contoso.Orders.Infrastructure.Persistence; + +public partial class OrderItem : ModelBase +{ + public string? OrderId { get; set; } + + public string? 
ProductId { get; set; } + + public decimal Quantity { get; set; } + + public decimal UnitPrice { get; set; } +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Infrastructure/Persistence/OrderStatus.cs b/samples/src/Contoso.Orders.Infrastructure/Persistence/OrderStatus.cs new file mode 100644 index 00000000..d8402393 --- /dev/null +++ b/samples/src/Contoso.Orders.Infrastructure/Persistence/OrderStatus.cs @@ -0,0 +1,3 @@ +namespace Contoso.Orders.Infrastructure.Persistence; + +public partial class OrderStatus : ReferenceDataModelBase { } \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Infrastructure/Repositories/OrderRepository.cs b/samples/src/Contoso.Orders.Infrastructure/Repositories/OrderRepository.cs new file mode 100644 index 00000000..ed9b997c --- /dev/null +++ b/samples/src/Contoso.Orders.Infrastructure/Repositories/OrderRepository.cs @@ -0,0 +1,91 @@ +namespace Contoso.Orders.Infrastructure.Repositories; + +[ScopedService] +public class OrderRepository(OrdersEfDb ef) : IOrderRepository +{ + private readonly OrdersEfDb _ef = ef.ThrowIfNull(); + + private static readonly QueryArgsConfig _queryConfig = QueryArgsConfig.Create() + .WithFilter(filter => filter + .AddField(nameof(Contracts.OrderBase.CustomerId), c => c.WithOperators(QueryFilterOperator.EqualityOperators | QueryFilterOperator.StartsWith)) + .AddReferenceDataField(nameof(Contracts.OrderBase.Status), "StatusCode")) + .WithOrderBy(orderby => orderby + .AddField(nameof(Contracts.OrderBase.CustomerId), c => c.WithDefault().WithAlwaysInclude())); + + public Task GetAsync(string id) => _ef.Orders.GetAsync(id); + + public Task> CreateAsync(Contracts.Order order) => _ef.Orders.CreateAsync(order); + + public async Task> UpdateAsync(Contracts.Order order) + { + // Load the existing order with its items so EF tracks the child collection before the mapped update. 
+ var existing = await _ef.DbContext.Set() + .Include(o => o.Items) + .FirstOrDefaultAsync(o => o.Id == order.Id) + .ConfigureAwait(false); + + if (existing is not null) + SynchronizeItems(order, existing); + + return await _ef.Orders.UpdateAsync(order).ConfigureAwait(false); + } + + public Task DeleteAsync(string id) => _ef.Orders.DeleteAsync(id); + + public async Task> QueryAsync(QueryArgs? query, PagingArgs? paging) + { + var parsed = _queryConfig.Parse(query).ThrowOnError(); + + var orders = _ef.Orders.Model.Query(); + return await orders.Where(parsed).OrderBy(parsed).ToMappedItemsResultAsync(x => new Contracts.OrderLite + { + Id = x.Id, + CustomerId = x.CustomerId, + StatusCode = x.StatusCode, + ChangeLog = new ChangeLog { CreatedBy = x.CreatedBy, CreatedOn = x.CreatedOn, UpdatedBy = x.UpdatedBy, UpdatedOn = x.UpdatedOn } + }, paging).ConfigureAwait(false); + } + + /// + /// Synchronizes the items collection between the contract order and the tracked persistence model. + /// + private void SynchronizeItems(Contracts.Order order, Persistence.Order model) + { + var newItems = order.Items ?? []; + var existingItems = model.Items?.ToList() ?? []; + + // Remove items that are no longer present in the updated order. + var toRemove = existingItems.Where(e => !newItems.Any(n => n.Id == e.Id)).ToList(); + foreach (var item in toRemove) + _ef.DbContext.Entry(item).State = EntityState.Deleted; + + // Add new items or update existing items. + foreach (var newItem in newItems) + { + var existingItem = existingItems.FirstOrDefault(e => e.Id == newItem.Id); + if (existingItem is null) + { + // New item: assign an Id and add to the tracked collection. 
+ var addedItem = new Persistence.OrderItem + { + Id = Runtime.NewId(), + OrderId = order.Id, + ProductId = newItem.ProductId, + Quantity = newItem.Quantity, + UnitPrice = newItem.UnitPrice + }; + model.Items ??= []; + model.Items.Add(addedItem); + _ef.DbContext.Entry(addedItem).State = EntityState.Added; + } + else + { + // Existing item: update its properties. + existingItem.ProductId = newItem.ProductId; + existingItem.Quantity = newItem.Quantity; + existingItem.UnitPrice = newItem.UnitPrice; + _ef.DbContext.Entry(existingItem).State = EntityState.Modified; + } + } + } +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Infrastructure/Repositories/OrdersDbContext.cs b/samples/src/Contoso.Orders.Infrastructure/Repositories/OrdersDbContext.cs new file mode 100644 index 00000000..66440706 --- /dev/null +++ b/samples/src/Contoso.Orders.Infrastructure/Repositories/OrdersDbContext.cs @@ -0,0 +1,65 @@ +namespace Contoso.Orders.Infrastructure.Repositories; + +public class OrdersDbContext(DbContextOptions options, SqlServerDatabase database) : DbContext(options), IEfDbContext +{ + public IDatabase BaseDatabase { get; } = database.ThrowIfNull(); + + protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder) + { + base.OnConfiguring(optionsBuilder); + + if (!optionsBuilder.IsConfigured) + optionsBuilder.UseSqlServer(BaseDatabase.Connection); + } + + protected override void OnModelCreating(ModelBuilder modelBuilder) + { + modelBuilder.ThrowIfNull().Entity(e => + { + e.ToTable("Order", "Orders"); + e.HasKey(p => p.Id); + e.Property(p => p.Id).HasColumnName("OrderId").HasColumnType("NVARCHAR(50)"); + e.Property(p => p.CustomerId).HasColumnName("CustomerId").HasColumnType("NVARCHAR(100)"); + e.Property(p => p.StatusCode).HasColumnName("StatusCode").HasColumnType("NVARCHAR(50)"); + e.Property(p => p.CreatedBy).HasColumnName("CreatedBy").HasColumnType("NVARCHAR(250)"); + e.Property(p => 
p.CreatedOn).HasColumnName("CreatedOn").HasColumnType("DATETIMEOFFSET"); + e.Property(p => p.UpdatedBy).HasColumnName("UpdatedBy").HasColumnType("NVARCHAR(250)"); + e.Property(p => p.UpdatedOn).HasColumnName("UpdatedOn").HasColumnType("DATETIMEOFFSET"); + e.Property(p => p.ETag).HasColumnName("RowVersion").HasColumnType("TIMESTAMP").IsRowVersion().HasConversion(StringBase64Converter.Default); + e.HasMany(p => p.Items).WithOne().HasForeignKey(i => i.OrderId).OnDelete(DeleteBehavior.Cascade); + e.Navigation(p => p.Items).AutoInclude(true); + }); + + modelBuilder.ThrowIfNull().Entity(e => + { + e.ToTable("OrderItem", "Orders"); + e.HasKey(p => p.Id); + e.Property(p => p.Id).HasColumnName("OrderItemId").HasColumnType("NVARCHAR(50)"); + e.Property(p => p.OrderId).HasColumnName("OrderId").HasColumnType("NVARCHAR(50)"); + e.Property(p => p.ProductId).HasColumnName("ProductId").HasColumnType("NVARCHAR(100)"); + e.Property(p => p.Quantity).HasColumnName("Quantity").HasColumnType("DECIMAL(18,4)"); + e.Property(p => p.UnitPrice).HasColumnName("UnitPrice").HasColumnType("DECIMAL(18,4)"); + e.Property(p => p.CreatedBy).HasColumnName("CreatedBy").HasColumnType("NVARCHAR(250)"); + e.Property(p => p.CreatedOn).HasColumnName("CreatedOn").HasColumnType("DATETIMEOFFSET"); + e.Property(p => p.UpdatedBy).HasColumnName("UpdatedBy").HasColumnType("NVARCHAR(250)"); + e.Property(p => p.UpdatedOn).HasColumnName("UpdatedOn").HasColumnType("DATETIMEOFFSET"); + }); + + modelBuilder.ThrowIfNull().Entity(e => + { + e.ToTable("OrderStatus", "Orders"); + e.HasKey(p => p.Id); + e.Property(p => p.Id).HasColumnName("OrderStatusId").HasColumnType("NVARCHAR(50)"); + e.Property(p => p.Code).HasColumnName("Code").HasColumnType("NVARCHAR(50)"); + e.Property(p => p.Text).HasColumnName("Text").HasColumnType("NVARCHAR(250)"); + e.Property(p => p.SortOrder).HasColumnName("SortOrder").HasColumnType("INT"); + e.Property(p => p.IsActive).HasColumnName("IsActive").HasColumnType("BIT"); + e.Property(p => 
p.CreatedBy).HasColumnName("CreatedBy").HasColumnType("NVARCHAR(250)"); + e.Property(p => p.CreatedOn).HasColumnName("CreatedOn").HasColumnType("DATETIMEOFFSET"); + e.Property(p => p.UpdatedBy).HasColumnName("UpdatedBy").HasColumnType("NVARCHAR(250)"); + e.Property(p => p.UpdatedOn).HasColumnName("UpdatedOn").HasColumnType("DATETIMEOFFSET"); + e.Property(p => p.ETag).HasColumnName("RowVersion").HasColumnType("TIMESTAMP").IsRowVersion().HasConversion(StringBase64Converter.Default); + e.Ignore(p => p.Description).Ignore(p => p.StartsOn).Ignore(p => p.EndsOn); + }); + } +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Infrastructure/Repositories/OrdersEfDb.cs b/samples/src/Contoso.Orders.Infrastructure/Repositories/OrdersEfDb.cs new file mode 100644 index 00000000..82b67875 --- /dev/null +++ b/samples/src/Contoso.Orders.Infrastructure/Repositories/OrdersEfDb.cs @@ -0,0 +1,8 @@ +namespace Contoso.Orders.Infrastructure.Repositories; + +public sealed class OrdersEfDb(OrdersDbContext dbContext) : EfDb(dbContext) +{ + public EfDbModel OrderStatuses => Model(); + + public EfDbMappedModel Orders => Model().ToMappedModel(OrderMapper.Default); +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Infrastructure/Repositories/OrdersOutboxPublisher.cs b/samples/src/Contoso.Orders.Infrastructure/Repositories/OrdersOutboxPublisher.cs new file mode 100644 index 00000000..742d8b33 --- /dev/null +++ b/samples/src/Contoso.Orders.Infrastructure/Repositories/OrdersOutboxPublisher.cs @@ -0,0 +1,7 @@ +namespace Contoso.Orders.Infrastructure.Repositories; + +public class OrdersOutboxPublisher(SqlServerDatabase database, IDestinationProvider? destinationProvider = null, IEventFormatter? formatter = null, ILogger? 
logger = null) + : SqlServerOutboxPublisher(database, destinationProvider, formatter, logger) +{ + public override SqlStatement Statement { get; set; } = SqlStatement.StoredProcedure("[Orders].[spOutboxEnqueue]"); +} \ No newline at end of file diff --git a/samples/src/Contoso.Orders.Infrastructure/Repositories/ReferenceDataRepository.cs b/samples/src/Contoso.Orders.Infrastructure/Repositories/ReferenceDataRepository.cs new file mode 100644 index 00000000..ea065ced --- /dev/null +++ b/samples/src/Contoso.Orders.Infrastructure/Repositories/ReferenceDataRepository.cs @@ -0,0 +1,10 @@ +namespace Contoso.Orders.Infrastructure.Repositories; + +[ScopedService] +public class ReferenceDataRepository(OrdersEfDb ef) : IReferenceDataRepository +{ + private readonly OrdersEfDb _ef = ef.ThrowIfNull(); + + public Task GetAllOrderStatusesAsync() + => _ef.OrderStatuses.Query().ToMappedItemsAsync(OrderStatusMapper.From); +} \ No newline at end of file diff --git a/samples/tests/Contoso.E2E.Runner/Contoso.E2E.Runner.csproj b/samples/tests/Contoso.E2E.Runner/Contoso.E2E.Runner.csproj index 104efa57..8084f4a7 100644 --- a/samples/tests/Contoso.E2E.Runner/Contoso.E2E.Runner.csproj +++ b/samples/tests/Contoso.E2E.Runner/Contoso.E2E.Runner.csproj @@ -2,7 +2,7 @@ Exe - net8.0;net9.0;net10.0 + net10.0 enable enable preview @@ -18,7 +18,9 @@ + + diff --git a/samples/tests/Contoso.E2E.Runner/Scenarios/DatabaseMigrationSetup.cs b/samples/tests/Contoso.E2E.Runner/Scenarios/DatabaseMigrationSetup.cs index 88571324..196464b5 100644 --- a/samples/tests/Contoso.E2E.Runner/Scenarios/DatabaseMigrationSetup.cs +++ b/samples/tests/Contoso.E2E.Runner/Scenarios/DatabaseMigrationSetup.cs @@ -1,7 +1,7 @@ namespace Contoso.E2E.Runner.Scenarios; /// -/// Provides scenario setup for performing database migrations and refreshing base data for the Products and Shopping databases. 
+/// Provides scenario setup for performing database migrations and refreshing base data for the Products, Shopping and Orders databases. /// [ScenarioSetUp("Database-Migration", "Database Migration and Base Data Refresh", 1, false)] public sealed class DatabaseMigrationSetup : IScenario @@ -36,5 +36,19 @@ await context.StepAsync("Shopping database migration.", async () => if (!Success) throw new Exception("Database migration failed:" + Environment.NewLine + Output); }, "Successfully migrated; base data refreshed.").ConfigureAwait(false); + + // Step 3: Orders database migration. + await context.StepAsync("Orders database migration.", async () => + { + var cs = context.TestContext.Config.GetValue("E2E:Orders:ConnectionString") ?? throw new InvalidOperationException("E2E:Orders:ConnectionString configuration value is missing."); + var ma = new MigrationArgs(MigrationCommand.All | MigrationCommand.ResetAndData, cs); + Contoso.Orders.Database.Program.ConfigureMigrationArgs(ma); + ma.AddAssembly(); + + using var m = new SqlServerMigration(ma); + var (Success, Output) = await m.MigrateAndLogAsync().ConfigureAwait(false); + if (!Success) + throw new Exception("Database migration failed:" + Environment.NewLine + Output); + }, "Successfully migrated; base data refreshed.").ConfigureAwait(false); } } \ No newline at end of file diff --git a/samples/tests/Contoso.E2E.Runner/appsettings.json b/samples/tests/Contoso.E2E.Runner/appsettings.json index df3ef63a..f39efdc9 100644 --- a/samples/tests/Contoso.E2E.Runner/appsettings.json +++ b/samples/tests/Contoso.E2E.Runner/appsettings.json @@ -14,6 +14,9 @@ "TopicName": "contoso", "SubscriptionName": "shopping" }, + "Orders": { + "ConnectionString": "Data Source=127.0.0.1,1433;Initial Catalog=Contoso;User id=sa;Password=yourStrong(!)Password;TrustServerCertificate=true" + }, "RecentEventsDisplayCount": 10, "PerStepMinDelayMilliseconds": 100, "PerStepMaxDelayMilliseconds": 500, diff --git 
a/samples/tests/Contoso.Orders.Test.Api/Contoso.Orders.Test.Api.csproj b/samples/tests/Contoso.Orders.Test.Api/Contoso.Orders.Test.Api.csproj new file mode 100644 index 00000000..11cb971c --- /dev/null +++ b/samples/tests/Contoso.Orders.Test.Api/Contoso.Orders.Test.Api.csproj @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + PreserveNewest + + + + + + + + \ No newline at end of file diff --git a/samples/tests/Contoso.Orders.Test.Api/GlobalUsing.cs b/samples/tests/Contoso.Orders.Test.Api/GlobalUsing.cs new file mode 100644 index 00000000..6cc92d84 --- /dev/null +++ b/samples/tests/Contoso.Orders.Test.Api/GlobalUsing.cs @@ -0,0 +1,13 @@ +global using Contoso.Orders.Contracts; +global using CoreEx; +global using CoreEx.Http.Abstractions; +global using CoreEx.Database.SqlServer.Outbox; +global using AwesomeAssertions; +global using NUnit.Framework; +global using Order = Contoso.Orders.Contracts.Order; +global using System.Net; +global using System.Text.Json; +global using UnitTestEx; +global using UnitTestEx.Expectations; +global using DbMigration = Contoso.Orders.Database.Program; +global using TestData = Contoso.Orders.Test.Common.TestData; \ No newline at end of file diff --git a/samples/tests/Contoso.Orders.Test.Api/OrderMutateTests.Create.cs b/samples/tests/Contoso.Orders.Test.Api/OrderMutateTests.Create.cs new file mode 100644 index 00000000..58807169 --- /dev/null +++ b/samples/tests/Contoso.Orders.Test.Api/OrderMutateTests.Create.cs @@ -0,0 +1,79 @@ +namespace Contoso.Orders.Test.Api; + +public partial class OrderMutateTests +{ + [Test] + public void Order_Create_Bad_Data() + { + var order = new Contoso.Orders.Contracts.Order + { + CustomerId = null, + StatusCode = "XX" + }; + + Test.Http() + .Run(HttpMethod.Post, "/api/orders", order) + .AssertBadRequest() + .AssertErrors( + "Customer is required.", + "Order status is invalid."); + } + + [Test] + public void Order_Create_Success() + { + var order = new Contoso.Orders.Contracts.Order + { + CustomerId = 
"CUST-3001", + StatusCode = "P" + }; + + var created = Test.Http() + .ExpectIdentifier() + .ExpectETag() + .ExpectChangeLogCreated() + .ExpectSqlServerOutboxEvents(e => e.AssertWithValue("contoso", "contoso.orders.order.created.v1")) + .Run(HttpMethod.Post, "/api/orders", order) + .AssertCreated() + .AssertLocationHeader(r => new Uri($"/api/orders/{r!.Id}", UriKind.Relative)) + .Value!; + + created.Id.Should().NotBeNullOrEmpty(); + created.CustomerId.Should().Be(order.CustomerId); + created.StatusCode.Should().Be(order.StatusCode); + created.Items.Should().NotBeNull().And.BeEmpty(); + + Test.Http() + .Run(HttpMethod.Get, $"/api/orders/{created.Id}") + .AssertOK() + .AssertValue(created); + } + + [Test] + public void Order_Create_IdempotencyKey() + { + var order = new Contoso.Orders.Contracts.Order + { + CustomerId = "CUST-4001", + StatusCode = "C" + }; + + var ik = Guid.NewGuid().ToString(); + + var v1 = Test.Http() + .ExpectSqlServerOutboxEvents() + .Run(HttpMethod.Post, "/api/orders", order, requestModifier: r => r.WithIdempotencyKey(ik)) + .AssertCreated() + .AssertLocationHeader(r => new Uri($"/api/orders/{r!.Id}", UriKind.Relative)) + .Value!; + + var v2 = Test.Http() + .ExpectNoSqlServerOutboxEvents() + .Run(HttpMethod.Post, "/api/orders", order, requestModifier: r => r.WithIdempotencyKey(ik)) + .AssertCreated() + .AssertLocationHeader(r => new Uri($"/api/orders/{r!.Id}", UriKind.Relative)) + .Value!; + + ObjectComparer.Assert(v1, v2); + } +} \ No newline at end of file diff --git a/samples/tests/Contoso.Orders.Test.Api/OrderMutateTests.Delete.cs b/samples/tests/Contoso.Orders.Test.Api/OrderMutateTests.Delete.cs new file mode 100644 index 00000000..0694da44 --- /dev/null +++ b/samples/tests/Contoso.Orders.Test.Api/OrderMutateTests.Delete.cs @@ -0,0 +1,35 @@ +namespace Contoso.Orders.Test.Api; + +public partial class OrderMutateTests +{ + [Test] + public void Order_Delete_NotFound() + { + Test.Http() + .Run(HttpMethod.Delete, "/api/orders/404") + 
.AssertNoContent(); + } + + [Test] + public void Order_Delete_Success() + { + var order = CreateOrder("DEL-OK"); + + Test.Http() + .Run(HttpMethod.Get, $"/api/orders/{order.Id}") + .AssertOK(); + + Test.Http() + .ExpectSqlServerOutboxEvents(e => e.AssertMetadata("contoso", "contoso.orders.order.deleted.v1", order.Id!)) + .Run(HttpMethod.Delete, $"/api/orders/{order.Id}") + .AssertNoContent(); + + Test.Http() + .Run(HttpMethod.Delete, $"/api/orders/{order.Id}") + .AssertNoContent(); + + Test.Http() + .Run(HttpMethod.Get, $"/api/orders/{order.Id}") + .AssertNotFound(); + } +} \ No newline at end of file diff --git a/samples/tests/Contoso.Orders.Test.Api/OrderMutateTests.Patch.cs b/samples/tests/Contoso.Orders.Test.Api/OrderMutateTests.Patch.cs new file mode 100644 index 00000000..f2a4c434 --- /dev/null +++ b/samples/tests/Contoso.Orders.Test.Api/OrderMutateTests.Patch.cs @@ -0,0 +1,61 @@ +namespace Contoso.Orders.Test.Api; + +public partial class OrderMutateTests +{ + [Test] + public void Order_Patch_NotFound() + { + Test.Http() + .Run(HttpMethod.Patch, "/api/orders/404", new { status = "C" }, requestModifier: r => r.WithMergePatchJsonContentType()) + .AssertNotFound(); + } + + [Test] + public void Order_Patch_Validation() + { + var order = CreateOrder("PAT-VD"); + + Test.Http() + .Run(HttpMethod.Patch, $"/api/orders/{order.Id}", new { status = "ZZ" }, requestModifier: r => r.WithIfMatch(order.ETag).WithMergePatchJsonContentType()) + .AssertBadRequest() + .AssertErrors("Order status is invalid."); + } + + [Test] + public void Order_Patch_Success() + { + var order = CreateOrder("PAT-OK"); + + order.StatusCode = "X"; + + var updated = Test.Http() + .ExpectSqlServerOutboxEvents(e => e.AssertWithValue("contoso", "contoso.orders.order.updated.v1")) + .Run(HttpMethod.Patch, $"/api/orders/{order.Id}", new { status = "X" }, requestModifier: r => r.WithIfMatch(order.ETag).WithMergePatchJsonContentType()) + .AssertOK() + .AssertValue(order, "etag", "changelog") + .Value!; + + 
updated.StatusCode.Should().Be("X"); + updated.ETag.Should().NotBe(order.ETag); + + Test.Http() + .Run(HttpMethod.Get, $"/api/orders/{order.Id}") + .AssertOK() + .AssertValue(updated, "etag", "changelog"); + } + + [Test] + public void Order_Patch_NoChanges() + { + var order = CreateOrder("PAT-NC"); + + var updated = Test.Http() + .ExpectNoSqlServerOutboxEvents() + .Run(HttpMethod.Patch, $"/api/orders/{order.Id}", new { }, requestModifier: r => r.WithIfMatch(order.ETag).WithMergePatchJsonContentType()) + .AssertOK() + .AssertValue(order, "etag", "changelog") + .Value!; + + updated.ETag.Should().Be(order.ETag); + } +} \ No newline at end of file diff --git a/samples/tests/Contoso.Orders.Test.Api/OrderMutateTests.Update.cs b/samples/tests/Contoso.Orders.Test.Api/OrderMutateTests.Update.cs new file mode 100644 index 00000000..46be7abb --- /dev/null +++ b/samples/tests/Contoso.Orders.Test.Api/OrderMutateTests.Update.cs @@ -0,0 +1,72 @@ +namespace Contoso.Orders.Test.Api; + +public partial class OrderMutateTests +{ + [Test] + public void Order_Update_NotFound() + { + var order = CreateOrder("UPD-NF"); + + Test.Http() + .Run(HttpMethod.Put, "/api/orders/404", order) + .AssertNotFound(); + } + + [Test] + public void Order_Update_Concurrency() + { + var order = CreateOrder("UPD-CC"); + + order.StatusCode = "C"; + + Test.Http() + .Run(HttpMethod.Put, $"/api/orders/{order.Id}", order, requestModifier: r => r.WithIfMatch("AAAAAAAA")) + .AssertPreconditionFailed(); + } + + [Test] + public void Order_Update_Success() + { + var order = CreateOrder("UPD-OK"); + + order.StatusCode = "C"; + order.CustomerId = "CUST-1001-UPDATED"; + + var updated = Test.Http() + .ExpectIdentifier() + .ExpectETag() + .ExpectChangeLogUpdated() + .ExpectValue(order) + .ExpectSqlServerOutboxEvents(e => e.AssertWithValue("contoso", "contoso.orders.order.updated.v1")) + .Run(HttpMethod.Put, $"/api/orders/{order.Id}", order) + .AssertOK() + .Value!; + + updated.CustomerId.Should().Be("CUST-1001-UPDATED"); + 
updated.StatusCode.Should().Be("C"); + updated.ETag.Should().NotBe(order.ETag); + + Test.Http() + .Run(HttpMethod.Get, $"/api/orders/{order.Id}") + .AssertOK() + .AssertValue(updated); + } + + [Test] + public void Order_Update_NoChanges() + { + var order = CreateOrder("UPD-NC"); + + var updated = Test.Http() + .ExpectNoSqlServerOutboxEvents() + .Run(HttpMethod.Put, $"/api/orders/{order.Id}", order) + .AssertOK() + .AssertValue(order, "etag", "changelog") + .Value!; + + updated.ETag.Should().Be(order.ETag); + updated.ChangeLog.Should().NotBeNull(); + updated.ChangeLog!.UpdatedBy.Should().BeNull(); + updated.ChangeLog.UpdatedOn.Should().BeNull(); + } +} \ No newline at end of file diff --git a/samples/tests/Contoso.Orders.Test.Api/OrderMutateTests.cs b/samples/tests/Contoso.Orders.Test.Api/OrderMutateTests.cs new file mode 100644 index 00000000..d4f29809 --- /dev/null +++ b/samples/tests/Contoso.Orders.Test.Api/OrderMutateTests.cs @@ -0,0 +1,34 @@ +namespace Contoso.Orders.Test.Api; + +public partial class OrderMutateTests : WithApiTester +{ + private const string SqlConnectionString = "Data Source=127.0.0.1,1433;Initial Catalog=Contoso;User id=sa;Password=yourStrong(!)Password;TrustServerCertificate=true"; + + [OneTimeSetUp] + public async Task OneTimeSetUpAsync() + { + await Test.MigrateSqlServerDataAsync(DbMigration.ConfigureMigrationArgs, SqlConnectionString).ConfigureAwait(false); + await Test.ClearFusionCacheAsync().ConfigureAwait(false); + + Test.UseExpectedSqlServerOutboxPublisher(); + } + + private Contoso.Orders.Contracts.Order CreateOrder(string customerIdPrefix = "CUST") + { + var order = new Contoso.Orders.Contracts.Order + { + CustomerId = $"{customerIdPrefix}-{Guid.NewGuid():N}"[..21], + StatusCode = "P" + }; + + return Test.Http() + .ExpectIdentifier() + .ExpectETag() + .ExpectChangeLogCreated() + .ExpectSqlServerOutboxEvents(e => e.AssertWithValue("contoso", "contoso.orders.order.created.v1")) + .Run(HttpMethod.Post, "/api/orders", order) + 
.AssertCreated() + .AssertLocationHeader(r => new Uri($"/api/orders/{r!.Id}", UriKind.Relative)) + .Value!; + } +} \ No newline at end of file diff --git a/samples/tests/Contoso.Orders.Test.Api/OtherTests.Health.cs b/samples/tests/Contoso.Orders.Test.Api/OtherTests.Health.cs new file mode 100644 index 00000000..46178316 --- /dev/null +++ b/samples/tests/Contoso.Orders.Test.Api/OtherTests.Health.cs @@ -0,0 +1,39 @@ +namespace Contoso.Orders.Test.Api; + +public partial class OtherTests +{ + [TestCase("/health/live")] + [TestCase("/health/startup")] + [TestCase("/health/ready")] + public void Health(string path) + { + Test.Http() + .Run(HttpMethod.Get, path) + .Response.StatusCode.Should().BeOneOf(HttpStatusCode.OK, HttpStatusCode.ServiceUnavailable); + } + + [TestCase("/health/live/detailed", true)] + [TestCase("/health/startup/detailed", false)] + [TestCase("/health/ready/detailed", false)] + public void Health_Detailed(string path, bool minimal) + { + string[] requiredServices = + [ + "reference-data-orchestrator", + "stackExchange.Redis", + "sqlServer" + ]; + + var r = Test.Http() + .Run(HttpMethod.Get, path) + .AssertContentTypeJson(); + + r.Response.StatusCode.Should().BeOneOf(HttpStatusCode.OK, HttpStatusCode.ServiceUnavailable); + + var json = r.GetContent(); + if (minimal) + json.Should().NotContainAny(requiredServices); + else + json.Should().ContainAll(requiredServices); + } +} \ No newline at end of file diff --git a/samples/tests/Contoso.Orders.Test.Api/OtherTests.ReferenceData.cs b/samples/tests/Contoso.Orders.Test.Api/OtherTests.ReferenceData.cs new file mode 100644 index 00000000..3ec67703 --- /dev/null +++ b/samples/tests/Contoso.Orders.Test.Api/OtherTests.ReferenceData.cs @@ -0,0 +1,18 @@ +namespace Contoso.Orders.Test.Api; + +public partial class OtherTests +{ + [Test] + public void RefData_OrderStatuses() + { + var statuses = Test.Http() + .Run(HttpMethod.Get, "/api/refdata/order-statuses") + .AssertOK() + .Value; + + 
statuses.Should().HaveCountGreaterThanOrEqualTo(3); + statuses.Should().Contain(s => s.Code == "P" && s.Text == "Pending"); + statuses.Should().Contain(s => s.Code == "C" && s.Text == "Confirmed"); + statuses.Should().Contain(s => s.Code == "X" && s.Text == "Cancelled"); + } +} \ No newline at end of file diff --git a/samples/tests/Contoso.Orders.Test.Api/OtherTests.Swagger.cs b/samples/tests/Contoso.Orders.Test.Api/OtherTests.Swagger.cs new file mode 100644 index 00000000..6cc09bdc --- /dev/null +++ b/samples/tests/Contoso.Orders.Test.Api/OtherTests.Swagger.cs @@ -0,0 +1,32 @@ +namespace Contoso.Orders.Test.Api; + +public partial class OtherTests +{ + [Test] + public void Swagger_UI() + { + Test.Http() + .Run(HttpMethod.Get, "/swagger") + .Assert(HttpStatusCode.Found) + .AssertLocationHeader(new Uri("/swagger/index.html", UriKind.Relative)); + + var html = Test.Http() + .Run(HttpMethod.Get, "/swagger/index.html") + .Assert(HttpStatusCode.OK) + .GetContent(); + + html.Should().Contain("Swagger UI"); + } + + [Test] + public void Swagger_Json() + { + var json = Test.Http() + .Run(HttpMethod.Get, "/swagger/v1/swagger.json") + .Assert(HttpStatusCode.OK) + .AssertContentTypeJson() + .GetContent(); + + json.Should().Contain("Contoso.Orders.Api"); + } +} \ No newline at end of file diff --git a/samples/tests/Contoso.Orders.Test.Api/OtherTests.cs b/samples/tests/Contoso.Orders.Test.Api/OtherTests.cs new file mode 100644 index 00000000..e50fa60a --- /dev/null +++ b/samples/tests/Contoso.Orders.Test.Api/OtherTests.cs @@ -0,0 +1,15 @@ +namespace Contoso.Orders.Test.Api; + +public partial class OtherTests : WithApiTester +{ + private const string SqlConnectionString = "Data Source=127.0.0.1,1433;Initial Catalog=Contoso;User id=sa;Password=yourStrong(!)Password;TrustServerCertificate=true"; + + [OneTimeSetUp] + public async Task OneTimeSetUpAsync() + { + await Test.MigrateSqlServerDataAsync(DbMigration.ConfigureMigrationArgs, SqlConnectionString).ConfigureAwait(false); + await 
Test.ClearFusionCacheAsync().ConfigureAwait(false); + + Test.UseExpectedSqlServerOutboxPublisher(); + } +} \ No newline at end of file diff --git a/samples/tests/Contoso.Orders.Test.Api/ReadTests.OrderGet.cs b/samples/tests/Contoso.Orders.Test.Api/ReadTests.OrderGet.cs new file mode 100644 index 00000000..c08ac18a --- /dev/null +++ b/samples/tests/Contoso.Orders.Test.Api/ReadTests.OrderGet.cs @@ -0,0 +1,42 @@ +namespace Contoso.Orders.Test.Api; + +public partial class ReadTests +{ + [Test] + public void Order_Get_NotFound() + { + Test.Http() + .Run(HttpMethod.Get, "/api/orders/404") + .AssertNotFound(); + } + + [Test] + public void Order_Get_Found() + { + var order = Test.Http() + .Run(HttpMethod.Get, "/api/orders/ORD-1001") + .AssertOK() + .Value!; + + order.Id.Should().Be("ORD-1001"); + order.CustomerId.Should().Be("CUST-1001"); + order.StatusCode.Should().Be("P"); + order.ETag.Should().NotBeNullOrEmpty(); + order.Items.Should().NotBeNull().And.BeEmpty(); + } + + [Test] + public void Order_Get_Not_Modified() + { + var r = Test.Http() + .Run(HttpMethod.Get, "/api/orders/ORD-1001") + .AssertOK() + .Response; + + r.Headers.ETag.Should().NotBeNull(); + + Test.Http() + .Run(HttpMethod.Get, "/api/orders/ORD-1001", requestModifier: rm => rm.WithIfNoneMatch(r.Headers.ETag!.Tag)) + .AssertNotModified(); + } +} \ No newline at end of file diff --git a/samples/tests/Contoso.Orders.Test.Api/ReadTests.cs b/samples/tests/Contoso.Orders.Test.Api/ReadTests.cs new file mode 100644 index 00000000..947e9bca --- /dev/null +++ b/samples/tests/Contoso.Orders.Test.Api/ReadTests.cs @@ -0,0 +1,15 @@ +namespace Contoso.Orders.Test.Api; + +public partial class ReadTests : WithApiTester +{ + private const string SqlConnectionString = "Data Source=127.0.0.1,1433;Initial Catalog=Contoso;User id=sa;Password=yourStrong(!)Password;TrustServerCertificate=true"; + + [OneTimeSetUp] + public async Task OneTimeSetUpAsync() + { + await Test.MigrateSqlServerDataAsync(DbMigration.ConfigureMigrationArgs, 
SqlConnectionString).ConfigureAwait(false); + await Test.ClearFusionCacheAsync().ConfigureAwait(false); + + Test.UseExpectedSqlServerOutboxPublisher(); + } +} \ No newline at end of file diff --git a/samples/tests/Contoso.Orders.Test.Api/appsettings.unittest.json b/samples/tests/Contoso.Orders.Test.Api/appsettings.unittest.json new file mode 100644 index 00000000..802cead8 --- /dev/null +++ b/samples/tests/Contoso.Orders.Test.Api/appsettings.unittest.json @@ -0,0 +1,26 @@ +{ + "ConnectionStrings": { + "SqlServer": "Data Source=127.0.0.1,1433;Initial Catalog=Contoso;User id=sa;Password=yourStrong(!)Password;TrustServerCertificate=true", + "redis": "127.0.0.1:6379" + }, + "Aspire": { + "Microsoft": { + "Data": { + "SqlClient": { + "ConnectionString": "Data Source=127.0.0.1,1433;Initial Catalog=Contoso;User id=sa;Password=yourStrong(!)Password;TrustServerCertificate=true" + } + } + } + }, + "Logging": { + "LogLevel": { + "Default": "Debug", + "System": "Information", + "Microsoft": "Information", + "Microsoft.EntityFrameworkCore": "Information", + "Microsoft.EntityFrameworkCore.Update": "Information", + "ZiggyCreature": "Warning", + "StackExchange": "Warning" + } + } +} \ No newline at end of file diff --git a/samples/tests/Contoso.Orders.Test.Common/Contoso.Orders.Test.Common.csproj b/samples/tests/Contoso.Orders.Test.Common/Contoso.Orders.Test.Common.csproj new file mode 100644 index 00000000..cc8a284c --- /dev/null +++ b/samples/tests/Contoso.Orders.Test.Common/Contoso.Orders.Test.Common.csproj @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/samples/tests/Contoso.Orders.Test.Common/Data/data.yaml b/samples/tests/Contoso.Orders.Test.Common/Data/data.yaml new file mode 100644 index 00000000..824cc200 --- /dev/null +++ b/samples/tests/Contoso.Orders.Test.Common/Data/data.yaml @@ -0,0 +1,3 @@ +Orders: + - Order: + - { OrderId: ORD-1001, CustomerId: CUST-1001, StatusCode: P } diff --git 
a/samples/tests/Contoso.Orders.Test.Common/TestData.cs b/samples/tests/Contoso.Orders.Test.Common/TestData.cs new file mode 100644 index 00000000..a5bde808 --- /dev/null +++ b/samples/tests/Contoso.Orders.Test.Common/TestData.cs @@ -0,0 +1,6 @@ +namespace Contoso.Orders.Test.Common; + +/// +/// Marker class for test data used across multiple test projects in the 'Contoso.Orders' sample. +/// +public sealed class TestData { } \ No newline at end of file diff --git a/samples/tests/Contoso.Orders.Test.Unit/Contoso.Orders.Test.Unit.csproj b/samples/tests/Contoso.Orders.Test.Unit/Contoso.Orders.Test.Unit.csproj new file mode 100644 index 00000000..65dcbc1d --- /dev/null +++ b/samples/tests/Contoso.Orders.Test.Unit/Contoso.Orders.Test.Unit.csproj @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/samples/tests/Contoso.Orders.Test.Unit/EntryPoint.cs b/samples/tests/Contoso.Orders.Test.Unit/EntryPoint.cs new file mode 100644 index 00000000..6b481503 --- /dev/null +++ b/samples/tests/Contoso.Orders.Test.Unit/EntryPoint.cs @@ -0,0 +1,28 @@ +namespace Contoso.Orders.Test.Unit; + +public class EntryPoint +{ + public static void ConfigureApplication(IHostApplicationBuilder builder) + { + builder.Services.AddExecutionContext(); + builder.Services.AddMemoryCache(); + builder.Services.AddReferenceDataOrchestrator(); + + var jdr = JsonDataReader.ParseYaml("ref-data.yaml", JsonDataReaderOptions.CreateForReferenceData()); + builder.Services.AddSingleton(new ReferenceDataProvider(jdr)); + } + + public class ReferenceDataProvider(JsonDataReader jdr) : IReferenceDataProvider + { + public IEnumerable<(Type, Type)> Types => + [ + (typeof(OrderStatus), typeof(OrderStatusCollection)) + ]; + + public Task GetAsync(Type type, CancellationToken cancellationToken = default) => type switch + { + _ when type == typeof(OrderStatus) => Task.FromResult((IReferenceDataCollection)jdr.Deserialize("Orders.$^OrderStatus")!), + _ => throw new 
InvalidOperationException($"Type {type.FullName} is not a known {nameof(IReferenceData)}.") + }; + } +} \ No newline at end of file diff --git a/samples/tests/Contoso.Orders.Test.Unit/GlobalUsing.cs b/samples/tests/Contoso.Orders.Test.Unit/GlobalUsing.cs new file mode 100644 index 00000000..c6306de9 --- /dev/null +++ b/samples/tests/Contoso.Orders.Test.Unit/GlobalUsing.cs @@ -0,0 +1,11 @@ +global using Contoso.Orders.Application.Validators; +global using Contoso.Orders.Contracts; +global using CoreEx; +global using CoreEx.RefData; +global using CoreEx.RefData.Abstractions; +global using CoreEx.UnitTesting; +global using CoreEx.UnitTesting.Data; +global using CoreEx.Validation; +global using Microsoft.Extensions.DependencyInjection; +global using Microsoft.Extensions.Hosting; +global using UnitTestEx; \ No newline at end of file diff --git a/samples/tests/Contoso.Orders.Test.Unit/Validators/OrderValidatorTests.cs b/samples/tests/Contoso.Orders.Test.Unit/Validators/OrderValidatorTests.cs new file mode 100644 index 00000000..67f57c0b --- /dev/null +++ b/samples/tests/Contoso.Orders.Test.Unit/Validators/OrderValidatorTests.cs @@ -0,0 +1,37 @@ +namespace Contoso.Orders.Test.Unit.Validators; + +public class OrderValidatorTests : WithGenericTester +{ + [Test] + public void Empty_Required() => Test.Scoped(test => + { + var order = new Order(); + new OrderValidator().AssertErrors(order, + ("customerId", "Customer is required."), + ("status", "Order status is required.")); + }); + + [Test] + public void Invalid_ReferenceData() => Test.Scoped(test => + { + var order = new Order { CustomerId = "CUST-1001", StatusCode = "ZZ" }; + new OrderValidator().AssertErrors(order, + ("status", "Order status is invalid.")); + }); + + [Test] + public void Success() => Test.Scoped(test => + { + var order = new Order + { + CustomerId = "CUST-1001", + StatusCode = "P", + Items = + [ + new OrderItem { Id = "ORD-1001-1", ProductId = "PROD-100", Quantity = 1.00m, UnitPrice = 12.34m } + ] + }; + + 
new OrderValidator().AssertSuccess(order); + }); +} \ No newline at end of file diff --git a/src/CoreEx/Entities/Abstractions/CapabilitySupport.cs b/src/CoreEx/Entities/Abstractions/CapabilitySupport.cs new file mode 100644 index 00000000..0b063d39 --- /dev/null +++ b/src/CoreEx/Entities/Abstractions/CapabilitySupport.cs @@ -0,0 +1,22 @@ +namespace CoreEx.Entities.Abstractions; + +/// +/// Represents the capability support. +/// +public enum CapabilitySupport +{ + /// + /// Indicates that the capability is not supported. + /// + None, + + /// + /// Indicates that the capability is partially supported; i.e. is read-only. + /// + ReadOnly, + + /// + /// Indicates that the capability is fully supported; i.e. is mutable. + /// + Mutable +} \ No newline at end of file