1
0
Fork 0
mirror of https://github.com/s-frick/effigenix.git synced 2026-03-28 10:19:35 +01:00

feat(production): Vorwärts-Tracing für Rückruf-Szenario (US-P18)

BFS-Traversierung über Chargen-Genealogie (batch_consumptions.input_batch_id)
mit Cycle-Detection und Max-Depth-Guard. REST-Endpoint GET /{id}/trace-forward
liefert flache Liste mit Tiefenangabe für betroffene Endprodukt-Chargen.
This commit is contained in:
Sebastian Frick 2026-02-26 09:24:49 +01:00
parent 973c33d78f
commit ddb674d618
14 changed files with 822 additions and 1 deletions

View file

@@ -0,0 +1,28 @@
package de.effigenix.application.production;
import de.effigenix.application.production.command.TraceBatchForwardCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import java.util.List;
/**
 * Application use case for forward batch tracing: starting from one batch,
 * find every downstream batch that consumed it, directly or transitively
 * (recall scenario). Performs the BATCH_READ authorization check and
 * delegates the actual graph traversal to {@link BatchTraceabilityService}.
 */
public class TraceBatchForward {

    private final BatchTraceabilityService tracing;
    private final AuthorizationPort authorization;

    public TraceBatchForward(BatchTraceabilityService traceabilityService, AuthorizationPort authorizationPort) {
        this.tracing = traceabilityService;
        this.authorization = authorizationPort;
    }

    /**
     * Executes the trace for the given actor.
     *
     * @param command     carries the id of the origin batch
     * @param performedBy the actor requesting the trace
     * @return traced downstream batches on success; Unauthorized when the
     *         actor lacks BATCH_READ, otherwise whatever error the
     *         traceability service reports
     */
    public Result<BatchError, List<TracedBatch>> execute(TraceBatchForwardCommand command, ActorId performedBy) {
        boolean allowed = authorization.can(performedBy, ProductionAction.BATCH_READ);
        if (!allowed) {
            return Result.failure(new BatchError.Unauthorized("Not authorized to read batches"));
        }
        var startId = BatchId.of(command.batchId());
        return tracing.traceForward(startId);
    }
}

View file

@ -0,0 +1,4 @@
package de.effigenix.application.production.command;
/**
 * Command for the forward-tracing use case (US-P18).
 *
 * @param batchId id of the origin batch whose downstream consumers are traced
 */
public record TraceBatchForwardCommand(String batchId) {
    public TraceBatchForwardCommand {
        // Fail fast: a trace without an origin batch id is always a caller bug.
        if (batchId == null || batchId.isBlank()) {
            throw new IllegalArgumentException("batchId must not be null or blank");
        }
    }
}

View file

@ -29,5 +29,7 @@ public interface BatchRepository {
Result<RepositoryError, List<Batch>> findByRecipeIdsSummary(List<RecipeId> recipeIds); Result<RepositoryError, List<Batch>> findByRecipeIdsSummary(List<RecipeId> recipeIds);
Result<RepositoryError, List<Batch>> findByInputBatchId(BatchId inputBatchId);
Result<RepositoryError, Void> save(Batch batch); Result<RepositoryError, Void> save(Batch batch);
} }

View file

@ -0,0 +1,67 @@
package de.effigenix.domain.production;
import de.effigenix.shared.common.Result;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
 * Domain service that walks the batch genealogy graph in the forward
 * (consumption) direction: from one batch to every batch that consumed it
 * as an input, directly or transitively.
 *
 * Traversal is breadth-first with cycle detection (a visited set) and a
 * depth guard, so it terminates on arbitrary graphs, including cyclic ones.
 */
public class BatchTraceabilityService {

    /** Depth limit applied by the single-argument overload. */
    static final int DEFAULT_MAX_DEPTH = 10;

    private final BatchRepository batchRepository;

    public BatchTraceabilityService(BatchRepository batchRepository) {
        this.batchRepository = batchRepository;
    }

    /** Traces forward using {@link #DEFAULT_MAX_DEPTH}. */
    public Result<BatchError, List<TracedBatch>> traceForward(BatchId startBatchId) {
        return traceForward(startBatchId, DEFAULT_MAX_DEPTH);
    }

    /**
     * Collects all downstream batches reachable from {@code startBatchId}
     * within {@code maxDepth} consumption hops. The origin batch itself is
     * never part of the result; every entry carries its hop distance.
     *
     * @return traced batches in BFS order; BatchNotFound when the origin
     *         does not exist; RepositoryFailure on any storage error
     */
    public Result<BatchError, List<TracedBatch>> traceForward(BatchId startBatchId, int maxDepth) {
        // The origin must exist — otherwise an empty trace would be ambiguous.
        switch (batchRepository.findById(startBatchId)) {
            case Result.Failure(var err) -> {
                return Result.failure(new BatchError.RepositoryFailure(err.message()));
            }
            case Result.Success(var maybeStart) -> {
                if (maybeStart.isEmpty()) {
                    return Result.failure(new BatchError.BatchNotFound(startBatchId));
                }
            }
        }

        record Frontier(BatchId batchId, int depth) {}

        List<TracedBatch> traced = new ArrayList<>();
        Set<String> seen = new HashSet<>();
        ArrayDeque<Frontier> frontier = new ArrayDeque<>();

        // Mark the origin as seen so cycles leading back to it are ignored.
        seen.add(startBatchId.value());
        frontier.add(new Frontier(startBatchId, 0));

        while (!frontier.isEmpty()) {
            Frontier current = frontier.poll();
            if (current.depth() >= maxDepth) {
                continue; // depth guard: do not expand beyond the limit
            }
            switch (batchRepository.findByInputBatchId(current.batchId())) {
                case Result.Failure(var err) -> {
                    return Result.failure(new BatchError.RepositoryFailure(err.message()));
                }
                case Result.Success(var consumers) -> {
                    int nextDepth = current.depth() + 1;
                    for (Batch consumer : consumers) {
                        if (!seen.add(consumer.id().value())) {
                            continue; // already reached via an earlier path
                        }
                        traced.add(new TracedBatch(consumer, nextDepth));
                        frontier.add(new Frontier(consumer.id(), nextDepth));
                    }
                }
            }
        }
        return Result.success(traced);
    }
}

View file

@ -0,0 +1,4 @@
package de.effigenix.domain.production;
/**
 * A batch reached during forward tracing, paired with its distance from the
 * origin batch in consumption hops (depth 1 = direct consumer).
 */
public record TracedBatch(Batch batch, int depth) {
}

View file

@ -15,6 +15,7 @@ import de.effigenix.application.production.StartProductionOrder;
import de.effigenix.application.production.CancelBatch; import de.effigenix.application.production.CancelBatch;
import de.effigenix.application.production.CompleteBatch; import de.effigenix.application.production.CompleteBatch;
import de.effigenix.application.production.CreateRecipe; import de.effigenix.application.production.CreateRecipe;
import de.effigenix.application.production.TraceBatchForward;
import de.effigenix.application.production.FindBatchByNumber; import de.effigenix.application.production.FindBatchByNumber;
import de.effigenix.application.production.GetBatch; import de.effigenix.application.production.GetBatch;
import de.effigenix.application.production.ListBatches; import de.effigenix.application.production.ListBatches;
@ -28,6 +29,7 @@ import de.effigenix.application.production.RemoveProductionStep;
import de.effigenix.application.production.RemoveRecipeIngredient; import de.effigenix.application.production.RemoveRecipeIngredient;
import de.effigenix.domain.production.BatchNumberGenerator; import de.effigenix.domain.production.BatchNumberGenerator;
import de.effigenix.domain.production.BatchRepository; import de.effigenix.domain.production.BatchRepository;
import de.effigenix.domain.production.BatchTraceabilityService;
import de.effigenix.domain.production.ProductionOrderRepository; import de.effigenix.domain.production.ProductionOrderRepository;
import de.effigenix.domain.production.RecipeRepository; import de.effigenix.domain.production.RecipeRepository;
import de.effigenix.shared.persistence.UnitOfWork; import de.effigenix.shared.persistence.UnitOfWork;
@ -142,6 +144,17 @@ public class ProductionUseCaseConfiguration {
return new CancelBatch(batchRepository, authorizationPort, unitOfWork); return new CancelBatch(batchRepository, authorizationPort, unitOfWork);
} }
/**
 * Domain service for batch genealogy traversal; stateless, safe as a singleton.
 */
@Bean
public BatchTraceabilityService batchTraceabilityService(BatchRepository batchRepository) {
    return new BatchTraceabilityService(batchRepository);
}
/**
 * Use-case bean for forward batch tracing: authorization check plus
 * delegation to the traceability domain service.
 */
@Bean
public TraceBatchForward traceBatchForward(BatchTraceabilityService batchTraceabilityService,
        AuthorizationPort authorizationPort) {
    return new TraceBatchForward(batchTraceabilityService, authorizationPort);
}
@Bean @Bean
public CreateProductionOrder createProductionOrder(ProductionOrderRepository productionOrderRepository, public CreateProductionOrder createProductionOrder(ProductionOrderRepository productionOrderRepository,
RecipeRepository recipeRepository, RecipeRepository recipeRepository,

View file

@ -192,6 +192,24 @@ public class JdbcBatchRepository implements BatchRepository {
} }
} }
/**
 * Finds all batches that consumed the given batch as an input, i.e. the
 * direct downstream (consumer) batches in the genealogy graph.
 *
 * @param inputBatchId id of the consumed (input) batch
 * @return consuming batches (possibly empty), or a DatabaseError on failure
 */
@Override
public Result<RepositoryError, List<Batch>> findByInputBatchId(BatchId inputBatchId) {
    try {
        var batches = jdbc.sql("""
                SELECT b.* FROM batches b
                JOIN batch_consumptions bc ON b.id = bc.batch_id
                WHERE bc.input_batch_id = :inputBatchId
                """)
                .param("inputBatchId", inputBatchId.value())
                .query(this::mapBatchRow)
                .list();
        return Result.success(batches);
    } catch (Exception e) {
        // Log at error level: a failed query is an operational problem that
        // must be visible in production logs — trace level would hide it.
        logger.error("Database error in findByInputBatchId", e);
        return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
    }
}
@Override @Override
public Result<RepositoryError, Void> save(Batch batch) { public Result<RepositoryError, Void> save(Batch batch) {
try { try {

View file

@ -8,11 +8,13 @@ import de.effigenix.application.production.ListBatches;
import de.effigenix.application.production.PlanBatch; import de.effigenix.application.production.PlanBatch;
import de.effigenix.application.production.RecordConsumption; import de.effigenix.application.production.RecordConsumption;
import de.effigenix.application.production.StartBatch; import de.effigenix.application.production.StartBatch;
import de.effigenix.application.production.TraceBatchForward;
import de.effigenix.application.production.command.CancelBatchCommand; import de.effigenix.application.production.command.CancelBatchCommand;
import de.effigenix.application.production.command.CompleteBatchCommand; import de.effigenix.application.production.command.CompleteBatchCommand;
import de.effigenix.application.production.command.PlanBatchCommand; import de.effigenix.application.production.command.PlanBatchCommand;
import de.effigenix.application.production.command.RecordConsumptionCommand; import de.effigenix.application.production.command.RecordConsumptionCommand;
import de.effigenix.application.production.command.StartBatchCommand; import de.effigenix.application.production.command.StartBatchCommand;
import de.effigenix.application.production.command.TraceBatchForwardCommand;
import de.effigenix.domain.production.BatchError; import de.effigenix.domain.production.BatchError;
import de.effigenix.domain.production.BatchId; import de.effigenix.domain.production.BatchId;
import de.effigenix.domain.production.BatchNumber; import de.effigenix.domain.production.BatchNumber;
@ -20,6 +22,7 @@ import de.effigenix.domain.production.BatchStatus;
import de.effigenix.infrastructure.production.web.dto.BatchResponse; import de.effigenix.infrastructure.production.web.dto.BatchResponse;
import de.effigenix.infrastructure.production.web.dto.BatchSummaryResponse; import de.effigenix.infrastructure.production.web.dto.BatchSummaryResponse;
import de.effigenix.infrastructure.production.web.dto.ConsumptionResponse; import de.effigenix.infrastructure.production.web.dto.ConsumptionResponse;
import de.effigenix.infrastructure.production.web.dto.TraceBatchForwardResponse;
import de.effigenix.infrastructure.production.web.dto.CancelBatchRequest; import de.effigenix.infrastructure.production.web.dto.CancelBatchRequest;
import de.effigenix.infrastructure.production.web.dto.CompleteBatchRequest; import de.effigenix.infrastructure.production.web.dto.CompleteBatchRequest;
import de.effigenix.infrastructure.production.web.dto.PlanBatchRequest; import de.effigenix.infrastructure.production.web.dto.PlanBatchRequest;
@ -56,11 +59,12 @@ public class BatchController {
private final RecordConsumption recordConsumption; private final RecordConsumption recordConsumption;
private final CompleteBatch completeBatch; private final CompleteBatch completeBatch;
private final CancelBatch cancelBatch; private final CancelBatch cancelBatch;
private final TraceBatchForward traceBatchForward;
public BatchController(PlanBatch planBatch, GetBatch getBatch, ListBatches listBatches, public BatchController(PlanBatch planBatch, GetBatch getBatch, ListBatches listBatches,
FindBatchByNumber findBatchByNumber, StartBatch startBatch, FindBatchByNumber findBatchByNumber, StartBatch startBatch,
RecordConsumption recordConsumption, CompleteBatch completeBatch, RecordConsumption recordConsumption, CompleteBatch completeBatch,
CancelBatch cancelBatch) { CancelBatch cancelBatch, TraceBatchForward traceBatchForward) {
this.planBatch = planBatch; this.planBatch = planBatch;
this.getBatch = getBatch; this.getBatch = getBatch;
this.listBatches = listBatches; this.listBatches = listBatches;
@ -69,6 +73,7 @@ public class BatchController {
this.recordConsumption = recordConsumption; this.recordConsumption = recordConsumption;
this.completeBatch = completeBatch; this.completeBatch = completeBatch;
this.cancelBatch = cancelBatch; this.cancelBatch = cancelBatch;
this.traceBatchForward = traceBatchForward;
} }
@GetMapping("/{id}") @GetMapping("/{id}")
@ -244,6 +249,23 @@ public class BatchController {
return ResponseEntity.ok(BatchResponse.from(result.unsafeGetValue())); return ResponseEntity.ok(BatchResponse.from(result.unsafeGetValue()));
} }
/**
 * GET /{id}/trace-forward — forward trace: all downstream batches that
 * consumed batch {id}, directly or transitively, with their hop depth.
 */
@GetMapping("/{id}/trace-forward")
@PreAuthorize("hasAuthority('BATCH_READ')")
public ResponseEntity<TraceBatchForwardResponse> traceForward(
        @PathVariable("id") String id,
        Authentication authentication
) {
    var command = new TraceBatchForwardCommand(id);
    var actor = ActorId.of(authentication.getName());
    var outcome = traceBatchForward.execute(command, actor);
    if (outcome.isFailure()) {
        // Translated into an HTTP status by the domain-error exception handler.
        throw new BatchDomainErrorException(outcome.unsafeGetError());
    }
    return ResponseEntity.ok(TraceBatchForwardResponse.from(id, outcome.unsafeGetValue()));
}
@PostMapping("/{id}/cancel") @PostMapping("/{id}/cancel")
@PreAuthorize("hasAuthority('BATCH_CANCEL')") @PreAuthorize("hasAuthority('BATCH_CANCEL')")
public ResponseEntity<BatchResponse> cancelBatch( public ResponseEntity<BatchResponse> cancelBatch(

View file

@ -0,0 +1,37 @@
package de.effigenix.infrastructure.production.web.dto;
import de.effigenix.domain.production.TracedBatch;
import java.util.List;
/**
 * Response body for GET /api/production/batches/{id}/trace-forward: a flat
 * list of all downstream batches with their hop depth from the origin.
 */
public record TraceBatchForwardResponse(
        String originBatchId,
        List<TracedBatchResponse> tracedBatches,
        int totalCount
) {

    /** One traced downstream batch plus its distance from the origin. */
    public record TracedBatchResponse(
            String id,
            String batchNumber,
            String recipeId,
            String status,
            int depth
    ) {
        /** Maps a domain {@code TracedBatch} to its transport representation. */
        public static TracedBatchResponse from(TracedBatch traced) {
            return new TracedBatchResponse(
                    traced.batch().id().value(),
                    traced.batch().batchNumber().value(),
                    traced.batch().recipeId().value(),
                    traced.batch().status().name(),
                    traced.depth()
            );
        }
    }

    /** Builds the response envelope from the origin id and the traced batches. */
    public static TraceBatchForwardResponse from(String originBatchId, List<TracedBatch> tracedBatches) {
        List<TracedBatchResponse> items = tracedBatches.stream()
                .map(TracedBatchResponse::from)
                .toList();
        return new TraceBatchForwardResponse(originBatchId, items, items.size());
    }
}

View file

@ -72,6 +72,11 @@ public class StubBatchRepository implements BatchRepository {
return Result.failure(STUB_ERROR); return Result.failure(STUB_ERROR);
} }
// Stub behaviour matches the other finders: always fail with STUB_ERROR.
@Override
public Result<RepositoryError, List<Batch>> findByInputBatchId(BatchId inputBatchId) {
    return Result.failure(STUB_ERROR);
}
@Override @Override
public Result<RepositoryError, Void> save(Batch batch) { public Result<RepositoryError, Void> save(Batch batch) {
return Result.failure(STUB_ERROR); return Result.failure(STUB_ERROR);

View file

@ -0,0 +1,113 @@
package de.effigenix.application.production;
import de.effigenix.application.production.command.TraceBatchForwardCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Quantity;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.common.UnitOfMeasure;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
/**
 * Unit tests for the TraceBatchForward use case: the authorization gate and
 * the delegation to a mocked BatchTraceabilityService.
 */
@ExtendWith(MockitoExtension.class)
@DisplayName("TraceBatchForward Use Case")
class TraceBatchForwardTest {

    @Mock private BatchTraceabilityService traceabilityService;
    @Mock private AuthorizationPort authPort;

    private TraceBatchForward traceBatchForward;
    private ActorId performedBy;

    @BeforeEach
    void setUp() {
        traceBatchForward = new TraceBatchForward(traceabilityService, authPort);
        performedBy = ActorId.of("admin-user");
    }

    @Test
    @DisplayName("should fail with Unauthorized when actor lacks permission")
    void should_FailWithUnauthorized_When_ActorLacksPermission() {
        when(authPort.can(performedBy, ProductionAction.BATCH_READ)).thenReturn(false);
        var result = traceBatchForward.execute(new TraceBatchForwardCommand("batch-1"), performedBy);
        assertThat(result.isFailure()).isTrue();
        assertThat(result.unsafeGetError()).isInstanceOf(BatchError.Unauthorized.class);
        // Authorization failure must short-circuit: the service is never consulted.
        verify(traceabilityService, never()).traceForward(any());
    }

    @Test
    @DisplayName("should delegate to BatchTraceabilityService when authorized")
    void should_DelegateToService_When_Authorized() {
        when(authPort.can(performedBy, ProductionAction.BATCH_READ)).thenReturn(true);
        var tracedBatch = new TracedBatch(sampleBatch("child-1"), 1);
        when(traceabilityService.traceForward(BatchId.of("batch-1")))
                .thenReturn(Result.success(List.of(tracedBatch)));
        var result = traceBatchForward.execute(new TraceBatchForwardCommand("batch-1"), performedBy);
        assertThat(result.isSuccess()).isTrue();
        assertThat(result.unsafeGetValue()).hasSize(1);
        assertThat(result.unsafeGetValue().get(0).batch().id().value()).isEqualTo("child-1");
        verify(traceabilityService).traceForward(BatchId.of("batch-1"));
    }

    @Test
    @DisplayName("should return empty list when no downstream batches exist")
    void should_ReturnEmptyList_When_NoDownstream() {
        when(authPort.can(performedBy, ProductionAction.BATCH_READ)).thenReturn(true);
        when(traceabilityService.traceForward(BatchId.of("batch-1")))
                .thenReturn(Result.success(List.of()));
        var result = traceBatchForward.execute(new TraceBatchForwardCommand("batch-1"), performedBy);
        assertThat(result.isSuccess()).isTrue();
        assertThat(result.unsafeGetValue()).isEmpty();
    }

    @Test
    @DisplayName("should propagate domain error from service")
    void should_PropagateDomainError_FromService() {
        when(authPort.can(performedBy, ProductionAction.BATCH_READ)).thenReturn(true);
        when(traceabilityService.traceForward(BatchId.of("nonexistent")))
                .thenReturn(Result.failure(new BatchError.BatchNotFound(BatchId.of("nonexistent"))));
        var result = traceBatchForward.execute(new TraceBatchForwardCommand("nonexistent"), performedBy);
        assertThat(result.isFailure()).isTrue();
        assertThat(result.unsafeGetError()).isInstanceOf(BatchError.BatchNotFound.class);
    }

    // Minimal COMPLETED batch fixture; nullable fields deliberately left null.
    private Batch sampleBatch(String id) {
        return Batch.reconstitute(
                BatchId.of(id),
                BatchNumber.generate(LocalDate.of(2026, 3, 1), 1),
                RecipeId.of("recipe-1"),
                BatchStatus.COMPLETED,
                Quantity.of(new BigDecimal("100"), UnitOfMeasure.KILOGRAM).unsafeGetValue(),
                null, null, null,
                LocalDate.of(2026, 3, 1),
                LocalDate.of(2026, 6, 1),
                OffsetDateTime.now(ZoneOffset.UTC),
                OffsetDateTime.now(ZoneOffset.UTC),
                null, null, null,
                0L, List.of()
        );
    }
}

View file

@ -0,0 +1,138 @@
package de.effigenix.domain.production;
import com.code_intelligence.jazzer.api.FuzzedDataProvider;
import com.code_intelligence.jazzer.junit.FuzzTest;
import de.effigenix.shared.common.Quantity;
import de.effigenix.shared.common.RepositoryError;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.common.UnitOfMeasure;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.*;
/**
 * Fuzz test for BatchTraceabilityService.
 *
 * Generates random batch graphs (including cycles, diamonds, deep chains)
 * and verifies that traceForward always terminates without exceptions.
 * Uses an in-memory BatchRepository stub to build arbitrary graph topologies.
 *
 * Run: make fuzz | make fuzz/single TEST=BatchTraceabilityServiceFuzzTest
 */
class BatchTraceabilityServiceFuzzTest {

    @FuzzTest(maxDuration = "5m")
    void fuzzTraceForward(FuzzedDataProvider data) {
        // Random graph size (bounded so each iteration stays fast).
        int nodeCount = data.consumeInt(1, 30);
        List<String> nodeIds = new ArrayList<>();
        for (int i = 0; i < nodeCount; i++) {
            nodeIds.add("batch-" + i);
        }
        // Build random adjacency: for each node, pick random children
        // (self-loops and duplicate edges are allowed on purpose).
        Map<String, List<String>> adjacency = new HashMap<>();
        for (String nodeId : nodeIds) {
            int childCount = data.consumeInt(0, Math.min(5, nodeCount));
            List<String> children = new ArrayList<>();
            for (int c = 0; c < childCount; c++) {
                int childIdx = data.consumeInt(0, nodeCount - 1);
                children.add(nodeIds.get(childIdx));
            }
            adjacency.put(nodeId, children);
        }
        var repo = new InMemoryGraphRepository(nodeIds, adjacency);
        var service = new BatchTraceabilityService(repo);
        // Pick random start node
        String startId = nodeIds.get(data.consumeInt(0, nodeCount - 1));
        int maxDepth = data.consumeInt(0, 15);
        var result = service.traceForward(BatchId.of(startId), maxDepth);
        // Must always terminate and return a valid Result — never throw
        switch (result) {
            case Result.Success(var traced) -> {
                // All depths must be within bounds
                for (TracedBatch tb : traced) {
                    assert tb.depth() >= 1 && tb.depth() <= maxDepth;
                }
                // No duplicates
                Set<String> seen = new HashSet<>();
                for (TracedBatch tb : traced) {
                    assert seen.add(tb.batch().id().value()) : "Duplicate batch in result";
                }
                // Start batch must not be in result
                for (TracedBatch tb : traced) {
                    assert !tb.batch().id().value().equals(startId) : "Start batch in result";
                }
            }
            case Result.Failure(var err) -> {
                // Failures are acceptable (e.g., RepositoryFailure), but must not be null
                assert err != null;
            }
        }
    }

    // Minimal COMPLETED batch fixture keyed only by id.
    private static Batch makeBatch(String id) {
        return Batch.reconstitute(
                BatchId.of(id),
                BatchNumber.generate(LocalDate.of(2026, 1, 1), 1),
                RecipeId.of("recipe-fuzz"),
                BatchStatus.COMPLETED,
                Quantity.of(new BigDecimal("100"), UnitOfMeasure.KILOGRAM).unsafeGetValue(),
                null, null, null,
                LocalDate.of(2026, 1, 1),
                LocalDate.of(2026, 6, 1),
                OffsetDateTime.now(ZoneOffset.UTC),
                OffsetDateTime.now(ZoneOffset.UTC),
                null, null, null,
                0L, List.of()
        );
    }

    /**
     * In-memory BatchRepository that serves a pre-built graph.
     * Only findById and findByInputBatchId are implemented (the rest throw).
     */
    private static class InMemoryGraphRepository implements BatchRepository {
        private final Set<String> nodeIds;
        private final Map<String, List<String>> adjacency;

        InMemoryGraphRepository(List<String> nodeIds, Map<String, List<String>> adjacency) {
            this.nodeIds = new HashSet<>(nodeIds);
            this.adjacency = adjacency;
        }

        @Override
        public Result<RepositoryError, Optional<Batch>> findById(BatchId id) {
            if (nodeIds.contains(id.value())) {
                return Result.success(Optional.of(makeBatch(id.value())));
            }
            return Result.success(Optional.empty());
        }

        @Override
        public Result<RepositoryError, List<Batch>> findByInputBatchId(BatchId inputBatchId) {
            var children = adjacency.getOrDefault(inputBatchId.value(), List.of());
            return Result.success(children.stream().map(BatchTraceabilityServiceFuzzTest::makeBatch).toList());
        }

        // --- Remaining methods are not used by BatchTraceabilityService ---
        @Override public Result<RepositoryError, List<Batch>> findAll() { throw new UnsupportedOperationException(); }
        @Override public Result<RepositoryError, Optional<Batch>> findByBatchNumber(BatchNumber n) { throw new UnsupportedOperationException(); }
        @Override public Result<RepositoryError, List<Batch>> findByStatus(BatchStatus s) { throw new UnsupportedOperationException(); }
        @Override public Result<RepositoryError, List<Batch>> findByProductionDate(LocalDate d) { throw new UnsupportedOperationException(); }
        @Override public Result<RepositoryError, List<Batch>> findByRecipeIds(List<RecipeId> ids) { throw new UnsupportedOperationException(); }
        @Override public Result<RepositoryError, List<Batch>> findAllSummary() { throw new UnsupportedOperationException(); }
        @Override public Result<RepositoryError, List<Batch>> findByStatusSummary(BatchStatus s) { throw new UnsupportedOperationException(); }
        @Override public Result<RepositoryError, List<Batch>> findByProductionDateSummary(LocalDate d) { throw new UnsupportedOperationException(); }
        @Override public Result<RepositoryError, List<Batch>> findByRecipeIdsSummary(List<RecipeId> ids) { throw new UnsupportedOperationException(); }
        @Override public Result<RepositoryError, Void> save(Batch batch) { throw new UnsupportedOperationException(); }
    }
}

View file

@ -0,0 +1,255 @@
package de.effigenix.domain.production;
import de.effigenix.shared.common.Quantity;
import de.effigenix.shared.common.RepositoryError;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.common.UnitOfMeasure;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.List;
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
@ExtendWith(MockitoExtension.class)
@DisplayName("BatchTraceabilityService")
class BatchTraceabilityServiceTest {
@Mock private BatchRepository batchRepository;
private BatchTraceabilityService service;
@BeforeEach
void setUp() {
service = new BatchTraceabilityService(batchRepository);
}
private Batch sampleBatch(String id) {
return Batch.reconstitute(
BatchId.of(id),
BatchNumber.generate(LocalDate.of(2026, 3, 1), 1),
RecipeId.of("recipe-1"),
BatchStatus.COMPLETED,
Quantity.of(new BigDecimal("100"), UnitOfMeasure.KILOGRAM).unsafeGetValue(),
null, null, null,
LocalDate.of(2026, 3, 1),
LocalDate.of(2026, 6, 1),
OffsetDateTime.now(ZoneOffset.UTC),
OffsetDateTime.now(ZoneOffset.UTC),
null, null, null,
0L, List.of()
);
}
@Nested
@DisplayName("traceForward")
class TraceForward {
@Test
@DisplayName("should return empty list when no downstream batches exist")
void should_ReturnEmptyList_When_NoDownstreamBatches() {
var startId = BatchId.of("start-batch");
when(batchRepository.findById(startId)).thenReturn(Result.success(Optional.of(sampleBatch("start-batch"))));
when(batchRepository.findByInputBatchId(startId)).thenReturn(Result.success(List.of()));
var result = service.traceForward(startId);
assertThat(result.isSuccess()).isTrue();
assertThat(result.unsafeGetValue()).isEmpty();
}
@Test
@DisplayName("should return direct downstream batches at depth 1")
void should_ReturnDirectDownstream_AtDepth1() {
var startId = BatchId.of("start-batch");
var child1 = sampleBatch("child-1");
var child2 = sampleBatch("child-2");
when(batchRepository.findById(startId)).thenReturn(Result.success(Optional.of(sampleBatch("start-batch"))));
when(batchRepository.findByInputBatchId(startId)).thenReturn(Result.success(List.of(child1, child2)));
when(batchRepository.findByInputBatchId(BatchId.of("child-1"))).thenReturn(Result.success(List.of()));
when(batchRepository.findByInputBatchId(BatchId.of("child-2"))).thenReturn(Result.success(List.of()));
var result = service.traceForward(startId);
assertThat(result.isSuccess()).isTrue();
var traced = result.unsafeGetValue();
assertThat(traced).hasSize(2);
assertThat(traced).allMatch(t -> t.depth() == 1);
assertThat(traced).extracting(t -> t.batch().id().value())
.containsExactlyInAnyOrder("child-1", "child-2");
}
@Test
@DisplayName("should traverse multi-level chain with correct depths")
void should_TraverseMultiLevel_WithCorrectDepths() {
var startId = BatchId.of("start");
var level1 = sampleBatch("level-1");
var level2 = sampleBatch("level-2");
var level3 = sampleBatch("level-3");
when(batchRepository.findById(startId)).thenReturn(Result.success(Optional.of(sampleBatch("start"))));
when(batchRepository.findByInputBatchId(startId)).thenReturn(Result.success(List.of(level1)));
when(batchRepository.findByInputBatchId(BatchId.of("level-1"))).thenReturn(Result.success(List.of(level2)));
when(batchRepository.findByInputBatchId(BatchId.of("level-2"))).thenReturn(Result.success(List.of(level3)));
when(batchRepository.findByInputBatchId(BatchId.of("level-3"))).thenReturn(Result.success(List.of()));
var result = service.traceForward(startId);
assertThat(result.isSuccess()).isTrue();
var traced = result.unsafeGetValue();
assertThat(traced).hasSize(3);
assertThat(traced.get(0).batch().id().value()).isEqualTo("level-1");
assertThat(traced.get(0).depth()).isEqualTo(1);
assertThat(traced.get(1).batch().id().value()).isEqualTo("level-2");
assertThat(traced.get(1).depth()).isEqualTo(2);
assertThat(traced.get(2).batch().id().value()).isEqualTo("level-3");
assertThat(traced.get(2).depth()).isEqualTo(3);
}
@Test
@DisplayName("should detect cycles and terminate without endless loop")
void should_DetectCycles_AndTerminate() {
var startId = BatchId.of("start");
var child = sampleBatch("child");
when(batchRepository.findById(startId)).thenReturn(Result.success(Optional.of(sampleBatch("start"))));
when(batchRepository.findByInputBatchId(startId)).thenReturn(Result.success(List.of(child)));
// child references back to start cycle
when(batchRepository.findByInputBatchId(BatchId.of("child")))
.thenReturn(Result.success(List.of(sampleBatch("start"))));
var result = service.traceForward(startId);
assertThat(result.isSuccess()).isTrue();
var traced = result.unsafeGetValue();
assertThat(traced).hasSize(1);
assertThat(traced.get(0).batch().id().value()).isEqualTo("child");
}
@Test
@DisplayName("should fail with BatchNotFound when start batch does not exist")
void should_FailWithBatchNotFound_When_StartBatchDoesNotExist() {
var startId = BatchId.of("nonexistent");
when(batchRepository.findById(startId)).thenReturn(Result.success(Optional.empty()));
var result = service.traceForward(startId);
assertThat(result.isFailure()).isTrue();
assertThat(result.unsafeGetError()).isInstanceOf(BatchError.BatchNotFound.class);
}
@Test
@DisplayName("should stop at max depth limit")
void should_StopAtMaxDepth() {
var startId = BatchId.of("start");
when(batchRepository.findById(startId)).thenReturn(Result.success(Optional.of(sampleBatch("start"))));
var level1 = sampleBatch("level-1");
var level2 = sampleBatch("level-2");
when(batchRepository.findByInputBatchId(startId)).thenReturn(Result.success(List.of(level1)));
when(batchRepository.findByInputBatchId(BatchId.of("level-1"))).thenReturn(Result.success(List.of(level2)));
var result = service.traceForward(startId, 2);
assertThat(result.isSuccess()).isTrue();
var traced = result.unsafeGetValue();
assertThat(traced).hasSize(2);
assertThat(traced).extracting(TracedBatch::depth).containsExactly(1, 2);
}
@Test
@DisplayName("should fail with RepositoryFailure when findById returns error")
void should_FailWithRepositoryFailure_When_FindByIdReturnsError() {
var startId = BatchId.of("start");
when(batchRepository.findById(startId))
.thenReturn(Result.failure(new RepositoryError.DatabaseError("connection lost")));
var result = service.traceForward(startId);
assertThat(result.isFailure()).isTrue();
assertThat(result.unsafeGetError()).isInstanceOf(BatchError.RepositoryFailure.class);
}
@Test
@DisplayName("should fail with RepositoryFailure when findByInputBatchId returns error")
void should_FailWithRepositoryFailure_When_FindByInputBatchIdReturnsError() {
var startId = BatchId.of("start");
when(batchRepository.findById(startId)).thenReturn(Result.success(Optional.of(sampleBatch("start"))));
when(batchRepository.findByInputBatchId(startId))
.thenReturn(Result.failure(new RepositoryError.DatabaseError("timeout")));
var result = service.traceForward(startId);
assertThat(result.isFailure()).isTrue();
assertThat(result.unsafeGetError()).isInstanceOf(BatchError.RepositoryFailure.class);
}
@Test
@DisplayName("should deduplicate shared child in diamond graph")
void should_DeduplicateSharedChild_InDiamondGraph() {
// A B, A C, B D, C D D appears only once
var startId = BatchId.of("A");
var batchB = sampleBatch("B");
var batchC = sampleBatch("C");
var batchD = sampleBatch("D");
when(batchRepository.findById(startId)).thenReturn(Result.success(Optional.of(sampleBatch("A"))));
when(batchRepository.findByInputBatchId(startId)).thenReturn(Result.success(List.of(batchB, batchC)));
when(batchRepository.findByInputBatchId(BatchId.of("B"))).thenReturn(Result.success(List.of(batchD)));
when(batchRepository.findByInputBatchId(BatchId.of("C"))).thenReturn(Result.success(List.of(sampleBatch("D"))));
when(batchRepository.findByInputBatchId(BatchId.of("D"))).thenReturn(Result.success(List.of()));
var result = service.traceForward(startId);
assertThat(result.isSuccess()).isTrue();
var traced = result.unsafeGetValue();
assertThat(traced).extracting(t -> t.batch().id().value())
.containsExactlyInAnyOrder("B", "C", "D");
// D must appear only once despite being reachable from B and C
assertThat(traced.stream().filter(t -> t.batch().id().value().equals("D")).count())
.isEqualTo(1);
}
@Test
@DisplayName("should return empty list when maxDepth is 0")
void should_ReturnEmptyList_When_MaxDepthIsZero() {
    // A depth budget of zero means the traversal must not expand any children,
    // so the trace succeeds with an empty result.
    var origin = BatchId.of("start");
    when(batchRepository.findById(origin))
            .thenReturn(Result.success(Optional.of(sampleBatch("start"))));

    var outcome = service.traceForward(origin, 0);

    assertThat(outcome.isSuccess()).isTrue();
    assertThat(outcome.unsafeGetValue()).isEmpty();
}
@Test
@DisplayName("should not include start batch in result")
void should_NotIncludeStartBatch_InResult() {
    // The origin is the recall anchor, not an affected downstream batch,
    // so only its descendants belong in the trace.
    var origin = BatchId.of("start");
    var downstream = sampleBatch("child");
    when(batchRepository.findById(origin))
            .thenReturn(Result.success(Optional.of(sampleBatch("start"))));
    when(batchRepository.findByInputBatchId(origin))
            .thenReturn(Result.success(List.of(downstream)));
    when(batchRepository.findByInputBatchId(BatchId.of("child")))
            .thenReturn(Result.success(List.of()));

    var outcome = service.traceForward(origin);

    assertThat(outcome.isSuccess()).isTrue();
    assertThat(outcome.unsafeGetValue())
            .extracting(entry -> entry.batch().id().value())
            .doesNotContain("start");
}
}
}

View file

@ -13,6 +13,7 @@ import java.time.LocalDate;
import java.util.Set; import java.util.Set;
import java.util.UUID; import java.util.UUID;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
@ -666,6 +667,120 @@ class BatchControllerIntegrationTest extends AbstractIntegrationTest {
} }
} }
@Nested
@DisplayName("GET /api/production/batches/{id}/trace-forward Vorwärts-Tracing")
class TraceForwardEndpoint {

    // Token with full batch/recipe permissions, regenerated per test.
    private String traceToken;

    @BeforeEach
    void setUpTraceToken() {
        traceToken = generateToken(UUID.randomUUID().toString(), "trace.admin",
                "BATCH_WRITE,BATCH_READ,RECIPE_WRITE,RECIPE_READ");
    }

    @Test
    @DisplayName("Kein Downstream → 200, leere Liste")
    void traceForward_noDownstream_returnsEmptyList() throws Exception {
        String batchId = createPlannedBatchWith(traceToken);
        mockMvc.perform(get("/api/production/batches/{id}/trace-forward", batchId)
                        .header("Authorization", "Bearer " + traceToken))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.originBatchId").value(batchId))
                .andExpect(jsonPath("$.tracedBatches").isArray())
                .andExpect(jsonPath("$.tracedBatches").isEmpty())
                .andExpect(jsonPath("$.totalCount").value(0));
    }

    @Test
    @DisplayName("Single-Level Kette → 200, depth=1")
    void traceForward_singleLevel_returnsDepth1() throws Exception {
        // Rohstoff-Charge (Input)
        String inputBatchId = createPlannedBatchWith(traceToken);
        // Endprodukt-Charge verbraucht die Input-Charge
        String outputBatchId = createBatchWithConsumption(traceToken, inputBatchId);
        mockMvc.perform(get("/api/production/batches/{id}/trace-forward", inputBatchId)
                        .header("Authorization", "Bearer " + traceToken))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.originBatchId").value(inputBatchId))
                .andExpect(jsonPath("$.totalCount").value(1))
                .andExpect(jsonPath("$.tracedBatches[0].id").value(outputBatchId))
                .andExpect(jsonPath("$.tracedBatches[0].depth").value(1))
                .andExpect(jsonPath("$.tracedBatches[0].batchNumber").isNotEmpty())
                .andExpect(jsonPath("$.tracedBatches[0].status").isNotEmpty());
    }

    @Test
    @DisplayName("Batch nicht gefunden → 404")
    void traceForward_notFound_returns404() throws Exception {
        mockMvc.perform(get("/api/production/batches/{id}/trace-forward", UUID.randomUUID().toString())
                        .header("Authorization", "Bearer " + traceToken))
                .andExpect(status().isNotFound())
                .andExpect(jsonPath("$.code").value("BATCH_NOT_FOUND"));
    }

    @Test
    @DisplayName("Ohne Token → 401")
    void traceForward_withoutToken_returns401() throws Exception {
        mockMvc.perform(get("/api/production/batches/{id}/trace-forward", UUID.randomUUID().toString()))
                .andExpect(status().isUnauthorized());
    }

    @Test
    @DisplayName("Ohne BATCH_READ → 403")
    void traceForward_withoutPermission_returns403() throws Exception {
        mockMvc.perform(get("/api/production/batches/{id}/trace-forward", UUID.randomUUID().toString())
                        .header("Authorization", "Bearer " + viewerToken))
                .andExpect(status().isForbidden());
    }

    /**
     * Plans a fresh batch (100 kg, no consumptions) and returns its id.
     */
    private String createPlannedBatchWith(String token) throws Exception {
        return planBatch(token, "100");
    }

    /**
     * Plans a batch (50 kg), starts production, and records a consumption of the
     * given input batch — creating one genealogy edge input → returned batch.
     */
    private String createBatchWithConsumption(String token, String inputBatchId) throws Exception {
        String batchId = planBatch(token, "50");
        // Consumptions can only be recorded while the batch is in production.
        mockMvc.perform(post("/api/production/batches/{id}/start", batchId)
                        .header("Authorization", "Bearer " + token))
                .andExpect(status().isOk());
        // Record consumption with inputBatchId; articleId is irrelevant for tracing.
        String consumptionJson = """
                {"inputBatchId": "%s", "articleId": "%s", "quantityUsed": "10.0", "quantityUnit": "KILOGRAM"}
                """.formatted(inputBatchId, UUID.randomUUID().toString());
        mockMvc.perform(post("/api/production/batches/{id}/consumptions", batchId)
                        .header("Authorization", "Bearer " + token)
                        .contentType(MediaType.APPLICATION_JSON)
                        .content(consumptionJson))
                .andExpect(status().isCreated());
        return batchId;
    }

    /**
     * Shared plumbing for both helpers above: creates an active recipe and plans
     * a batch of the given quantity (KILOGRAM), returning the new batch id.
     */
    private String planBatch(String token, String quantity) throws Exception {
        String recipeId = createActiveRecipeWith(token);
        var planRequest = new PlanBatchRequest(
                recipeId, quantity, "KILOGRAM", PRODUCTION_DATE, BEST_BEFORE_DATE);
        var planResult = mockMvc.perform(post("/api/production/batches")
                        .header("Authorization", "Bearer " + token)
                        .contentType(MediaType.APPLICATION_JSON)
                        .content(objectMapper.writeValueAsString(planRequest)))
                .andExpect(status().isCreated())
                .andReturn();
        return objectMapper.readTree(planResult.getResponse().getContentAsString()).get("id").asText();
    }
}
// ==================== POST /api/production/batches/{id}/start ungültige Übergänge ==================== // ==================== POST /api/production/batches/{id}/start ungültige Übergänge ====================
@Nested @Nested