1
0
Fork 0
mirror of https://github.com/s-frick/effigenix.git synced 2026-03-28 10:19:35 +01:00

feat(production): Produktion starten und Rohstoffverbrauch dokumentieren (StartBatch, RecordConsumption)

PLANNED-Chargen können in Produktion genommen werden (IN_PRODUCTION),
anschließend wird der Rohstoff-Verbrauch pro InputBatch dokumentiert.
Bildet die Grundlage für die Chargen-Genealogie (Tracing).
This commit is contained in:
Sebastian Frick 2026-02-20 12:15:06 +01:00
parent 8c042925eb
commit a9f5956812
31 changed files with 1733 additions and 11 deletions

View file

@ -0,0 +1,63 @@
package de.effigenix.application.production;
import de.effigenix.application.production.command.RecordConsumptionCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
@Transactional
public class RecordConsumption {

    private final BatchRepository batchRepository;
    private final AuthorizationPort authorizationPort;

    public RecordConsumption(BatchRepository batchRepository, AuthorizationPort authorizationPort) {
        this.batchRepository = batchRepository;
        this.authorizationPort = authorizationPort;
    }

    /**
     * Documents a raw-material consumption on an existing batch.
     * Flow: authorize -> load aggregate -> delegate to domain -> persist.
     */
    public Result<BatchError, Consumption> execute(RecordConsumptionCommand cmd, ActorId performedBy) {
        // Guard: only actors with BATCH_WRITE may document consumptions.
        if (!authorizationPort.can(performedBy, ProductionAction.BATCH_WRITE)) {
            return Result.failure(new BatchError.Unauthorized("Not authorized to record consumptions"));
        }

        // Build the raw draft up front; validation happens inside the aggregate.
        var draft = new ConsumptionDraft(
            cmd.inputBatchId(),
            cmd.articleId(),
            cmd.quantityUsed(),
            cmd.quantityUnit()
        );

        var targetId = BatchId.of(cmd.batchId());

        // Load the aggregate, translating repository problems into domain errors.
        Batch loaded;
        switch (batchRepository.findById(targetId)) {
            case Result.Failure(var repoErr) -> {
                return Result.failure(new BatchError.RepositoryFailure(repoErr.message()));
            }
            case Result.Success(var maybeBatch) -> {
                if (maybeBatch.isEmpty()) {
                    return Result.failure(new BatchError.BatchNotFound(targetId));
                }
                loaded = maybeBatch.get();
            }
        }

        // The aggregate enforces status, duplicate-input and quantity invariants.
        Consumption recorded;
        switch (loaded.recordConsumption(draft)) {
            case Result.Failure(var domainErr) -> { return Result.failure(domainErr); }
            case Result.Success(var val) -> recorded = val;
        }

        // Persist the mutated aggregate; surface storage failures as domain errors.
        switch (batchRepository.save(loaded)) {
            case Result.Failure(var repoErr) -> {
                return Result.failure(new BatchError.RepositoryFailure(repoErr.message()));
            }
            case Result.Success(var ignored) -> { }
        }
        return Result.success(recorded);
    }
}

View file

@ -0,0 +1,55 @@
package de.effigenix.application.production;
import de.effigenix.application.production.command.StartBatchCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
@Transactional
public class StartBatch {

    private final BatchRepository batchRepository;
    private final AuthorizationPort authorizationPort;

    public StartBatch(BatchRepository batchRepository, AuthorizationPort authorizationPort) {
        this.batchRepository = batchRepository;
        this.authorizationPort = authorizationPort;
    }

    /**
     * Moves a PLANNED batch into production.
     * Flow: authorize -> load aggregate -> delegate transition to domain -> persist.
     */
    public Result<BatchError, Batch> execute(StartBatchCommand cmd, ActorId performedBy) {
        // Guard: only actors with BATCH_WRITE may change batch state.
        if (!authorizationPort.can(performedBy, ProductionAction.BATCH_WRITE)) {
            return Result.failure(new BatchError.Unauthorized("Not authorized to start batches"));
        }

        var targetId = BatchId.of(cmd.batchId());

        // Load the aggregate, translating repository problems into domain errors.
        Batch loaded;
        switch (batchRepository.findById(targetId)) {
            case Result.Failure(var repoErr) -> {
                return Result.failure(new BatchError.RepositoryFailure(repoErr.message()));
            }
            case Result.Success(var maybeBatch) -> {
                if (maybeBatch.isEmpty()) {
                    return Result.failure(new BatchError.BatchNotFound(targetId));
                }
                loaded = maybeBatch.get();
            }
        }

        // PLANNED -> IN_PRODUCTION invariant is enforced inside the aggregate.
        switch (loaded.startProduction()) {
            case Result.Failure(var domainErr) -> { return Result.failure(domainErr); }
            case Result.Success(var ignored) -> { }
        }

        // Persist the mutated aggregate; surface storage failures as domain errors.
        switch (batchRepository.save(loaded)) {
            case Result.Failure(var repoErr) -> {
                return Result.failure(new BatchError.RepositoryFailure(repoErr.message()));
            }
            case Result.Success(var ignored) -> { }
        }
        return Result.success(loaded);
    }
}

View file

@ -0,0 +1,9 @@
package de.effigenix.application.production.command;
/**
 * Use-case input for documenting a raw-material consumption on a batch.
 * All fields are raw strings; parsing and validation happen in the domain
 * (see Consumption.create), so this command carries no invariants itself.
 */
public record RecordConsumptionCommand(
    String batchId,        // batch receiving the consumption entry
    String inputBatchId,   // batch that was consumed (genealogy link)
    String articleId,      // article of the consumed material
    String quantityUsed,   // decimal amount as string, parsed to BigDecimal downstream
    String quantityUnit    // unit name, resolved via UnitOfMeasure.valueOf downstream
) {}

View file

@ -0,0 +1,3 @@
package de.effigenix.application.production.command;
public record StartBatchCommand(String batchId) {}

View file

@ -8,6 +8,9 @@ import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* Batch aggregate root.
@ -18,6 +21,10 @@ import java.time.ZoneOffset;
* 3. BatchNumber is auto-generated (format P-YYYY-MM-DD-XXX)
* 4. New batches always start in PLANNED status
* 5. RecipeId must reference an ACTIVE recipe (enforced by Use Case)
* 6. startProduction() only allowed from PLANNED status
* 7. recordConsumption() only allowed in IN_PRODUCTION status
* 8. No duplicate inputBatchId within consumptions
* 9. Consumption quantity must be positive
*/
public class Batch {
@ -30,6 +37,7 @@ public class Batch {
private final LocalDate bestBeforeDate;
private final OffsetDateTime createdAt;
private OffsetDateTime updatedAt;
private final List<Consumption> consumptions;
private Batch(
BatchId id,
@ -40,7 +48,8 @@ public class Batch {
LocalDate productionDate,
LocalDate bestBeforeDate,
OffsetDateTime createdAt,
OffsetDateTime updatedAt
OffsetDateTime updatedAt,
List<Consumption> consumptions
) {
this.id = id;
this.batchNumber = batchNumber;
@ -51,6 +60,7 @@ public class Batch {
this.bestBeforeDate = bestBeforeDate;
this.createdAt = createdAt;
this.updatedAt = updatedAt;
this.consumptions = consumptions;
}
public static Result<BatchError, Batch> plan(BatchDraft draft, BatchNumber batchNumber) {
@ -99,10 +109,45 @@ public class Batch {
draft.productionDate(),
draft.bestBeforeDate(),
now,
now
now,
new ArrayList<>()
));
}
/**
 * Transitions this batch from PLANNED to IN_PRODUCTION (invariant 6).
 * Fails with InvalidStatusTransition for any other current status;
 * on success mutates status and bumps updatedAt (UTC).
 */
public Result<BatchError, Void> startProduction() {
    if (status != BatchStatus.PLANNED) {
        return Result.failure(new BatchError.InvalidStatusTransition(status, BatchStatus.IN_PRODUCTION));
    }
    this.status = BatchStatus.IN_PRODUCTION;
    this.updatedAt = OffsetDateTime.now(ZoneOffset.UTC);
    return Result.success(null);
}
/**
 * Documents a raw-material consumption on this batch (invariants 7-9).
 * Only allowed while IN_PRODUCTION; rejects a second entry for the same
 * input batch; full draft validation is delegated to Consumption.create.
 */
public Result<BatchError, Consumption> recordConsumption(ConsumptionDraft draft) {
    // Invariant 7: consumptions may only be documented during production.
    if (status != BatchStatus.IN_PRODUCTION) {
        return Result.failure(new BatchError.NotInProduction(id));
    }
    // Invariant 8: each input batch may appear at most once (genealogy edge).
    // Blank ids are skipped here; Consumption.create rejects them anyway.
    if (draft.inputBatchId() != null && !draft.inputBatchId().isBlank()) {
        var candidateId = BatchId.of(draft.inputBatchId());
        for (Consumption existing : consumptions) {
            if (existing.inputBatchId().equals(candidateId)) {
                return Result.failure(new BatchError.DuplicateInputBatch(candidateId));
            }
        }
    }
    // Invariant 9 (positive quantity) and remaining field checks live in the factory.
    Consumption recorded;
    switch (Consumption.create(draft)) {
        case Result.Failure(var err) -> { return Result.failure(err); }
        case Result.Success(var val) -> recorded = val;
    }
    consumptions.add(recorded);
    this.updatedAt = OffsetDateTime.now(ZoneOffset.UTC);
    return Result.success(recorded);
}
public static Batch reconstitute(
BatchId id,
BatchNumber batchNumber,
@ -112,9 +157,11 @@ public class Batch {
LocalDate productionDate,
LocalDate bestBeforeDate,
OffsetDateTime createdAt,
OffsetDateTime updatedAt
OffsetDateTime updatedAt,
List<Consumption> consumptions
) {
return new Batch(id, batchNumber, recipeId, status, plannedQuantity, productionDate, bestBeforeDate, createdAt, updatedAt);
return new Batch(id, batchNumber, recipeId, status, plannedQuantity, productionDate,
bestBeforeDate, createdAt, updatedAt, new ArrayList<>(consumptions));
}
public BatchId id() { return id; }
@ -126,4 +173,5 @@ public class Batch {
public LocalDate bestBeforeDate() { return bestBeforeDate; }
public OffsetDateTime createdAt() { return createdAt; }
public OffsetDateTime updatedAt() { return updatedAt; }
public List<Consumption> consumptions() { return Collections.unmodifiableList(consumptions); }
}

View file

@ -34,6 +34,26 @@ public sealed interface BatchError {
@Override public String code() { return "BATCH_VALIDATION_ERROR"; }
}
/** Lifecycle change the status machine forbids, e.g. starting a non-PLANNED batch. */
record InvalidStatusTransition(BatchStatus current, BatchStatus target) implements BatchError {
    @Override public String code() { return "BATCH_INVALID_STATUS_TRANSITION"; }
    @Override public String message() { return "Cannot transition from " + current + " to " + target; }
}

/** Consumption attempted on a batch that is not currently IN_PRODUCTION. */
record NotInProduction(BatchId id) implements BatchError {
    @Override public String code() { return "BATCH_NOT_IN_PRODUCTION"; }
    @Override public String message() { return "Batch '" + id.value() + "' is not in IN_PRODUCTION status"; }
}

/** Same input batch documented twice on one batch (duplicate genealogy edge). */
record DuplicateInputBatch(BatchId inputBatchId) implements BatchError {
    @Override public String code() { return "BATCH_DUPLICATE_INPUT_BATCH"; }
    @Override public String message() { return "Input batch '" + inputBatchId.value() + "' already recorded"; }
}

/** Consumption quantity failed parsing or domain validation (reason carries detail). */
record InvalidConsumptionQuantity(String reason) implements BatchError {
    @Override public String code() { return "BATCH_INVALID_CONSUMPTION_QUANTITY"; }
    @Override public String message() { return "Invalid consumption quantity: " + reason; }
}

/** Actor lacks the required production permission; the record component doubles as message(). */
record Unauthorized(String message) implements BatchError {
    @Override public String code() { return "UNAUTHORIZED"; }
}

View file

@ -0,0 +1,74 @@
package de.effigenix.domain.production;
import de.effigenix.domain.masterdata.ArticleId;
import de.effigenix.shared.common.Quantity;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.common.UnitOfMeasure;
import java.math.BigDecimal;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
/**
 * Raw-material consumption recorded against a production batch.
 * Immutable value-holding entity; created via {@link #create} (validating)
 * or {@link #reconstitute} (trusted, from persistence).
 */
public class Consumption {

    private final ConsumptionId id;
    private final BatchId inputBatchId;
    private final ArticleId articleId;
    private final Quantity quantityUsed;
    private final OffsetDateTime consumedAt;

    private Consumption(ConsumptionId id, BatchId inputBatchId, ArticleId articleId,
                        Quantity quantityUsed, OffsetDateTime consumedAt) {
        this.id = id;
        this.inputBatchId = inputBatchId;
        this.articleId = articleId;
        this.quantityUsed = quantityUsed;
        this.consumedAt = consumedAt;
    }

    /**
     * Validates the raw draft and builds a new Consumption stamped with the
     * current UTC time and a fresh id.
     *
     * @return failure with ValidationFailure for blank ids, or
     *         InvalidConsumptionQuantity for missing/unparseable amount or unit.
     */
    public static Result<BatchError, Consumption> create(ConsumptionDraft draft) {
        if (draft.inputBatchId() == null || draft.inputBatchId().isBlank()) {
            return Result.failure(new BatchError.ValidationFailure("inputBatchId must not be blank"));
        }
        if (draft.articleId() == null || draft.articleId().isBlank()) {
            return Result.failure(new BatchError.ValidationFailure("articleId must not be blank"));
        }
        // FIX: a null quantityUsed/quantityUnit previously escaped as an uncaught
        // NullPointerException (new BigDecimal(null) / UnitOfMeasure.valueOf(null)
        // throw NPE, which neither catch clause below handles). Reject explicitly.
        if (draft.quantityUsed() == null || draft.quantityUsed().isBlank()) {
            return Result.failure(new BatchError.InvalidConsumptionQuantity("quantityUsed must not be blank"));
        }
        if (draft.quantityUnit() == null || draft.quantityUnit().isBlank()) {
            return Result.failure(new BatchError.InvalidConsumptionQuantity("quantityUnit must not be blank"));
        }
        Quantity quantity;
        try {
            var amount = new BigDecimal(draft.quantityUsed());
            var uom = UnitOfMeasure.valueOf(draft.quantityUnit());
            switch (Quantity.of(amount, uom)) {
                case Result.Failure(var err) -> {
                    // NOTE(review): err.toString() exposes the error's default record
                    // rendering; consider a message accessor if Quantity's error offers one.
                    return Result.failure(new BatchError.InvalidConsumptionQuantity(err.toString()));
                }
                case Result.Success(var qty) -> quantity = qty;
            }
        } catch (NumberFormatException e) {
            // Must be caught before IllegalArgumentException (it is a subclass).
            return Result.failure(new BatchError.InvalidConsumptionQuantity(
                "Invalid amount format: " + draft.quantityUsed()));
        } catch (IllegalArgumentException e) {
            // Thrown by UnitOfMeasure.valueOf for unknown unit names.
            return Result.failure(new BatchError.InvalidConsumptionQuantity(
                "Invalid unit: " + draft.quantityUnit()));
        }
        return Result.success(new Consumption(
            ConsumptionId.generate(),
            BatchId.of(draft.inputBatchId()),
            ArticleId.of(draft.articleId()),
            quantity,
            OffsetDateTime.now(ZoneOffset.UTC)
        ));
    }

    /** Rebuilds a Consumption from trusted persisted state; no validation. */
    public static Consumption reconstitute(ConsumptionId id, BatchId inputBatchId, ArticleId articleId,
                                           Quantity quantityUsed, OffsetDateTime consumedAt) {
        return new Consumption(id, inputBatchId, articleId, quantityUsed, consumedAt);
    }

    public ConsumptionId id() { return id; }
    public BatchId inputBatchId() { return inputBatchId; }
    public ArticleId articleId() { return articleId; }
    public Quantity quantityUsed() { return quantityUsed; }
    public OffsetDateTime consumedAt() { return consumedAt; }
}

View file

@ -0,0 +1,8 @@
package de.effigenix.domain.production;
/**
 * Raw, not-yet-validated input for one consumption entry.
 * Carried from the use case into Batch.recordConsumption / Consumption.create,
 * which perform all parsing and validation.
 */
public record ConsumptionDraft(
    String inputBatchId,  // consumed batch (genealogy link)
    String articleId,     // article of the consumed material
    String quantityUsed,  // decimal amount as string
    String quantityUnit   // unit name for UnitOfMeasure.valueOf
) {}

View file

@ -0,0 +1,20 @@
package de.effigenix.domain.production;
import java.util.UUID;
/**
 * Identity of a single consumption entry, backed by a non-blank string
 * (a random UUID when generated by the domain).
 */
public record ConsumptionId(String value) {

    /** Rejects null/blank values so every id is usable as a key. */
    public ConsumptionId {
        if (value == null || value.isBlank()) {
            throw new IllegalArgumentException("ConsumptionId must not be blank");
        }
    }

    /** Wraps an existing id string (e.g. read back from persistence). */
    public static ConsumptionId of(String value) {
        return new ConsumptionId(value);
    }

    /** Creates a fresh random identity for a newly recorded consumption. */
    public static ConsumptionId generate() {
        return of(UUID.randomUUID().toString());
    }
}

View file

@ -0,0 +1,7 @@
package de.effigenix.domain.production.event;
import de.effigenix.domain.production.BatchId;
import java.time.OffsetDateTime;
public record BatchStarted(BatchId batchId, OffsetDateTime startedAt) {}

View file

@ -0,0 +1,6 @@
package de.effigenix.domain.production.event;
import de.effigenix.domain.production.BatchId;
import de.effigenix.domain.production.ConsumptionId;
public record ConsumptionRecorded(BatchId batchId, ConsumptionId consumptionId, BatchId inputBatchId) {}

View file

@ -9,6 +9,8 @@ import de.effigenix.application.production.FindBatchByNumber;
import de.effigenix.application.production.GetBatch;
import de.effigenix.application.production.ListBatches;
import de.effigenix.application.production.PlanBatch;
import de.effigenix.application.production.RecordConsumption;
import de.effigenix.application.production.StartBatch;
import de.effigenix.application.production.RecipeCycleChecker;
import de.effigenix.application.production.GetRecipe;
import de.effigenix.application.production.ListRecipes;
@ -96,4 +98,14 @@ public class ProductionUseCaseConfiguration {
public FindBatchByNumber findBatchByNumber(BatchRepository batchRepository, AuthorizationPort authorizationPort) {
return new FindBatchByNumber(batchRepository, authorizationPort);
}
/** Wires the StartBatch use case against the batch repository and authorization port. */
@Bean
public StartBatch startBatch(BatchRepository batchRepository, AuthorizationPort authorizationPort) {
    return new StartBatch(batchRepository, authorizationPort);
}

/** Wires the RecordConsumption use case against the batch repository and authorization port. */
@Bean
public RecordConsumption recordConsumption(BatchRepository batchRepository, AuthorizationPort authorizationPort) {
    return new RecordConsumption(batchRepository, authorizationPort);
}
}

View file

@ -5,6 +5,8 @@ import jakarta.persistence.*;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.OffsetDateTime;
import java.util.ArrayList;
import java.util.List;
@Entity
@Table(name = "batches")
@ -41,6 +43,9 @@ public class BatchEntity {
@Column(name = "updated_at", nullable = false)
private OffsetDateTime updatedAt;
// One row per documented raw-material consumption; lifecycle owned by the batch
// (cascade ALL + orphanRemoval), matching the aggregate boundary.
// NOTE(review): EAGER fetch loads all consumptions on every batch read —
// confirm this is acceptable for list queries.
@OneToMany(mappedBy = "batch", cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER)
private List<ConsumptionEntity> consumptions = new ArrayList<>();
protected BatchEntity() {}
public BatchEntity(
@ -77,4 +82,6 @@ public class BatchEntity {
public LocalDate getBestBeforeDate() { return bestBeforeDate; }
public OffsetDateTime getCreatedAt() { return createdAt; }
public OffsetDateTime getUpdatedAt() { return updatedAt; }
// Returns the live JPA-managed collection (intentionally not copied).
public List<ConsumptionEntity> getConsumptions() { return consumptions; }
// Used by the mapper to attach the full consumption list when building the entity.
public void setConsumptions(List<ConsumptionEntity> consumptions) { this.consumptions = consumptions; }
}

View file

@ -0,0 +1,55 @@
package de.effigenix.infrastructure.production.persistence.entity;
import jakarta.persistence.*;
import java.math.BigDecimal;
import java.time.OffsetDateTime;
/**
 * JPA row for one raw-material consumption of a production batch
 * (table batch_consumptions, changelog 019). Child side of the
 * BatchEntity one-to-many; field name "batch" is referenced by mappedBy.
 */
@Entity
@Table(name = "batch_consumptions")
public class ConsumptionEntity {

    // UUID string, assigned by the domain (ConsumptionId), not generated by the DB.
    @Id
    @Column(name = "id", nullable = false, length = 36)
    private String id;

    // Owning batch; LAZY is safe here because the entity is always reached via its batch.
    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "batch_id", nullable = false)
    private BatchEntity batch;

    // Consumed batch (genealogy edge target); unique per batch via uq_consumption_batch_input.
    @Column(name = "input_batch_id", nullable = false, length = 36)
    private String inputBatchId;

    @Column(name = "article_id", nullable = false, length = 36)
    private String articleId;

    // Quantity split into amount + unit name, mirroring the Quantity value object.
    @Column(name = "quantity_used_amount", nullable = false, precision = 19, scale = 6)
    private BigDecimal quantityUsedAmount;

    @Column(name = "quantity_used_unit", nullable = false, length = 10)
    private String quantityUsedUnit;

    @Column(name = "consumed_at", nullable = false)
    private OffsetDateTime consumedAt;

    // Required by JPA.
    protected ConsumptionEntity() {}

    public ConsumptionEntity(String id, BatchEntity batch, String inputBatchId, String articleId,
                             BigDecimal quantityUsedAmount, String quantityUsedUnit, OffsetDateTime consumedAt) {
        this.id = id;
        this.batch = batch;
        this.inputBatchId = inputBatchId;
        this.articleId = articleId;
        this.quantityUsedAmount = quantityUsedAmount;
        this.quantityUsedUnit = quantityUsedUnit;
        this.consumedAt = consumedAt;
    }

    public String getId() { return id; }
    public BatchEntity getBatch() { return batch; }
    public String getInputBatchId() { return inputBatchId; }
    public String getArticleId() { return articleId; }
    public BigDecimal getQuantityUsedAmount() { return quantityUsedAmount; }
    public String getQuantityUsedUnit() { return quantityUsedUnit; }
    public OffsetDateTime getConsumedAt() { return consumedAt; }
}

View file

@ -1,16 +1,21 @@
package de.effigenix.infrastructure.production.persistence.mapper;
import de.effigenix.domain.masterdata.ArticleId;
import de.effigenix.domain.production.*;
import de.effigenix.infrastructure.production.persistence.entity.BatchEntity;
import de.effigenix.infrastructure.production.persistence.entity.ConsumptionEntity;
import de.effigenix.shared.common.Quantity;
import de.effigenix.shared.common.UnitOfMeasure;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List;
@Component
public class BatchMapper {
public BatchEntity toEntity(Batch batch) {
return new BatchEntity(
var entity = new BatchEntity(
batch.id().value(),
batch.batchNumber().value(),
batch.recipeId().value(),
@ -22,9 +27,38 @@ public class BatchMapper {
batch.createdAt(),
batch.updatedAt()
);
List<ConsumptionEntity> consumptionEntities = new ArrayList<>();
for (Consumption c : batch.consumptions()) {
consumptionEntities.add(new ConsumptionEntity(
c.id().value(),
entity,
c.inputBatchId().value(),
c.articleId().value(),
c.quantityUsed().amount(),
c.quantityUsed().uom().name(),
c.consumedAt()
));
}
entity.setConsumptions(consumptionEntities);
return entity;
}
public Batch toDomain(BatchEntity entity) {
List<Consumption> consumptions = entity.getConsumptions().stream()
.map(ce -> Consumption.reconstitute(
ConsumptionId.of(ce.getId()),
BatchId.of(ce.getInputBatchId()),
ArticleId.of(ce.getArticleId()),
Quantity.reconstitute(
ce.getQuantityUsedAmount(),
UnitOfMeasure.valueOf(ce.getQuantityUsedUnit())
),
ce.getConsumedAt()
))
.toList();
return Batch.reconstitute(
BatchId.of(entity.getId()),
new BatchNumber(entity.getBatchNumber()),
@ -37,7 +71,8 @@ public class BatchMapper {
entity.getProductionDate(),
entity.getBestBeforeDate(),
entity.getCreatedAt(),
entity.getUpdatedAt()
entity.getUpdatedAt(),
consumptions
);
}
}

View file

@ -4,14 +4,20 @@ import de.effigenix.application.production.FindBatchByNumber;
import de.effigenix.application.production.GetBatch;
import de.effigenix.application.production.ListBatches;
import de.effigenix.application.production.PlanBatch;
import de.effigenix.application.production.RecordConsumption;
import de.effigenix.application.production.StartBatch;
import de.effigenix.application.production.command.PlanBatchCommand;
import de.effigenix.application.production.command.RecordConsumptionCommand;
import de.effigenix.application.production.command.StartBatchCommand;
import de.effigenix.domain.production.BatchError;
import de.effigenix.domain.production.BatchId;
import de.effigenix.domain.production.BatchNumber;
import de.effigenix.domain.production.BatchStatus;
import de.effigenix.infrastructure.production.web.dto.BatchResponse;
import de.effigenix.infrastructure.production.web.dto.BatchSummaryResponse;
import de.effigenix.infrastructure.production.web.dto.ConsumptionResponse;
import de.effigenix.infrastructure.production.web.dto.PlanBatchRequest;
import de.effigenix.infrastructure.production.web.dto.RecordConsumptionRequest;
import de.effigenix.shared.security.ActorId;
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
import io.swagger.v3.oas.annotations.tags.Tag;
@ -40,13 +46,18 @@ public class BatchController {
private final GetBatch getBatch;
private final ListBatches listBatches;
private final FindBatchByNumber findBatchByNumber;
private final StartBatch startBatch;
private final RecordConsumption recordConsumption;
public BatchController(PlanBatch planBatch, GetBatch getBatch, ListBatches listBatches,
FindBatchByNumber findBatchByNumber) {
FindBatchByNumber findBatchByNumber, StartBatch startBatch,
RecordConsumption recordConsumption) {
this.planBatch = planBatch;
this.getBatch = getBatch;
this.listBatches = listBatches;
this.findBatchByNumber = findBatchByNumber;
this.startBatch = startBatch;
this.recordConsumption = recordConsumption;
}
@GetMapping("/{id}")
@ -150,6 +161,51 @@ public class BatchController {
.body(BatchResponse.from(result.unsafeGetValue()));
}
/**
 * Starts production for a PLANNED batch.
 * Authorization is enforced both here (Spring Security) and inside the use case;
 * domain failures are mapped to HTTP statuses via BatchDomainErrorException.
 */
@PostMapping("/{id}/start")
@PreAuthorize("hasAuthority('BATCH_WRITE')")
public ResponseEntity<BatchResponse> startBatch(
    @PathVariable("id") String id,
    Authentication authentication
) {
    logger.info("Starting batch: {} by actor: {}", id, authentication.getName());
    var cmd = new StartBatchCommand(id);
    var result = startBatch.execute(cmd, ActorId.of(authentication.getName()));
    if (result.isFailure()) {
        throw new BatchDomainErrorException(result.unsafeGetError());
    }
    // 200 with the updated batch (now IN_PRODUCTION).
    return ResponseEntity.ok(BatchResponse.from(result.unsafeGetValue()));
}
/**
 * Documents a raw-material consumption on an IN_PRODUCTION batch.
 * The batch id comes from the path; the request body carries the consumption data
 * (bean-validated as non-blank). Returns 201 with the created consumption;
 * domain failures are mapped to HTTP statuses via BatchDomainErrorException.
 */
@PostMapping("/{id}/consumptions")
@PreAuthorize("hasAuthority('BATCH_WRITE')")
public ResponseEntity<ConsumptionResponse> recordConsumption(
    @PathVariable("id") String id,
    @Valid @RequestBody RecordConsumptionRequest request,
    Authentication authentication
) {
    logger.info("Recording consumption for batch: {} by actor: {}", id, authentication.getName());
    var cmd = new RecordConsumptionCommand(
        id,
        request.inputBatchId(),
        request.articleId(),
        request.quantityUsed(),
        request.quantityUnit()
    );
    var result = recordConsumption.execute(cmd, ActorId.of(authentication.getName()));
    if (result.isFailure()) {
        throw new BatchDomainErrorException(result.unsafeGetError());
    }
    return ResponseEntity.status(HttpStatus.CREATED)
        .body(ConsumptionResponse.from(result.unsafeGetValue()));
}
private static String filterType(String status, LocalDate productionDate, String articleId) {
int count = (status != null ? 1 : 0) + (productionDate != null ? 1 : 0) + (articleId != null ? 1 : 0);
if (count > 1) return "ambiguous";

View file

@ -4,6 +4,7 @@ import de.effigenix.domain.production.Batch;
import java.time.LocalDate;
import java.time.OffsetDateTime;
import java.util.List;
public record BatchResponse(
String id,
@ -14,10 +15,15 @@ public record BatchResponse(
String plannedQuantityUnit,
LocalDate productionDate,
LocalDate bestBeforeDate,
List<ConsumptionResponse> consumptions,
OffsetDateTime createdAt,
OffsetDateTime updatedAt
) {
public static BatchResponse from(Batch batch) {
var consumptions = batch.consumptions().stream()
.map(ConsumptionResponse::from)
.toList();
return new BatchResponse(
batch.id().value(),
batch.batchNumber().value(),
@ -27,6 +33,7 @@ public record BatchResponse(
batch.plannedQuantity().uom().name(),
batch.productionDate(),
batch.bestBeforeDate(),
consumptions,
batch.createdAt(),
batch.updatedAt()
);

View file

@ -0,0 +1,25 @@
package de.effigenix.infrastructure.production.web.dto;
import de.effigenix.domain.production.Consumption;
import java.time.OffsetDateTime;
/**
 * JSON view of a single consumption entry on a batch.
 * Amounts are rendered with toPlainString so clients never see scientific notation.
 */
public record ConsumptionResponse(
    String id,
    String inputBatchId,
    String articleId,
    String quantityUsed,
    String quantityUsedUnit,
    OffsetDateTime consumedAt
) {
    /** Maps a domain Consumption one-to-one onto the response shape. */
    public static ConsumptionResponse from(Consumption consumption) {
        var quantity = consumption.quantityUsed();
        var amountText = quantity.amount().toPlainString();
        var unitName = quantity.uom().name();
        return new ConsumptionResponse(
            consumption.id().value(),
            consumption.inputBatchId().value(),
            consumption.articleId().value(),
            amountText,
            unitName,
            consumption.consumedAt()
        );
    }
}

View file

@ -0,0 +1,10 @@
package de.effigenix.infrastructure.production.web.dto;
import jakarta.validation.constraints.NotBlank;
/**
 * HTTP request body for recording a consumption.
 * Bean Validation rejects blank fields before the use case runs;
 * the target batch id is taken from the URL path, not the body.
 */
public record RecordConsumptionRequest(
    @NotBlank String inputBatchId,
    @NotBlank String articleId,
    @NotBlank String quantityUsed,
    @NotBlank String quantityUnit
) {}

View file

@ -33,6 +33,10 @@ public final class ProductionErrorHttpStatusMapper {
case BatchError.InvalidPlannedQuantity e -> 400;
case BatchError.InvalidDates e -> 400;
case BatchError.RecipeNotActive e -> 409;
case BatchError.InvalidStatusTransition e -> 409;
case BatchError.NotInProduction e -> 409;
case BatchError.DuplicateInputBatch e -> 409;
case BatchError.InvalidConsumptionQuantity e -> 400;
case BatchError.ValidationFailure e -> 400;
case BatchError.Unauthorized e -> 403;
case BatchError.RepositoryFailure e -> 500;

View file

@ -0,0 +1,48 @@
<?xml version="1.0" encoding="UTF-8"?>
<databaseChangeLog
xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-latest.xsd">
    <!-- One row per documented raw-material consumption of a production batch
         (child table of batches; maps ConsumptionEntity).
         uq_consumption_batch_input mirrors domain invariant 8: no duplicate
         input batch per batch. idx_consumption_input_batch_id supports
         reverse genealogy lookups ("where was this batch consumed?").
         Comment kept outside the changeSet so its checksum is unaffected. -->
    <changeSet id="019-create-batch-consumptions-table" author="effigenix">
        <createTable tableName="batch_consumptions">
            <column name="id" type="varchar(36)">
                <constraints primaryKey="true" nullable="false"/>
            </column>
            <column name="batch_id" type="varchar(36)">
                <constraints nullable="false"
                             foreignKeyName="fk_consumption_batch"
                             references="batches(id)"/>
            </column>
            <column name="input_batch_id" type="varchar(36)">
                <constraints nullable="false"/>
            </column>
            <column name="article_id" type="varchar(36)">
                <constraints nullable="false"/>
            </column>
            <column name="quantity_used_amount" type="decimal(19,6)">
                <constraints nullable="false"/>
            </column>
            <column name="quantity_used_unit" type="varchar(10)">
                <constraints nullable="false"/>
            </column>
            <column name="consumed_at" type="timestamptz">
                <constraints nullable="false"/>
            </column>
        </createTable>
        <addUniqueConstraint tableName="batch_consumptions"
                             columnNames="batch_id, input_batch_id"
                             constraintName="uq_consumption_batch_input"/>
        <createIndex tableName="batch_consumptions" indexName="idx_consumption_batch_id">
            <column name="batch_id"/>
        </createIndex>
        <createIndex tableName="batch_consumptions" indexName="idx_consumption_input_batch_id">
            <column name="input_batch_id"/>
        </createIndex>
    </changeSet>
</databaseChangeLog>

View file

@ -23,5 +23,6 @@
<include file="db/changelog/changes/016-create-batch-number-sequences-table.xml"/>
<include file="db/changelog/changes/017-timestamps-to-timestamptz.xml"/>
<include file="db/changelog/changes/018-add-article-id-to-recipes.xml"/>
<include file="db/changelog/changes/019-create-batch-consumptions-table.xml"/>
</databaseChangeLog>

View file

@ -18,6 +18,7 @@ import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.List;
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
@ -51,7 +52,8 @@ class FindBatchByNumberTest {
LocalDate.of(2026, 3, 1),
LocalDate.of(2026, 6, 1),
OffsetDateTime.now(ZoneOffset.UTC),
OffsetDateTime.now(ZoneOffset.UTC)
OffsetDateTime.now(ZoneOffset.UTC),
List.of()
);
}

View file

@ -18,6 +18,7 @@ import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.List;
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
@ -49,7 +50,8 @@ class GetBatchTest {
LocalDate.of(2026, 3, 1),
LocalDate.of(2026, 6, 1),
OffsetDateTime.now(ZoneOffset.UTC),
OffsetDateTime.now(ZoneOffset.UTC)
OffsetDateTime.now(ZoneOffset.UTC),
List.of()
);
}

View file

@ -54,7 +54,8 @@ class ListBatchesTest {
PRODUCTION_DATE,
LocalDate.of(2026, 6, 1),
OffsetDateTime.now(ZoneOffset.UTC),
OffsetDateTime.now(ZoneOffset.UTC)
OffsetDateTime.now(ZoneOffset.UTC),
List.of()
);
}

View file

@ -0,0 +1,199 @@
package de.effigenix.application.production;
import de.effigenix.application.production.command.RecordConsumptionCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Quantity;
import de.effigenix.shared.common.RepositoryError;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.common.UnitOfMeasure;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.List;
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
@ExtendWith(MockitoExtension.class)
@DisplayName("RecordConsumption Use Case")
class RecordConsumptionTest {
@Mock private BatchRepository batchRepository;   // repository port, stubbed per test
@Mock private AuthorizationPort authPort;        // authorization port, stubbed per test
private RecordConsumption recordConsumption;     // unit under test
private ActorId performedBy;                     // acting user for all executions

@BeforeEach
void setUp() {
    recordConsumption = new RecordConsumption(batchRepository, authPort);
    performedBy = ActorId.of("admin-user");
}
/**
 * Shared fixture: reconstitutes a batch in the given status.
 * Extracted because inProductionBatch/plannedBatch previously duplicated
 * the entire reconstitution call, differing only in the status argument.
 */
private Batch batchWithStatus(String id, BatchStatus status) {
    return Batch.reconstitute(
        BatchId.of(id),
        BatchNumber.generate(LocalDate.of(2026, 3, 1), 1),
        RecipeId.of("recipe-1"),
        status,
        Quantity.of(new BigDecimal("100"), UnitOfMeasure.KILOGRAM).unsafeGetValue(),
        LocalDate.of(2026, 3, 1),
        LocalDate.of(2026, 6, 1),
        OffsetDateTime.now(ZoneOffset.UTC),
        OffsetDateTime.now(ZoneOffset.UTC),
        List.of()
    );
}

/** Fixture: batch that may receive consumptions. */
private Batch inProductionBatch(String id) {
    return batchWithStatus(id, BatchStatus.IN_PRODUCTION);
}

/** Fixture: batch still in planning; must reject consumptions. */
private Batch plannedBatch(String id) {
    return batchWithStatus(id, BatchStatus.PLANNED);
}
/** Fixture: well-formed command consuming 5 kg of input-batch-1 / article-1. */
private RecordConsumptionCommand validCommand(String batchId) {
    return new RecordConsumptionCommand(batchId, "input-batch-1", "article-1", "5.0", "KILOGRAM");
}
@Test
@DisplayName("should record consumption when batch is IN_PRODUCTION")
void should_RecordConsumption_When_InProduction() {
    // given: authorized actor, batch in IN_PRODUCTION, repository stubbed to accept saves
    var batchId = BatchId.of("batch-1");
    var batch = inProductionBatch("batch-1");
    when(authPort.can(performedBy, ProductionAction.BATCH_WRITE)).thenReturn(true);
    when(batchRepository.findById(batchId)).thenReturn(Result.success(Optional.of(batch)));
    when(batchRepository.save(any())).thenReturn(Result.success(null));
    // when
    var result = recordConsumption.execute(validCommand("batch-1"), performedBy);
    // then: consumption echoes the command data and the mutated aggregate is persisted
    assertThat(result.isSuccess()).isTrue();
    assertThat(result.unsafeGetValue().inputBatchId().value()).isEqualTo("input-batch-1");
    assertThat(result.unsafeGetValue().articleId().value()).isEqualTo("article-1");
    verify(batchRepository).save(batch);
}

@Test
@DisplayName("should fail when batch not found")
void should_Fail_When_BatchNotFound() {
    // given: repository lookup succeeds but yields no batch
    when(authPort.can(performedBy, ProductionAction.BATCH_WRITE)).thenReturn(true);
    when(batchRepository.findById(any())).thenReturn(Result.success(Optional.empty()));
    // when
    var result = recordConsumption.execute(validCommand("nonexistent"), performedBy);
    // then: BatchNotFound and nothing is saved
    assertThat(result.isFailure()).isTrue();
    assertThat(result.unsafeGetError()).isInstanceOf(BatchError.BatchNotFound.class);
    verify(batchRepository, never()).save(any());
}

@Test
@DisplayName("should fail when batch is not IN_PRODUCTION")
void should_Fail_When_NotInProduction() {
    // given: batch still PLANNED (invariant 7 must reject the consumption)
    var batchId = BatchId.of("batch-1");
    var batch = plannedBatch("batch-1");
    when(authPort.can(performedBy, ProductionAction.BATCH_WRITE)).thenReturn(true);
    when(batchRepository.findById(batchId)).thenReturn(Result.success(Optional.of(batch)));
    // when
    var result = recordConsumption.execute(validCommand("batch-1"), performedBy);
    // then
    assertThat(result.isFailure()).isTrue();
    assertThat(result.unsafeGetError()).isInstanceOf(BatchError.NotInProduction.class);
    verify(batchRepository, never()).save(any());
}
@Test
@DisplayName("should fail when duplicate inputBatchId")
void should_Fail_When_DuplicateInputBatch() {
    // Same Batch instance is returned for both executions, so the first
    // recorded consumption is visible to the second attempt.
    var batchId = BatchId.of("batch-1");
    var batch = inProductionBatch("batch-1");
    when(authPort.can(performedBy, ProductionAction.BATCH_WRITE)).thenReturn(true);
    when(batchRepository.findById(batchId)).thenReturn(Result.success(Optional.of(batch)));
    when(batchRepository.save(any())).thenReturn(Result.success(null));

    // First consumption succeeds — guard the scenario's precondition explicitly
    // instead of silently assuming it.
    var first = recordConsumption.execute(validCommand("batch-1"), performedBy);
    assertThat(first.isSuccess()).isTrue();

    // Second with same inputBatchId fails
    var result = recordConsumption.execute(validCommand("batch-1"), performedBy);

    assertThat(result.isFailure()).isTrue();
    assertThat(result.unsafeGetError()).isInstanceOf(BatchError.DuplicateInputBatch.class);
    // Only the first attempt may have been persisted; the rejected duplicate
    // must not trigger another save.
    verify(batchRepository, times(1)).save(any());
}
@Test
@DisplayName("should fail when consumption quantity is invalid")
void should_Fail_When_InvalidQuantity() {
    // Quantity "0" is rejected by the domain; nothing may be persisted.
    var batch = inProductionBatch("batch-1");
    when(authPort.can(performedBy, ProductionAction.BATCH_WRITE)).thenReturn(true);
    when(batchRepository.findById(BatchId.of("batch-1"))).thenReturn(Result.success(Optional.of(batch)));

    var invalid = new RecordConsumptionCommand("batch-1", "input-1", "article-1", "0", "KILOGRAM");
    var outcome = recordConsumption.execute(invalid, performedBy);

    assertThat(outcome.isFailure()).isTrue();
    assertThat(outcome.unsafeGetError()).isInstanceOf(BatchError.InvalidConsumptionQuantity.class);
    verify(batchRepository, never()).save(any());
}
@Test
@DisplayName("should fail with Unauthorized when actor lacks permission")
void should_FailWithUnauthorized_When_ActorLacksPermission() {
    // Authorization is checked before any repository access.
    when(authPort.can(performedBy, ProductionAction.BATCH_WRITE)).thenReturn(false);

    var outcome = recordConsumption.execute(validCommand("batch-1"), performedBy);

    assertThat(outcome.isFailure()).isTrue();
    assertThat(outcome.unsafeGetError()).isInstanceOf(BatchError.Unauthorized.class);
    verify(batchRepository, never()).findById(any());
}
@Test
@DisplayName("should fail with RepositoryFailure on repository error during find")
void should_FailWithRepositoryFailure_When_FindFails() {
    // A database error while loading the batch is mapped to RepositoryFailure.
    when(authPort.can(performedBy, ProductionAction.BATCH_WRITE)).thenReturn(true);
    when(batchRepository.findById(any()))
            .thenReturn(Result.failure(new RepositoryError.DatabaseError("connection lost")));

    var result = recordConsumption.execute(validCommand("batch-1"), performedBy);

    assertThat(result.isFailure()).isTrue();
    assertThat(result.unsafeGetError()).isInstanceOf(BatchError.RepositoryFailure.class);
    // Consistent with the other failure-path tests in this class: the use case
    // must not attempt to persist anything after a failed load.
    verify(batchRepository, never()).save(any());
}
@Test
@DisplayName("should fail with RepositoryFailure on repository error during save")
void should_FailWithRepositoryFailure_When_SaveFails() {
    // Save-side DB errors are surfaced as BatchError.RepositoryFailure.
    var batch = inProductionBatch("batch-1");
    when(authPort.can(performedBy, ProductionAction.BATCH_WRITE)).thenReturn(true);
    when(batchRepository.findById(BatchId.of("batch-1"))).thenReturn(Result.success(Optional.of(batch)));
    when(batchRepository.save(any()))
            .thenReturn(Result.failure(new RepositoryError.DatabaseError("write error")));

    var outcome = recordConsumption.execute(validCommand("batch-1"), performedBy);

    assertThat(outcome.isFailure()).isTrue();
    assertThat(outcome.unsafeGetError()).isInstanceOf(BatchError.RepositoryFailure.class);
}
}

View file

@ -0,0 +1,159 @@
package de.effigenix.application.production;
import de.effigenix.application.production.command.StartBatchCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Quantity;
import de.effigenix.shared.common.RepositoryError;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.common.UnitOfMeasure;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.List;
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
/**
 * Unit tests for the {@code StartBatch} use case: authorization check,
 * PLANNED → IN_PRODUCTION transition, and error mapping for missing batches,
 * invalid transitions, and repository failures.
 */
@ExtendWith(MockitoExtension.class)
@DisplayName("StartBatch Use Case")
class StartBatchTest {

    @Mock private BatchRepository batchRepository;
    @Mock private AuthorizationPort authPort;

    private StartBatch startBatch;
    private ActorId performedBy;

    @BeforeEach
    void setUp() {
        startBatch = new StartBatch(batchRepository, authPort);
        performedBy = ActorId.of("admin-user");
    }

    /**
     * Reconstitutes a batch with the given id and status; all other fields are
     * fixed test data. Shared by the per-status fixture factories below so the
     * fixtures cannot drift apart.
     */
    private Batch batchWithStatus(String id, BatchStatus status) {
        return Batch.reconstitute(
                BatchId.of(id),
                BatchNumber.generate(LocalDate.of(2026, 3, 1), 1),
                RecipeId.of("recipe-1"),
                status,
                Quantity.of(new BigDecimal("100"), UnitOfMeasure.KILOGRAM).unsafeGetValue(),
                LocalDate.of(2026, 3, 1),
                LocalDate.of(2026, 6, 1),
                OffsetDateTime.now(ZoneOffset.UTC),
                OffsetDateTime.now(ZoneOffset.UTC),
                List.of()
        );
    }

    private Batch plannedBatch(String id) {
        return batchWithStatus(id, BatchStatus.PLANNED);
    }

    private Batch inProductionBatch(String id) {
        return batchWithStatus(id, BatchStatus.IN_PRODUCTION);
    }

    @Test
    @DisplayName("should start batch when PLANNED")
    void should_StartBatch_When_Planned() {
        var batchId = BatchId.of("batch-1");
        var batch = plannedBatch("batch-1");
        when(authPort.can(performedBy, ProductionAction.BATCH_WRITE)).thenReturn(true);
        when(batchRepository.findById(batchId)).thenReturn(Result.success(Optional.of(batch)));
        when(batchRepository.save(any())).thenReturn(Result.success(null));

        var result = startBatch.execute(new StartBatchCommand("batch-1"), performedBy);

        assertThat(result.isSuccess()).isTrue();
        assertThat(result.unsafeGetValue().status()).isEqualTo(BatchStatus.IN_PRODUCTION);
        verify(batchRepository).save(batch);
    }

    @Test
    @DisplayName("should fail when batch not found")
    void should_Fail_When_BatchNotFound() {
        var batchId = BatchId.of("nonexistent");
        when(authPort.can(performedBy, ProductionAction.BATCH_WRITE)).thenReturn(true);
        when(batchRepository.findById(batchId)).thenReturn(Result.success(Optional.empty()));

        var result = startBatch.execute(new StartBatchCommand("nonexistent"), performedBy);

        assertThat(result.isFailure()).isTrue();
        assertThat(result.unsafeGetError()).isInstanceOf(BatchError.BatchNotFound.class);
        verify(batchRepository, never()).save(any());
    }

    @Test
    @DisplayName("should fail when batch already IN_PRODUCTION")
    void should_Fail_When_AlreadyInProduction() {
        var batchId = BatchId.of("batch-1");
        var batch = inProductionBatch("batch-1");
        when(authPort.can(performedBy, ProductionAction.BATCH_WRITE)).thenReturn(true);
        when(batchRepository.findById(batchId)).thenReturn(Result.success(Optional.of(batch)));

        var result = startBatch.execute(new StartBatchCommand("batch-1"), performedBy);

        assertThat(result.isFailure()).isTrue();
        assertThat(result.unsafeGetError()).isInstanceOf(BatchError.InvalidStatusTransition.class);
        verify(batchRepository, never()).save(any());
    }

    @Test
    @DisplayName("should fail with Unauthorized when actor lacks permission")
    void should_FailWithUnauthorized_When_ActorLacksPermission() {
        // Authorization is checked first: the repository must never be touched.
        when(authPort.can(performedBy, ProductionAction.BATCH_WRITE)).thenReturn(false);

        var result = startBatch.execute(new StartBatchCommand("batch-1"), performedBy);

        assertThat(result.isFailure()).isTrue();
        assertThat(result.unsafeGetError()).isInstanceOf(BatchError.Unauthorized.class);
        verify(batchRepository, never()).findById(any());
    }

    @Test
    @DisplayName("should fail with RepositoryFailure on repository error during find")
    void should_FailWithRepositoryFailure_When_FindFails() {
        when(authPort.can(performedBy, ProductionAction.BATCH_WRITE)).thenReturn(true);
        when(batchRepository.findById(any()))
                .thenReturn(Result.failure(new RepositoryError.DatabaseError("connection lost")));

        var result = startBatch.execute(new StartBatchCommand("batch-1"), performedBy);

        assertThat(result.isFailure()).isTrue();
        assertThat(result.unsafeGetError()).isInstanceOf(BatchError.RepositoryFailure.class);
        // Consistent with the other failure-path tests: no save after a failed load.
        verify(batchRepository, never()).save(any());
    }

    @Test
    @DisplayName("should fail with RepositoryFailure on repository error during save")
    void should_FailWithRepositoryFailure_When_SaveFails() {
        var batchId = BatchId.of("batch-1");
        var batch = plannedBatch("batch-1");
        when(authPort.can(performedBy, ProductionAction.BATCH_WRITE)).thenReturn(true);
        when(batchRepository.findById(batchId)).thenReturn(Result.success(Optional.of(batch)));
        when(batchRepository.save(any()))
                .thenReturn(Result.failure(new RepositoryError.DatabaseError("write error")));

        var result = startBatch.execute(new StartBatchCommand("batch-1"), performedBy);

        assertThat(result.isFailure()).isTrue();
        assertThat(result.unsafeGetError()).isInstanceOf(BatchError.RepositoryFailure.class);
    }
}

View file

@ -10,6 +10,7 @@ import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
@ -173,6 +174,194 @@ class BatchTest {
}
}
@Nested
@DisplayName("startProduction()")
class StartProduction {

    /** Reconstitutes a batch already in the given status; other fields are fixed test data. */
    private Batch batchInStatus(BatchStatus status) {
        return Batch.reconstitute(
                BatchId.of("b-1"), BATCH_NUMBER, RecipeId.of("r-1"),
                status,
                Quantity.of(new BigDecimal("100"), UnitOfMeasure.KILOGRAM).unsafeGetValue(),
                PRODUCTION_DATE, BEST_BEFORE_DATE,
                OffsetDateTime.now(ZoneOffset.UTC), OffsetDateTime.now(ZoneOffset.UTC),
                List.of()
        );
    }

    @Test
    @DisplayName("should transition PLANNED to IN_PRODUCTION")
    void should_TransitionToInProduction_When_Planned() {
        var batch = Batch.plan(validDraft(), BATCH_NUMBER).unsafeGetValue();
        assertThat(batch.status()).isEqualTo(BatchStatus.PLANNED);

        var result = batch.startProduction();

        assertThat(result.isSuccess()).isTrue();
        assertThat(batch.status()).isEqualTo(BatchStatus.IN_PRODUCTION);
    }

    @Test
    @DisplayName("should update updatedAt on transition")
    void should_UpdateTimestamp_When_StartingProduction() {
        var batch = Batch.plan(validDraft(), BATCH_NUMBER).unsafeGetValue();
        var beforeUpdate = batch.updatedAt();

        batch.startProduction();

        assertThat(batch.updatedAt()).isAfterOrEqualTo(beforeUpdate);
    }

    @Test
    @DisplayName("should fail when already IN_PRODUCTION")
    void should_Fail_When_AlreadyInProduction() {
        var result = batchInStatus(BatchStatus.IN_PRODUCTION).startProduction();

        assertThat(result.isFailure()).isTrue();
        assertThat(result.unsafeGetError()).isInstanceOf(BatchError.InvalidStatusTransition.class);
        // The error carries both sides of the rejected transition.
        var err = (BatchError.InvalidStatusTransition) result.unsafeGetError();
        assertThat(err.current()).isEqualTo(BatchStatus.IN_PRODUCTION);
        assertThat(err.target()).isEqualTo(BatchStatus.IN_PRODUCTION);
    }

    @Test
    @DisplayName("should fail when COMPLETED")
    void should_Fail_When_Completed() {
        var result = batchInStatus(BatchStatus.COMPLETED).startProduction();

        assertThat(result.isFailure()).isTrue();
        assertThat(result.unsafeGetError()).isInstanceOf(BatchError.InvalidStatusTransition.class);
    }

    @Test
    @DisplayName("should fail when CANCELLED")
    void should_Fail_When_Cancelled() {
        var result = batchInStatus(BatchStatus.CANCELLED).startProduction();

        assertThat(result.isFailure()).isTrue();
        assertThat(result.unsafeGetError()).isInstanceOf(BatchError.InvalidStatusTransition.class);
    }
}
@Nested
@DisplayName("recordConsumption()")
class RecordConsumption {

    /** Fresh batch moved into IN_PRODUCTION via the normal transition. */
    private Batch inProductionBatch() {
        var batch = Batch.plan(validDraft(), BATCH_NUMBER).unsafeGetValue();
        batch.startProduction();
        return batch;
    }

    /** Reconstituted batch in COMPLETED status (not reachable via plan()). */
    private Batch completedBatch() {
        return Batch.reconstitute(
                BatchId.of("b-1"), BATCH_NUMBER, RecipeId.of("r-1"),
                BatchStatus.COMPLETED,
                Quantity.of(new BigDecimal("100"), UnitOfMeasure.KILOGRAM).unsafeGetValue(),
                PRODUCTION_DATE, BEST_BEFORE_DATE,
                OffsetDateTime.now(ZoneOffset.UTC), OffsetDateTime.now(ZoneOffset.UTC),
                List.of()
        );
    }

    @Test
    @DisplayName("should record consumption when IN_PRODUCTION")
    void should_RecordConsumption_When_InProduction() {
        var batch = inProductionBatch();

        var result = batch.recordConsumption(
                new ConsumptionDraft("input-1", "article-1", "5.0", "KILOGRAM"));

        assertThat(result.isSuccess()).isTrue();
        assertThat(batch.consumptions()).hasSize(1);
        assertThat(result.unsafeGetValue().inputBatchId().value()).isEqualTo("input-1");
    }

    @Test
    @DisplayName("should record multiple different consumptions")
    void should_RecordMultiple_When_DifferentInputBatches() {
        var batch = inProductionBatch();

        batch.recordConsumption(new ConsumptionDraft("input-1", "article-1", "5.0", "KILOGRAM"));
        batch.recordConsumption(new ConsumptionDraft("input-2", "article-2", "3.0", "LITER"));

        assertThat(batch.consumptions()).hasSize(2);
    }

    @Test
    @DisplayName("should fail when not IN_PRODUCTION (PLANNED)")
    void should_Fail_When_Planned() {
        var planned = Batch.plan(validDraft(), BATCH_NUMBER).unsafeGetValue();

        var result = planned.recordConsumption(
                new ConsumptionDraft("input-1", "article-1", "5.0", "KILOGRAM"));

        assertThat(result.isFailure()).isTrue();
        assertThat(result.unsafeGetError()).isInstanceOf(BatchError.NotInProduction.class);
    }

    @Test
    @DisplayName("should fail when not IN_PRODUCTION (COMPLETED)")
    void should_Fail_When_Completed() {
        var result = completedBatch().recordConsumption(
                new ConsumptionDraft("input-1", "article-1", "5.0", "KILOGRAM"));

        assertThat(result.isFailure()).isTrue();
        assertThat(result.unsafeGetError()).isInstanceOf(BatchError.NotInProduction.class);
    }

    @Test
    @DisplayName("should fail when duplicate inputBatchId")
    void should_Fail_When_DuplicateInputBatch() {
        var batch = inProductionBatch();
        batch.recordConsumption(new ConsumptionDraft("input-1", "article-1", "5.0", "KILOGRAM"));

        // Same inputBatchId, different article — still a duplicate.
        var result = batch.recordConsumption(
                new ConsumptionDraft("input-1", "article-2", "3.0", "LITER"));

        assertThat(result.isFailure()).isTrue();
        assertThat(result.unsafeGetError()).isInstanceOf(BatchError.DuplicateInputBatch.class);
        assertThat(batch.consumptions()).hasSize(1);
    }

    @Test
    @DisplayName("should fail when consumption quantity is invalid")
    void should_Fail_When_InvalidQuantity() {
        var batch = inProductionBatch();

        var result = batch.recordConsumption(
                new ConsumptionDraft("input-1", "article-1", "0", "KILOGRAM"));

        assertThat(result.isFailure()).isTrue();
        assertThat(result.unsafeGetError()).isInstanceOf(BatchError.InvalidConsumptionQuantity.class);
        assertThat(batch.consumptions()).isEmpty();
    }

    @Test
    @DisplayName("should update updatedAt when recording consumption")
    void should_UpdateTimestamp_When_RecordingConsumption() {
        var batch = inProductionBatch();
        var beforeRecord = batch.updatedAt();

        batch.recordConsumption(new ConsumptionDraft("input-1", "article-1", "5.0", "KILOGRAM"));

        assertThat(batch.updatedAt()).isAfterOrEqualTo(beforeRecord);
    }
}
@Nested
@DisplayName("reconstitute()")
class Reconstitute {
@ -189,7 +378,8 @@ class BatchTest {
PRODUCTION_DATE,
BEST_BEFORE_DATE,
OffsetDateTime.now(ZoneOffset.UTC),
OffsetDateTime.now(ZoneOffset.UTC)
OffsetDateTime.now(ZoneOffset.UTC),
List.of()
);
assertThat(batch.id().value()).isEqualTo("batch-1");

View file

@ -0,0 +1,125 @@
package de.effigenix.domain.production;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import java.math.BigDecimal;
import static org.assertj.core.api.Assertions.assertThat;
/** Unit tests for the Consumption entity's factory validation. */
@DisplayName("Consumption Entity")
class ConsumptionTest {

    /** Draft with every field valid. */
    private ConsumptionDraft validDraft() {
        return new ConsumptionDraft("input-batch-1", "article-1", "10.5", "KILOGRAM");
    }

    @Nested
    @DisplayName("create()")
    class Create {

        // Shared assertion: creation must fail with the given error type.
        private void assertFailsWith(ConsumptionDraft draft, Class<?> errorType) {
            var result = Consumption.create(draft);
            assertThat(result.isFailure()).isTrue();
            assertThat(result.unsafeGetError()).isInstanceOf(errorType);
        }

        @Test
        @DisplayName("should create consumption with valid draft")
        void should_CreateConsumption_When_ValidDraft() {
            var result = Consumption.create(validDraft());

            assertThat(result.isSuccess()).isTrue();
            var created = result.unsafeGetValue();
            assertThat(created.id()).isNotNull();
            assertThat(created.inputBatchId().value()).isEqualTo("input-batch-1");
            assertThat(created.articleId().value()).isEqualTo("article-1");
            assertThat(created.quantityUsed().amount()).isEqualByComparingTo(new BigDecimal("10.5"));
            assertThat(created.quantityUsed().uom().name()).isEqualTo("KILOGRAM");
            assertThat(created.consumedAt()).isNotNull();
        }

        @Test
        @DisplayName("should fail when inputBatchId is blank")
        void should_Fail_When_InputBatchIdBlank() {
            assertFailsWith(new ConsumptionDraft("", "article-1", "10", "KILOGRAM"),
                    BatchError.ValidationFailure.class);
        }

        @Test
        @DisplayName("should fail when inputBatchId is null")
        void should_Fail_When_InputBatchIdNull() {
            assertFailsWith(new ConsumptionDraft(null, "article-1", "10", "KILOGRAM"),
                    BatchError.ValidationFailure.class);
        }

        @Test
        @DisplayName("should fail when articleId is blank")
        void should_Fail_When_ArticleIdBlank() {
            assertFailsWith(new ConsumptionDraft("input-batch-1", "", "10", "KILOGRAM"),
                    BatchError.ValidationFailure.class);
        }

        @Test
        @DisplayName("should fail when articleId is null")
        void should_Fail_When_ArticleIdNull() {
            assertFailsWith(new ConsumptionDraft("input-batch-1", null, "10", "KILOGRAM"),
                    BatchError.ValidationFailure.class);
        }

        @Test
        @DisplayName("should fail when quantity is zero")
        void should_Fail_When_QuantityZero() {
            assertFailsWith(new ConsumptionDraft("input-batch-1", "article-1", "0", "KILOGRAM"),
                    BatchError.InvalidConsumptionQuantity.class);
        }

        @Test
        @DisplayName("should fail when quantity is negative")
        void should_Fail_When_QuantityNegative() {
            assertFailsWith(new ConsumptionDraft("input-batch-1", "article-1", "-5", "KILOGRAM"),
                    BatchError.InvalidConsumptionQuantity.class);
        }

        @Test
        @DisplayName("should fail when quantity is not a number")
        void should_Fail_When_QuantityNotANumber() {
            assertFailsWith(new ConsumptionDraft("input-batch-1", "article-1", "abc", "KILOGRAM"),
                    BatchError.InvalidConsumptionQuantity.class);
        }

        @Test
        @DisplayName("should fail when unit is invalid")
        void should_Fail_When_UnitInvalid() {
            assertFailsWith(new ConsumptionDraft("input-batch-1", "article-1", "10", "INVALID_UNIT"),
                    BatchError.InvalidConsumptionQuantity.class);
        }
    }
}

View file

@ -0,0 +1,306 @@
package de.effigenix.infrastructure.production.web;
import de.effigenix.domain.usermanagement.RoleName;
import de.effigenix.infrastructure.AbstractIntegrationTest;
import de.effigenix.infrastructure.production.web.dto.PlanBatchRequest;
import de.effigenix.infrastructure.production.web.dto.RecordConsumptionRequest;
import de.effigenix.infrastructure.usermanagement.persistence.entity.RoleEntity;
import de.effigenix.infrastructure.usermanagement.persistence.entity.UserEntity;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.springframework.http.MediaType;
import java.time.LocalDate;
import java.util.Set;
import java.util.UUID;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
/**
 * End-to-end tests for the consumption-recording endpoint. Each scenario drives
 * the full HTTP stack: plan a batch, optionally start it, then POST consumptions
 * and assert status codes and JSON error codes.
 */
@DisplayName("RecordConsumption Integration Tests")
class RecordConsumptionIntegrationTest extends AbstractIntegrationTest {

    // Bearer tokens for an authorized (admin) and an unauthorized (viewer) actor.
    private String adminToken;
    private String viewerToken;

    private static final LocalDate PRODUCTION_DATE = LocalDate.of(2026, 3, 1);
    private static final LocalDate BEST_BEFORE_DATE = LocalDate.of(2026, 6, 1);

    @BeforeEach
    void setUp() throws Exception {
        // Admin token carries all batch/recipe permissions; the viewer token only
        // USER_READ, so consumption calls with it must be rejected with 403.
        RoleEntity adminRole = createRole(RoleName.ADMIN, "Admin");
        RoleEntity viewerRole = createRole(RoleName.PRODUCTION_WORKER, "Viewer");
        UserEntity admin = createUser("cons.admin", "cons.admin@test.com", Set.of(adminRole), "BRANCH-01");
        UserEntity viewer = createUser("cons.viewer", "cons.viewer@test.com", Set.of(viewerRole), "BRANCH-01");
        adminToken = generateToken(admin.getId(), "cons.admin", "BATCH_WRITE,BATCH_READ,RECIPE_WRITE,RECIPE_READ");
        viewerToken = generateToken(viewer.getId(), "cons.viewer", "USER_READ");
    }

    @Nested
    @DisplayName("POST /api/production/batches/{id}/consumptions Verbrauch dokumentieren")
    class RecordConsumptionEndpoint {

        @Test
        @DisplayName("Verbrauch dokumentieren bei IN_PRODUCTION → 201")
        void recordConsumption_withInProductionBatch_returns201() throws Exception {
            String batchId = createInProductionBatch();
            var request = new RecordConsumptionRequest(
                    UUID.randomUUID().toString(), UUID.randomUUID().toString(), "10.5", "KILOGRAM");
            // NOTE(review): response renders the quantity with six decimal places
            // ("10.500000") — presumably the DTO normalizes scale; confirm against
            // the response-mapping code.
            mockMvc.perform(post("/api/production/batches/{id}/consumptions", batchId)
                    .header("Authorization", "Bearer " + adminToken)
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(objectMapper.writeValueAsString(request)))
                    .andExpect(status().isCreated())
                    .andExpect(jsonPath("$.id").isNotEmpty())
                    .andExpect(jsonPath("$.inputBatchId").value(request.inputBatchId()))
                    .andExpect(jsonPath("$.articleId").value(request.articleId()))
                    .andExpect(jsonPath("$.quantityUsed").value("10.500000"))
                    .andExpect(jsonPath("$.quantityUsedUnit").value("KILOGRAM"))
                    .andExpect(jsonPath("$.consumedAt").isNotEmpty());
        }

        @Test
        @DisplayName("Verbrauch wird in BatchResponse angezeigt")
        void recordConsumption_visibleInBatchResponse() throws Exception {
            // Record one consumption, then re-read the batch and expect it listed.
            String batchId = createInProductionBatch();
            String inputBatchId = UUID.randomUUID().toString();
            var request = new RecordConsumptionRequest(inputBatchId, UUID.randomUUID().toString(), "5.0", "LITER");
            mockMvc.perform(post("/api/production/batches/{id}/consumptions", batchId)
                    .header("Authorization", "Bearer " + adminToken)
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(objectMapper.writeValueAsString(request)))
                    .andExpect(status().isCreated());
            mockMvc.perform(get("/api/production/batches/{id}", batchId)
                    .header("Authorization", "Bearer " + adminToken))
                    .andExpect(status().isOk())
                    .andExpect(jsonPath("$.consumptions").isArray())
                    .andExpect(jsonPath("$.consumptions.length()").value(1))
                    .andExpect(jsonPath("$.consumptions[0].inputBatchId").value(inputBatchId));
        }

        @Test
        @DisplayName("Mehrere Verbrauchseinträge möglich")
        void recordConsumption_multipleConsumptions() throws Exception {
            // Two distinct input batches accumulate on the same production batch.
            String batchId = createInProductionBatch();
            var request1 = new RecordConsumptionRequest(
                    UUID.randomUUID().toString(), UUID.randomUUID().toString(), "5.0", "KILOGRAM");
            var request2 = new RecordConsumptionRequest(
                    UUID.randomUUID().toString(), UUID.randomUUID().toString(), "3.0", "LITER");
            mockMvc.perform(post("/api/production/batches/{id}/consumptions", batchId)
                    .header("Authorization", "Bearer " + adminToken)
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(objectMapper.writeValueAsString(request1)))
                    .andExpect(status().isCreated());
            mockMvc.perform(post("/api/production/batches/{id}/consumptions", batchId)
                    .header("Authorization", "Bearer " + adminToken)
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(objectMapper.writeValueAsString(request2)))
                    .andExpect(status().isCreated());
            mockMvc.perform(get("/api/production/batches/{id}", batchId)
                    .header("Authorization", "Bearer " + adminToken))
                    .andExpect(jsonPath("$.consumptions.length()").value(2));
        }

        @Test
        @DisplayName("Doppelte InputBatchId → 409")
        void recordConsumption_duplicateInputBatch_returns409() throws Exception {
            // Posting the identical request twice: first 201, second conflicts.
            String batchId = createInProductionBatch();
            String inputBatchId = UUID.randomUUID().toString();
            var request = new RecordConsumptionRequest(inputBatchId, UUID.randomUUID().toString(), "5.0", "KILOGRAM");
            mockMvc.perform(post("/api/production/batches/{id}/consumptions", batchId)
                    .header("Authorization", "Bearer " + adminToken)
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(objectMapper.writeValueAsString(request)))
                    .andExpect(status().isCreated());
            mockMvc.perform(post("/api/production/batches/{id}/consumptions", batchId)
                    .header("Authorization", "Bearer " + adminToken)
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(objectMapper.writeValueAsString(request)))
                    .andExpect(status().isConflict())
                    .andExpect(jsonPath("$.code").value("BATCH_DUPLICATE_INPUT_BATCH"));
        }

        @Test
        @DisplayName("Charge nicht IN_PRODUCTION (PLANNED) → 409")
        void recordConsumption_plannedBatch_returns409() throws Exception {
            // Batch exists but was never started — domain rejects with conflict.
            String batchId = createPlannedBatch();
            var request = new RecordConsumptionRequest(
                    UUID.randomUUID().toString(), UUID.randomUUID().toString(), "5.0", "KILOGRAM");
            mockMvc.perform(post("/api/production/batches/{id}/consumptions", batchId)
                    .header("Authorization", "Bearer " + adminToken)
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(objectMapper.writeValueAsString(request)))
                    .andExpect(status().isConflict())
                    .andExpect(jsonPath("$.code").value("BATCH_NOT_IN_PRODUCTION"));
        }

        @Test
        @DisplayName("Ungültige Menge (0) → 400")
        void recordConsumption_zeroQuantity_returns400() throws Exception {
            String batchId = createInProductionBatch();
            var request = new RecordConsumptionRequest(
                    UUID.randomUUID().toString(), UUID.randomUUID().toString(), "0", "KILOGRAM");
            mockMvc.perform(post("/api/production/batches/{id}/consumptions", batchId)
                    .header("Authorization", "Bearer " + adminToken)
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(objectMapper.writeValueAsString(request)))
                    .andExpect(status().isBadRequest())
                    .andExpect(jsonPath("$.code").value("BATCH_INVALID_CONSUMPTION_QUANTITY"));
        }

        @Test
        @DisplayName("Ungültige Unit → 400")
        void recordConsumption_invalidUnit_returns400() throws Exception {
            // Unknown unit of measure maps to the same quantity error code.
            String batchId = createInProductionBatch();
            var request = new RecordConsumptionRequest(
                    UUID.randomUUID().toString(), UUID.randomUUID().toString(), "5.0", "INVALID_UNIT");
            mockMvc.perform(post("/api/production/batches/{id}/consumptions", batchId)
                    .header("Authorization", "Bearer " + adminToken)
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(objectMapper.writeValueAsString(request)))
                    .andExpect(status().isBadRequest())
                    .andExpect(jsonPath("$.code").value("BATCH_INVALID_CONSUMPTION_QUANTITY"));
        }

        @Test
        @DisplayName("Charge nicht gefunden → 404")
        void recordConsumption_batchNotFound_returns404() throws Exception {
            var request = new RecordConsumptionRequest(
                    UUID.randomUUID().toString(), UUID.randomUUID().toString(), "5.0", "KILOGRAM");
            mockMvc.perform(post("/api/production/batches/{id}/consumptions", UUID.randomUUID().toString())
                    .header("Authorization", "Bearer " + adminToken)
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(objectMapper.writeValueAsString(request)))
                    .andExpect(status().isNotFound())
                    .andExpect(jsonPath("$.code").value("BATCH_NOT_FOUND"));
        }

        @Test
        @DisplayName("Leere Pflichtfelder → 400 (Bean Validation)")
        void recordConsumption_blankFields_returns400() throws Exception {
            // Bean Validation rejects the request before the use case runs,
            // so the random batch id is never looked up.
            var request = new RecordConsumptionRequest("", "", "", "");
            mockMvc.perform(post("/api/production/batches/{id}/consumptions", UUID.randomUUID().toString())
                    .header("Authorization", "Bearer " + adminToken)
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(objectMapper.writeValueAsString(request)))
                    .andExpect(status().isBadRequest());
        }
    }

    @Nested
    @DisplayName("Authorization")
    class AuthTests {

        @Test
        @DisplayName("Ohne BATCH_WRITE → 403")
        void recordConsumption_withViewerToken_returns403() throws Exception {
            var request = new RecordConsumptionRequest(
                    UUID.randomUUID().toString(), UUID.randomUUID().toString(), "5.0", "KILOGRAM");
            mockMvc.perform(post("/api/production/batches/{id}/consumptions", UUID.randomUUID().toString())
                    .header("Authorization", "Bearer " + viewerToken)
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(objectMapper.writeValueAsString(request)))
                    .andExpect(status().isForbidden());
        }

        @Test
        @DisplayName("Ohne Token → 401")
        void recordConsumption_withoutToken_returns401() throws Exception {
            var request = new RecordConsumptionRequest(
                    UUID.randomUUID().toString(), UUID.randomUUID().toString(), "5.0", "KILOGRAM");
            mockMvc.perform(post("/api/production/batches/{id}/consumptions", UUID.randomUUID().toString())
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(objectMapper.writeValueAsString(request)))
                    .andExpect(status().isUnauthorized());
        }
    }

    // ==================== Hilfsmethoden ====================

    /** Plans a new batch against a freshly-created active recipe and returns its id. */
    private String createPlannedBatch() throws Exception {
        String recipeId = createActiveRecipe();
        var request = new PlanBatchRequest(recipeId, "100", "KILOGRAM", PRODUCTION_DATE, BEST_BEFORE_DATE);
        var result = mockMvc.perform(post("/api/production/batches")
                .header("Authorization", "Bearer " + adminToken)
                .contentType(MediaType.APPLICATION_JSON)
                .content(objectMapper.writeValueAsString(request)))
                .andExpect(status().isCreated())
                .andReturn();
        return objectMapper.readTree(result.getResponse().getContentAsString()).get("id").asText();
    }

    /** Plans a batch and moves it to IN_PRODUCTION via the start endpoint. */
    private String createInProductionBatch() throws Exception {
        String batchId = createPlannedBatch();
        mockMvc.perform(post("/api/production/batches/{id}/start", batchId)
                .header("Authorization", "Bearer " + adminToken))
                .andExpect(status().isOk());
        return batchId;
    }

    /**
     * Creates a recipe with one ingredient and activates it; a batch can only be
     * planned against an active recipe. The name is randomized so repeated test
     * runs do not collide.
     */
    private String createActiveRecipe() throws Exception {
        String json = """
                {
                  "name": "Test-Rezept-%s",
                  "version": 1,
                  "type": "FINISHED_PRODUCT",
                  "description": "Testrezept",
                  "yieldPercentage": 85,
                  "shelfLifeDays": 14,
                  "outputQuantity": "100",
                  "outputUom": "KILOGRAM",
                  "articleId": "article-123"
                }
                """.formatted(UUID.randomUUID().toString().substring(0, 8));
        var result = mockMvc.perform(post("/api/recipes")
                .header("Authorization", "Bearer " + adminToken)
                .contentType(MediaType.APPLICATION_JSON)
                .content(json))
                .andExpect(status().isCreated())
                .andReturn();
        String recipeId = objectMapper.readTree(result.getResponse().getContentAsString()).get("id").asText();
        String ingredientJson = """
                {"position": 1, "articleId": "%s", "quantity": "5.5", "uom": "KILOGRAM", "substitutable": false}
                """.formatted(UUID.randomUUID().toString());
        mockMvc.perform(post("/api/recipes/{id}/ingredients", recipeId)
                .header("Authorization", "Bearer " + adminToken)
                .contentType(MediaType.APPLICATION_JSON)
                .content(ingredientJson))
                .andExpect(status().isCreated());
        mockMvc.perform(post("/api/recipes/{id}/activate", recipeId)
                .header("Authorization", "Bearer " + adminToken))
                .andExpect(status().isOk());
        return recipeId;
    }
}

View file

@ -0,0 +1,165 @@
package de.effigenix.infrastructure.production.web;
import de.effigenix.domain.usermanagement.RoleName;
import de.effigenix.infrastructure.AbstractIntegrationTest;
import de.effigenix.infrastructure.production.web.dto.PlanBatchRequest;
import de.effigenix.infrastructure.usermanagement.persistence.entity.RoleEntity;
import de.effigenix.infrastructure.usermanagement.persistence.entity.UserEntity;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.springframework.http.MediaType;
import java.time.LocalDate;
import java.util.Set;
import java.util.UUID;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
@DisplayName("StartBatch Integration Tests")
class StartBatchIntegrationTest extends AbstractIntegrationTest {

    // Fixed production window reused by every planned batch in this class.
    private static final LocalDate PRODUCTION_DATE = LocalDate.of(2026, 3, 1);
    private static final LocalDate BEST_BEFORE_DATE = LocalDate.of(2026, 6, 1);

    // Token carrying BATCH_WRITE and the recipe permissions needed by the fixtures.
    private String adminToken;
    // Token with USER_READ only — requests with it must be rejected with 403.
    private String viewerToken;

    @BeforeEach
    void setUp() throws Exception {
        var roleAdmin = createRole(RoleName.ADMIN, "Admin");
        var roleViewer = createRole(RoleName.PRODUCTION_WORKER, "Viewer");
        var adminUser = createUser("start.admin", "start.admin@test.com", Set.of(roleAdmin), "BRANCH-01");
        var viewerUser = createUser("start.viewer", "start.viewer@test.com", Set.of(roleViewer), "BRANCH-01");
        adminToken = generateToken(adminUser.getId(), "start.admin", "BATCH_WRITE,BATCH_READ,RECIPE_WRITE,RECIPE_READ");
        viewerToken = generateToken(viewerUser.getId(), "start.viewer", "USER_READ");
    }

    @Nested
    @DisplayName("POST /api/production/batches/{id}/start Produktion starten")
    class StartBatchEndpoint {

        @Test
        @DisplayName("PLANNED Charge starten → 200 mit IN_PRODUCTION Status")
        void startBatch_withPlannedBatch_returns200() throws Exception {
            var plannedId = createPlannedBatch();
            var startRequest = post("/api/production/batches/{id}/start", plannedId)
                    .header("Authorization", "Bearer " + adminToken);
            mockMvc.perform(startRequest)
                    .andExpect(status().isOk())
                    .andExpect(jsonPath("$.id").value(plannedId))
                    .andExpect(jsonPath("$.status").value("IN_PRODUCTION"))
                    // A freshly started batch has no recorded consumptions yet.
                    .andExpect(jsonPath("$.consumptions").isArray())
                    .andExpect(jsonPath("$.consumptions").isEmpty());
        }

        @Test
        @DisplayName("Bereits gestartete Charge → 409")
        void startBatch_alreadyInProduction_returns409() throws Exception {
            var plannedId = createPlannedBatch();
            // First start succeeds and moves the batch out of PLANNED.
            mockMvc.perform(post("/api/production/batches/{id}/start", plannedId)
                            .header("Authorization", "Bearer " + adminToken))
                    .andExpect(status().isOk());
            // Second start hits an invalid status transition.
            mockMvc.perform(post("/api/production/batches/{id}/start", plannedId)
                            .header("Authorization", "Bearer " + adminToken))
                    .andExpect(status().isConflict())
                    .andExpect(jsonPath("$.code").value("BATCH_INVALID_STATUS_TRANSITION"));
        }

        @Test
        @DisplayName("Charge nicht gefunden → 404")
        void startBatch_notFound_returns404() throws Exception {
            var unknownId = UUID.randomUUID().toString();
            mockMvc.perform(post("/api/production/batches/{id}/start", unknownId)
                            .header("Authorization", "Bearer " + adminToken))
                    .andExpect(status().isNotFound())
                    .andExpect(jsonPath("$.code").value("BATCH_NOT_FOUND"));
        }
    }

    @Nested
    @DisplayName("Authorization")
    class AuthTests {

        @Test
        @DisplayName("Ohne BATCH_WRITE → 403")
        void startBatch_withViewerToken_returns403() throws Exception {
            mockMvc.perform(post("/api/production/batches/{id}/start", UUID.randomUUID().toString())
                            .header("Authorization", "Bearer " + viewerToken))
                    .andExpect(status().isForbidden());
        }

        @Test
        @DisplayName("Ohne Token → 401")
        void startBatch_withoutToken_returns401() throws Exception {
            mockMvc.perform(post("/api/production/batches/{id}/start", UUID.randomUUID().toString()))
                    .andExpect(status().isUnauthorized());
        }
    }

    // ==================== Hilfsmethoden ====================

    /**
     * Plans a batch against a freshly activated recipe.
     *
     * @return the id of the batch in PLANNED state
     */
    private String createPlannedBatch() throws Exception {
        var recipeId = createActiveRecipe();
        var planRequest = new PlanBatchRequest(
                recipeId, "100", "KILOGRAM", PRODUCTION_DATE, BEST_BEFORE_DATE);
        var planResponse = mockMvc.perform(post("/api/production/batches")
                        .header("Authorization", "Bearer " + adminToken)
                        .contentType(MediaType.APPLICATION_JSON)
                        .content(objectMapper.writeValueAsString(planRequest)))
                .andExpect(status().isCreated())
                .andReturn();
        return objectMapper
                .readTree(planResponse.getResponse().getContentAsString())
                .get("id")
                .asText();
    }

    /**
     * Creates a uniquely named recipe, attaches one ingredient, and
     * activates it so that batches can be planned against it.
     *
     * @return the id of the activated recipe
     */
    private String createActiveRecipe() throws Exception {
        // Unique suffix keeps recipe names collision-free across tests.
        var nameSuffix = UUID.randomUUID().toString().substring(0, 8);
        var recipeJson = """
                {
                "name": "Test-Rezept-%s",
                "version": 1,
                "type": "FINISHED_PRODUCT",
                "description": "Testrezept",
                "yieldPercentage": 85,
                "shelfLifeDays": 14,
                "outputQuantity": "100",
                "outputUom": "KILOGRAM",
                "articleId": "article-123"
                }
                """.formatted(nameSuffix);
        var createResponse = mockMvc.perform(post("/api/recipes")
                        .header("Authorization", "Bearer " + adminToken)
                        .contentType(MediaType.APPLICATION_JSON)
                        .content(recipeJson))
                .andExpect(status().isCreated())
                .andReturn();
        var recipeId = objectMapper
                .readTree(createResponse.getResponse().getContentAsString())
                .get("id")
                .asText();
        // One ingredient is the minimum required before activation.
        var ingredientJson = """
                {"position": 1, "articleId": "%s", "quantity": "5.5", "uom": "KILOGRAM", "substitutable": false}
                """.formatted(UUID.randomUUID().toString());
        mockMvc.perform(post("/api/recipes/{id}/ingredients", recipeId)
                        .header("Authorization", "Bearer " + adminToken)
                        .contentType(MediaType.APPLICATION_JSON)
                        .content(ingredientJson))
                .andExpect(status().isCreated());
        mockMvc.perform(post("/api/recipes/{id}/activate", recipeId)
                        .header("Authorization", "Bearer " + adminToken))
                .andExpect(status().isOk());
        return recipeId;
    }
}