1
0
Fork 0
mirror of https://github.com/s-frick/effigenix.git synced 2026-03-28 15:59:35 +01:00

feat(production): Produktion starten und Rohstoffverbrauch dokumentieren (StartBatch, RecordConsumption)

PLANNED-Chargen können in Produktion genommen werden (IN_PRODUCTION),
anschließend wird der Rohstoff-Verbrauch pro InputBatch dokumentiert.
Bildet die Grundlage für die Chargen-Genealogie (Tracing).
This commit is contained in:
Sebastian Frick 2026-02-20 12:15:06 +01:00
parent 8c042925eb
commit a9f5956812
31 changed files with 1733 additions and 11 deletions

View file

@ -0,0 +1,63 @@
package de.effigenix.application.production;
import de.effigenix.application.production.command.RecordConsumptionCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
/**
 * Use case: document raw-material consumption for a batch that is currently
 * in production. Authorization check, aggregate load, domain call and save
 * are each funneled through the shared Result type.
 */
@Transactional
public class RecordConsumption {

    private final BatchRepository batchRepository;
    private final AuthorizationPort authorizationPort;

    public RecordConsumption(BatchRepository batchRepository, AuthorizationPort authorizationPort) {
        this.batchRepository = batchRepository;
        this.authorizationPort = authorizationPort;
    }

    /**
     * Records one consumption on the batch named by {@code cmd.batchId()}.
     *
     * @param cmd         raw input-batch / article / quantity values
     * @param performedBy acting user; must hold BATCH_WRITE
     * @return the recorded {@link Consumption}, or a {@link BatchError} on rejection
     */
    public Result<BatchError, Consumption> execute(RecordConsumptionCommand cmd, ActorId performedBy) {
        if (!authorizationPort.can(performedBy, ProductionAction.BATCH_WRITE)) {
            return Result.failure(new BatchError.Unauthorized("Not authorized to record consumptions"));
        }

        Batch batch;
        switch (loadBatch(BatchId.of(cmd.batchId()))) {
            case Result.Failure(var err) -> { return Result.failure(err); }
            case Result.Success(var found) -> batch = found;
        }

        var draft = new ConsumptionDraft(
                cmd.inputBatchId(),
                cmd.articleId(),
                cmd.quantityUsed(),
                cmd.quantityUnit()
        );

        Consumption recorded;
        switch (batch.recordConsumption(draft)) {
            case Result.Failure(var err) -> { return Result.failure(err); }
            case Result.Success(var created) -> recorded = created;
        }

        switch (batchRepository.save(batch)) {
            case Result.Failure(var err) -> {
                return Result.failure(new BatchError.RepositoryFailure(err.message()));
            }
            case Result.Success(var ignored) -> { }
        }
        return Result.success(recorded);
    }

    // Loads the aggregate, translating repository failure and absence into domain errors.
    private Result<BatchError, Batch> loadBatch(BatchId batchId) {
        return switch (batchRepository.findById(batchId)) {
            case Result.Failure(var err) ->
                    Result.failure(new BatchError.RepositoryFailure(err.message()));
            case Result.Success(var maybeBatch) -> {
                if (maybeBatch.isEmpty()) {
                    yield Result.failure(new BatchError.BatchNotFound(batchId));
                }
                yield Result.success(maybeBatch.get());
            }
        };
    }
}

View file

@ -0,0 +1,55 @@
package de.effigenix.application.production;
import de.effigenix.application.production.command.StartBatchCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
/**
 * Use case: move a PLANNED batch into production (IN_PRODUCTION).
 * Authorization check, aggregate load, domain transition and save are each
 * funneled through the shared Result type.
 */
@Transactional
public class StartBatch {

    private final BatchRepository batchRepository;
    private final AuthorizationPort authorizationPort;

    public StartBatch(BatchRepository batchRepository, AuthorizationPort authorizationPort) {
        this.batchRepository = batchRepository;
        this.authorizationPort = authorizationPort;
    }

    /**
     * Starts production for the batch named by {@code cmd.batchId()}.
     *
     * @param cmd         carries the target batch id
     * @param performedBy acting user; must hold BATCH_WRITE
     * @return the updated {@link Batch}, or a {@link BatchError} on rejection
     */
    public Result<BatchError, Batch> execute(StartBatchCommand cmd, ActorId performedBy) {
        if (!authorizationPort.can(performedBy, ProductionAction.BATCH_WRITE)) {
            return Result.failure(new BatchError.Unauthorized("Not authorized to start batches"));
        }

        Batch batch;
        switch (loadBatch(BatchId.of(cmd.batchId()))) {
            case Result.Failure(var err) -> { return Result.failure(err); }
            case Result.Success(var found) -> batch = found;
        }

        // Domain rule: only PLANNED batches may start production.
        switch (batch.startProduction()) {
            case Result.Failure(var err) -> { return Result.failure(err); }
            case Result.Success(var ignored) -> { }
        }

        switch (batchRepository.save(batch)) {
            case Result.Failure(var err) -> {
                return Result.failure(new BatchError.RepositoryFailure(err.message()));
            }
            case Result.Success(var ignored) -> { }
        }
        return Result.success(batch);
    }

    // Loads the aggregate, translating repository failure and absence into domain errors.
    private Result<BatchError, Batch> loadBatch(BatchId batchId) {
        return switch (batchRepository.findById(batchId)) {
            case Result.Failure(var err) ->
                    Result.failure(new BatchError.RepositoryFailure(err.message()));
            case Result.Success(var maybeBatch) -> {
                if (maybeBatch.isEmpty()) {
                    yield Result.failure(new BatchError.BatchNotFound(batchId));
                }
                yield Result.success(maybeBatch.get());
            }
        };
    }
}

View file

@ -0,0 +1,9 @@
package de.effigenix.application.production.command;
/**
 * Command payload for the RecordConsumption use case.
 * All values are raw strings from the transport layer; parsing and
 * validation happen in the domain ({@code Consumption.create}).
 */
public record RecordConsumptionCommand(
        String batchId,
        String inputBatchId,
        String articleId,
        String quantityUsed,
        String quantityUnit) {}

View file

@ -0,0 +1,3 @@
package de.effigenix.application.production.command;
/** Command payload for the StartBatch use case; {@code batchId} is the raw batch id string. */
public record StartBatchCommand(String batchId) {}

View file

@ -8,6 +8,9 @@ import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* Batch aggregate root.
@ -18,6 +21,10 @@ import java.time.ZoneOffset;
* 3. BatchNumber is auto-generated (format P-YYYY-MM-DD-XXX)
* 4. New batches always start in PLANNED status
* 5. RecipeId must reference an ACTIVE recipe (enforced by Use Case)
* 6. startProduction() only allowed from PLANNED status
* 7. recordConsumption() only allowed in IN_PRODUCTION status
* 8. No duplicate inputBatchId within consumptions
* 9. Consumption quantity must be positive
*/
public class Batch {
@ -30,6 +37,7 @@ public class Batch {
private final LocalDate bestBeforeDate;
private final OffsetDateTime createdAt;
private OffsetDateTime updatedAt;
private final List<Consumption> consumptions;
private Batch(
BatchId id,
@ -40,7 +48,8 @@ public class Batch {
LocalDate productionDate,
LocalDate bestBeforeDate,
OffsetDateTime createdAt,
OffsetDateTime updatedAt
OffsetDateTime updatedAt,
List<Consumption> consumptions
) {
this.id = id;
this.batchNumber = batchNumber;
@ -51,6 +60,7 @@ public class Batch {
this.bestBeforeDate = bestBeforeDate;
this.createdAt = createdAt;
this.updatedAt = updatedAt;
this.consumptions = consumptions;
}
public static Result<BatchError, Batch> plan(BatchDraft draft, BatchNumber batchNumber) {
@ -99,10 +109,45 @@ public class Batch {
draft.productionDate(),
draft.bestBeforeDate(),
now,
now
now,
new ArrayList<>()
));
}
/**
 * Transitions this batch PLANNED -> IN_PRODUCTION (invariant 6) and bumps
 * {@code updatedAt}.
 *
 * @return success, or {@link BatchError.InvalidStatusTransition} when the
 *         batch is not currently PLANNED
 */
public Result<BatchError, Void> startProduction() {
    if (status == BatchStatus.PLANNED) {
        this.status = BatchStatus.IN_PRODUCTION;
        this.updatedAt = OffsetDateTime.now(ZoneOffset.UTC);
        return Result.success(null);
    }
    return Result.failure(new BatchError.InvalidStatusTransition(status, BatchStatus.IN_PRODUCTION));
}
/**
 * Records a raw-material consumption against this batch (invariants 7-9).
 * Allowed only while IN_PRODUCTION; rejects a duplicate inputBatchId;
 * quantity/format validation is delegated to {@code Consumption.create}.
 *
 * @param draft raw input values for the consumption
 * @return the created {@link Consumption}, or a {@link BatchError} on rejection
 */
public Result<BatchError, Consumption> recordConsumption(ConsumptionDraft draft) {
    if (status != BatchStatus.IN_PRODUCTION) {
        return Result.failure(new BatchError.NotInProduction(id));
    }
    // The duplicate check only applies when an input batch id was supplied;
    // a blank/null id is rejected later by Consumption.create.
    var rawInputId = draft.inputBatchId();
    if (rawInputId != null && !rawInputId.isBlank()) {
        var candidateId = BatchId.of(rawInputId);
        for (Consumption existing : consumptions) {
            if (existing.inputBatchId().equals(candidateId)) {
                return Result.failure(new BatchError.DuplicateInputBatch(candidateId));
            }
        }
    }
    return switch (Consumption.create(draft)) {
        case Result.Failure(var err) -> Result.failure(err);
        case Result.Success(var created) -> {
            consumptions.add(created);
            this.updatedAt = OffsetDateTime.now(ZoneOffset.UTC);
            yield Result.success(created);
        }
    };
}
public static Batch reconstitute(
BatchId id,
BatchNumber batchNumber,
@ -112,9 +157,11 @@ public class Batch {
LocalDate productionDate,
LocalDate bestBeforeDate,
OffsetDateTime createdAt,
OffsetDateTime updatedAt
OffsetDateTime updatedAt,
List<Consumption> consumptions
) {
return new Batch(id, batchNumber, recipeId, status, plannedQuantity, productionDate, bestBeforeDate, createdAt, updatedAt);
return new Batch(id, batchNumber, recipeId, status, plannedQuantity, productionDate,
bestBeforeDate, createdAt, updatedAt, new ArrayList<>(consumptions));
}
public BatchId id() { return id; }
@ -126,4 +173,5 @@ public class Batch {
public LocalDate bestBeforeDate() { return bestBeforeDate; }
public OffsetDateTime createdAt() { return createdAt; }
public OffsetDateTime updatedAt() { return updatedAt; }
public List<Consumption> consumptions() { return Collections.unmodifiableList(consumptions); }
}

View file

@ -34,6 +34,26 @@ public sealed interface BatchError {
@Override public String code() { return "BATCH_VALIDATION_ERROR"; }
}
// 409: a lifecycle transition was requested from a status that does not allow it
// (e.g. startProduction on a non-PLANNED batch).
record InvalidStatusTransition(BatchStatus current, BatchStatus target) implements BatchError {
@Override public String code() { return "BATCH_INVALID_STATUS_TRANSITION"; }
@Override public String message() { return "Cannot transition from " + current + " to " + target; }
}
// 409: consumptions may only be recorded while the batch is IN_PRODUCTION.
record NotInProduction(BatchId id) implements BatchError {
@Override public String code() { return "BATCH_NOT_IN_PRODUCTION"; }
@Override public String message() { return "Batch '" + id.value() + "' is not in IN_PRODUCTION status"; }
}
// 409: the same input batch may be documented at most once per producing batch
// (mirrored by the DB unique constraint on (batch_id, input_batch_id)).
record DuplicateInputBatch(BatchId inputBatchId) implements BatchError {
@Override public String code() { return "BATCH_DUPLICATE_INPUT_BATCH"; }
@Override public String message() { return "Input batch '" + inputBatchId.value() + "' already recorded"; }
}
// 400: the consumption amount/unit could not be parsed or failed Quantity validation.
record InvalidConsumptionQuantity(String reason) implements BatchError {
@Override public String code() { return "BATCH_INVALID_CONSUMPTION_QUANTITY"; }
@Override public String message() { return "Invalid consumption quantity: " + reason; }
}
// 403: actor lacks the required authority; the record component doubles as message().
record Unauthorized(String message) implements BatchError {
@Override public String code() { return "UNAUTHORIZED"; }
}

View file

@ -0,0 +1,74 @@
package de.effigenix.domain.production;
import de.effigenix.domain.masterdata.ArticleId;
import de.effigenix.shared.common.Quantity;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.common.UnitOfMeasure;
import java.math.BigDecimal;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
/**
 * A single raw-material consumption recorded against a batch.
 * Immutable; created via {@link #create(ConsumptionDraft)} (validating) or
 * {@link #reconstitute} (trusted persistence path, no re-validation).
 */
public class Consumption {

    private final ConsumptionId id;
    private final BatchId inputBatchId;
    private final ArticleId articleId;
    private final Quantity quantityUsed;
    private final OffsetDateTime consumedAt;

    private Consumption(ConsumptionId id, BatchId inputBatchId, ArticleId articleId,
                        Quantity quantityUsed, OffsetDateTime consumedAt) {
        this.id = id;
        this.inputBatchId = inputBatchId;
        this.articleId = articleId;
        this.quantityUsed = quantityUsed;
        this.consumedAt = consumedAt;
    }

    /**
     * Validates the draft and creates a consumption with a generated id and a
     * UTC timestamp.
     *
     * @param draft raw string values from the application layer
     * @return the consumption, or {@link BatchError.ValidationFailure} /
     *         {@link BatchError.InvalidConsumptionQuantity} on rejection
     */
    public static Result<BatchError, Consumption> create(ConsumptionDraft draft) {
        if (draft.inputBatchId() == null || draft.inputBatchId().isBlank()) {
            return Result.failure(new BatchError.ValidationFailure("inputBatchId must not be blank"));
        }
        if (draft.articleId() == null || draft.articleId().isBlank()) {
            return Result.failure(new BatchError.ValidationFailure("articleId must not be blank"));
        }
        // FIX: a null quantityUsed/quantityUnit previously escaped as a
        // NullPointerException (new BigDecimal((String) null) and
        // UnitOfMeasure.valueOf(null) both throw NPE, which neither catch
        // block below covers); reject both as domain errors instead.
        if (draft.quantityUsed() == null || draft.quantityUsed().isBlank()) {
            return Result.failure(new BatchError.InvalidConsumptionQuantity("quantityUsed must not be blank"));
        }
        if (draft.quantityUnit() == null || draft.quantityUnit().isBlank()) {
            return Result.failure(new BatchError.InvalidConsumptionQuantity("quantityUnit must not be blank"));
        }
        Quantity quantity;
        try {
            var amount = new BigDecimal(draft.quantityUsed());
            var uom = UnitOfMeasure.valueOf(draft.quantityUnit());
            switch (Quantity.of(amount, uom)) {
                case Result.Failure(var err) -> {
                    return Result.failure(new BatchError.InvalidConsumptionQuantity(err.toString()));
                }
                case Result.Success(var qty) -> quantity = qty;
            }
        } catch (NumberFormatException e) {
            // Non-numeric amount string.
            return Result.failure(new BatchError.InvalidConsumptionQuantity(
                "Invalid amount format: " + draft.quantityUsed()));
        } catch (IllegalArgumentException e) {
            // Unit string does not name a UnitOfMeasure constant.
            // (NumberFormatException is a subclass, so its catch must come first.)
            return Result.failure(new BatchError.InvalidConsumptionQuantity(
                "Invalid unit: " + draft.quantityUnit()));
        }
        return Result.success(new Consumption(
            ConsumptionId.generate(),
            BatchId.of(draft.inputBatchId()),
            ArticleId.of(draft.articleId()),
            quantity,
            OffsetDateTime.now(ZoneOffset.UTC)
        ));
    }

    /** Rebuilds a consumption from persistence without re-validating. */
    public static Consumption reconstitute(ConsumptionId id, BatchId inputBatchId, ArticleId articleId,
                                           Quantity quantityUsed, OffsetDateTime consumedAt) {
        return new Consumption(id, inputBatchId, articleId, quantityUsed, consumedAt);
    }

    public ConsumptionId id() { return id; }
    public BatchId inputBatchId() { return inputBatchId; }
    public ArticleId articleId() { return articleId; }
    public Quantity quantityUsed() { return quantityUsed; }
    public OffsetDateTime consumedAt() { return consumedAt; }
}

View file

@ -0,0 +1,8 @@
package de.effigenix.domain.production;
/**
 * Raw, unvalidated input for recording a consumption.
 * Strings are parsed and validated by {@code Consumption.create}.
 */
public record ConsumptionDraft(
        String inputBatchId,
        String articleId,
        String quantityUsed,
        String quantityUnit) {}

View file

@ -0,0 +1,20 @@
package de.effigenix.domain.production;
import java.util.UUID;
/**
 * Identity of a single consumption entry, held as a UUID string.
 * Rejects null/blank values at construction time.
 */
public record ConsumptionId(String value) {

    public ConsumptionId {
        var missing = (value == null) || value.isBlank();
        if (missing) {
            throw new IllegalArgumentException("ConsumptionId must not be blank");
        }
    }

    /** Creates a new identifier from a random UUID. */
    public static ConsumptionId generate() {
        return new ConsumptionId(UUID.randomUUID().toString());
    }

    /** Wraps an existing identifier value (e.g. when loading from persistence). */
    public static ConsumptionId of(String value) {
        return new ConsumptionId(value);
    }
}

View file

@ -0,0 +1,7 @@
package de.effigenix.domain.production.event;
import de.effigenix.domain.production.BatchId;
import java.time.OffsetDateTime;
/**
 * Domain event: a batch entered production.
 * NOTE(review): declared in this commit, but no publish site is visible in
 * Batch.startProduction() — confirm the intended event wiring.
 */
public record BatchStarted(BatchId batchId, OffsetDateTime startedAt) {}

View file

@ -0,0 +1,6 @@
package de.effigenix.domain.production.event;
import de.effigenix.domain.production.BatchId;
import de.effigenix.domain.production.ConsumptionId;
/**
 * Domain event: an input-batch consumption was documented on a batch.
 * NOTE(review): declared in this commit, but no publish site is visible in
 * Batch.recordConsumption() — confirm the intended event wiring.
 */
public record ConsumptionRecorded(BatchId batchId, ConsumptionId consumptionId, BatchId inputBatchId) {}

View file

@ -9,6 +9,8 @@ import de.effigenix.application.production.FindBatchByNumber;
import de.effigenix.application.production.GetBatch;
import de.effigenix.application.production.ListBatches;
import de.effigenix.application.production.PlanBatch;
import de.effigenix.application.production.RecordConsumption;
import de.effigenix.application.production.StartBatch;
import de.effigenix.application.production.RecipeCycleChecker;
import de.effigenix.application.production.GetRecipe;
import de.effigenix.application.production.ListRecipes;
@ -96,4 +98,14 @@ public class ProductionUseCaseConfiguration {
public FindBatchByNumber findBatchByNumber(BatchRepository batchRepository, AuthorizationPort authorizationPort) {
return new FindBatchByNumber(batchRepository, authorizationPort);
}
// Wires the StartBatch use case; transaction demarcation comes from the
// @Transactional annotation on the use-case class itself.
@Bean
public StartBatch startBatch(BatchRepository batchRepository, AuthorizationPort authorizationPort) {
return new StartBatch(batchRepository, authorizationPort);
}
// Wires the RecordConsumption use case; transaction demarcation comes from
// the @Transactional annotation on the use-case class itself.
@Bean
public RecordConsumption recordConsumption(BatchRepository batchRepository, AuthorizationPort authorizationPort) {
return new RecordConsumption(batchRepository, authorizationPort);
}
}

View file

@ -5,6 +5,8 @@ import jakarta.persistence.*;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.OffsetDateTime;
import java.util.ArrayList;
import java.util.List;
@Entity
@Table(name = "batches")
@ -41,6 +43,9 @@ public class BatchEntity {
@Column(name = "updated_at", nullable = false)
private OffsetDateTime updatedAt;
@OneToMany(mappedBy = "batch", cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER)
private List<ConsumptionEntity> consumptions = new ArrayList<>();
protected BatchEntity() {}
public BatchEntity(
@ -77,4 +82,6 @@ public class BatchEntity {
public LocalDate getBestBeforeDate() { return bestBeforeDate; }
public OffsetDateTime getCreatedAt() { return createdAt; }
public OffsetDateTime getUpdatedAt() { return updatedAt; }
public List<ConsumptionEntity> getConsumptions() { return consumptions; }
public void setConsumptions(List<ConsumptionEntity> consumptions) { this.consumptions = consumptions; }
}

View file

@ -0,0 +1,55 @@
package de.effigenix.infrastructure.production.persistence.entity;
import jakarta.persistence.*;
import java.math.BigDecimal;
import java.time.OffsetDateTime;
/**
 * JPA row mapping for one entry of the batch_consumptions table: a single
 * raw-material consumption recorded against a producing batch.
 */
@Entity
@Table(name = "batch_consumptions")
public class ConsumptionEntity {
// Consumption id (UUID string); assigned by the domain (ConsumptionId), not DB-generated.
@Id
@Column(name = "id", nullable = false, length = 36)
private String id;
// Owning side of BatchEntity.consumptions (mappedBy = "batch").
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "batch_id", nullable = false)
private BatchEntity batch;
// Id of the batch that was consumed (genealogy link); plain column, no FK association here.
@Column(name = "input_batch_id", nullable = false, length = 36)
private String inputBatchId;
@Column(name = "article_id", nullable = false, length = 36)
private String articleId;
// Quantity stored as amount + unit name, matching the Quantity value object.
@Column(name = "quantity_used_amount", nullable = false, precision = 19, scale = 6)
private BigDecimal quantityUsedAmount;
@Column(name = "quantity_used_unit", nullable = false, length = 10)
private String quantityUsedUnit;
@Column(name = "consumed_at", nullable = false)
private OffsetDateTime consumedAt;
// Required by JPA.
protected ConsumptionEntity() {}
public ConsumptionEntity(String id, BatchEntity batch, String inputBatchId, String articleId,
BigDecimal quantityUsedAmount, String quantityUsedUnit, OffsetDateTime consumedAt) {
this.id = id;
this.batch = batch;
this.inputBatchId = inputBatchId;
this.articleId = articleId;
this.quantityUsedAmount = quantityUsedAmount;
this.quantityUsedUnit = quantityUsedUnit;
this.consumedAt = consumedAt;
}
public String getId() { return id; }
public BatchEntity getBatch() { return batch; }
public String getInputBatchId() { return inputBatchId; }
public String getArticleId() { return articleId; }
public BigDecimal getQuantityUsedAmount() { return quantityUsedAmount; }
public String getQuantityUsedUnit() { return quantityUsedUnit; }
public OffsetDateTime getConsumedAt() { return consumedAt; }
}

View file

@ -1,16 +1,21 @@
package de.effigenix.infrastructure.production.persistence.mapper;
import de.effigenix.domain.masterdata.ArticleId;
import de.effigenix.domain.production.*;
import de.effigenix.infrastructure.production.persistence.entity.BatchEntity;
import de.effigenix.infrastructure.production.persistence.entity.ConsumptionEntity;
import de.effigenix.shared.common.Quantity;
import de.effigenix.shared.common.UnitOfMeasure;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List;
@Component
public class BatchMapper {
public BatchEntity toEntity(Batch batch) {
return new BatchEntity(
var entity = new BatchEntity(
batch.id().value(),
batch.batchNumber().value(),
batch.recipeId().value(),
@ -22,9 +27,38 @@ public class BatchMapper {
batch.createdAt(),
batch.updatedAt()
);
List<ConsumptionEntity> consumptionEntities = new ArrayList<>();
for (Consumption c : batch.consumptions()) {
consumptionEntities.add(new ConsumptionEntity(
c.id().value(),
entity,
c.inputBatchId().value(),
c.articleId().value(),
c.quantityUsed().amount(),
c.quantityUsed().uom().name(),
c.consumedAt()
));
}
entity.setConsumptions(consumptionEntities);
return entity;
}
public Batch toDomain(BatchEntity entity) {
List<Consumption> consumptions = entity.getConsumptions().stream()
.map(ce -> Consumption.reconstitute(
ConsumptionId.of(ce.getId()),
BatchId.of(ce.getInputBatchId()),
ArticleId.of(ce.getArticleId()),
Quantity.reconstitute(
ce.getQuantityUsedAmount(),
UnitOfMeasure.valueOf(ce.getQuantityUsedUnit())
),
ce.getConsumedAt()
))
.toList();
return Batch.reconstitute(
BatchId.of(entity.getId()),
new BatchNumber(entity.getBatchNumber()),
@ -37,7 +71,8 @@ public class BatchMapper {
entity.getProductionDate(),
entity.getBestBeforeDate(),
entity.getCreatedAt(),
entity.getUpdatedAt()
entity.getUpdatedAt(),
consumptions
);
}
}

View file

@ -4,14 +4,20 @@ import de.effigenix.application.production.FindBatchByNumber;
import de.effigenix.application.production.GetBatch;
import de.effigenix.application.production.ListBatches;
import de.effigenix.application.production.PlanBatch;
import de.effigenix.application.production.RecordConsumption;
import de.effigenix.application.production.StartBatch;
import de.effigenix.application.production.command.PlanBatchCommand;
import de.effigenix.application.production.command.RecordConsumptionCommand;
import de.effigenix.application.production.command.StartBatchCommand;
import de.effigenix.domain.production.BatchError;
import de.effigenix.domain.production.BatchId;
import de.effigenix.domain.production.BatchNumber;
import de.effigenix.domain.production.BatchStatus;
import de.effigenix.infrastructure.production.web.dto.BatchResponse;
import de.effigenix.infrastructure.production.web.dto.BatchSummaryResponse;
import de.effigenix.infrastructure.production.web.dto.ConsumptionResponse;
import de.effigenix.infrastructure.production.web.dto.PlanBatchRequest;
import de.effigenix.infrastructure.production.web.dto.RecordConsumptionRequest;
import de.effigenix.shared.security.ActorId;
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
import io.swagger.v3.oas.annotations.tags.Tag;
@ -40,13 +46,18 @@ public class BatchController {
private final GetBatch getBatch;
private final ListBatches listBatches;
private final FindBatchByNumber findBatchByNumber;
private final StartBatch startBatch;
private final RecordConsumption recordConsumption;
public BatchController(PlanBatch planBatch, GetBatch getBatch, ListBatches listBatches,
FindBatchByNumber findBatchByNumber) {
FindBatchByNumber findBatchByNumber, StartBatch startBatch,
RecordConsumption recordConsumption) {
this.planBatch = planBatch;
this.getBatch = getBatch;
this.listBatches = listBatches;
this.findBatchByNumber = findBatchByNumber;
this.startBatch = startBatch;
this.recordConsumption = recordConsumption;
}
@GetMapping("/{id}")
@ -150,6 +161,51 @@ public class BatchController {
.body(BatchResponse.from(result.unsafeGetValue()));
}
/**
 * POST /{id}/start — transitions a PLANNED batch into IN_PRODUCTION and
 * returns the updated batch (200). Domain rejections are surfaced by
 * throwing BatchDomainErrorException.
 */
@PostMapping("/{id}/start")
@PreAuthorize("hasAuthority('BATCH_WRITE')")
public ResponseEntity<BatchResponse> startBatch(
@PathVariable("id") String id,
Authentication authentication
) {
logger.info("Starting batch: {} by actor: {}", id, authentication.getName());
var cmd = new StartBatchCommand(id);
// Note: the field startBatch (use case) is shadowed-by-name by this method; the call below targets the field.
var result = startBatch.execute(cmd, ActorId.of(authentication.getName()));
if (result.isFailure()) {
throw new BatchDomainErrorException(result.unsafeGetError());
}
return ResponseEntity.ok(BatchResponse.from(result.unsafeGetValue()));
}
/**
 * POST /{id}/consumptions — documents the consumption of an input batch on
 * the batch identified by the path id. Returns 201 with the created
 * consumption; domain rejections are surfaced by throwing
 * BatchDomainErrorException.
 */
@PostMapping("/{id}/consumptions")
@PreAuthorize("hasAuthority('BATCH_WRITE')")
public ResponseEntity<ConsumptionResponse> recordConsumption(
@PathVariable("id") String id,
@Valid @RequestBody RecordConsumptionRequest request,
Authentication authentication
) {
logger.info("Recording consumption for batch: {} by actor: {}", id, authentication.getName());
// The batch id comes from the path; the remaining values from the validated body.
var cmd = new RecordConsumptionCommand(
id,
request.inputBatchId(),
request.articleId(),
request.quantityUsed(),
request.quantityUnit()
);
var result = recordConsumption.execute(cmd, ActorId.of(authentication.getName()));
if (result.isFailure()) {
throw new BatchDomainErrorException(result.unsafeGetError());
}
return ResponseEntity.status(HttpStatus.CREATED)
.body(ConsumptionResponse.from(result.unsafeGetValue()));
}
private static String filterType(String status, LocalDate productionDate, String articleId) {
int count = (status != null ? 1 : 0) + (productionDate != null ? 1 : 0) + (articleId != null ? 1 : 0);
if (count > 1) return "ambiguous";

View file

@ -4,6 +4,7 @@ import de.effigenix.domain.production.Batch;
import java.time.LocalDate;
import java.time.OffsetDateTime;
import java.util.List;
public record BatchResponse(
String id,
@ -14,10 +15,15 @@ public record BatchResponse(
String plannedQuantityUnit,
LocalDate productionDate,
LocalDate bestBeforeDate,
List<ConsumptionResponse> consumptions,
OffsetDateTime createdAt,
OffsetDateTime updatedAt
) {
public static BatchResponse from(Batch batch) {
var consumptions = batch.consumptions().stream()
.map(ConsumptionResponse::from)
.toList();
return new BatchResponse(
batch.id().value(),
batch.batchNumber().value(),
@ -27,6 +33,7 @@ public record BatchResponse(
batch.plannedQuantity().uom().name(),
batch.productionDate(),
batch.bestBeforeDate(),
consumptions,
batch.createdAt(),
batch.updatedAt()
);

View file

@ -0,0 +1,25 @@
package de.effigenix.infrastructure.production.web.dto;
import de.effigenix.domain.production.Consumption;
import java.time.OffsetDateTime;
/**
 * API representation of a recorded consumption; the quantity is rendered as
 * a plain decimal string plus the unit name.
 */
public record ConsumptionResponse(
        String id,
        String inputBatchId,
        String articleId,
        String quantityUsed,
        String quantityUsedUnit,
        OffsetDateTime consumedAt
) {
    /** Maps the domain object onto the transport shape. */
    public static ConsumptionResponse from(Consumption consumption) {
        var quantity = consumption.quantityUsed();
        return new ConsumptionResponse(
                consumption.id().value(),
                consumption.inputBatchId().value(),
                consumption.articleId().value(),
                quantity.amount().toPlainString(),
                quantity.uom().name(),
                consumption.consumedAt()
        );
    }
}

View file

@ -0,0 +1,10 @@
package de.effigenix.infrastructure.production.web.dto;
import jakarta.validation.constraints.NotBlank;
/**
 * HTTP request body for POST /{id}/consumptions.
 * Bean Validation (@NotBlank) rejects blank fields before the use case runs.
 */
public record RecordConsumptionRequest(
@NotBlank String inputBatchId,
@NotBlank String articleId,
@NotBlank String quantityUsed,
@NotBlank String quantityUnit
) {}

View file

@ -33,6 +33,10 @@ public final class ProductionErrorHttpStatusMapper {
case BatchError.InvalidPlannedQuantity e -> 400;
case BatchError.InvalidDates e -> 400;
case BatchError.RecipeNotActive e -> 409;
case BatchError.InvalidStatusTransition e -> 409;
case BatchError.NotInProduction e -> 409;
case BatchError.DuplicateInputBatch e -> 409;
case BatchError.InvalidConsumptionQuantity e -> 400;
case BatchError.ValidationFailure e -> 400;
case BatchError.Unauthorized e -> 403;
case BatchError.RepositoryFailure e -> 500;

View file

@ -0,0 +1,48 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Changeset 019: persistence for batch consumptions (batch genealogy).
     One row per input batch consumed by a producing batch; maps to
     ConsumptionEntity. -->
<databaseChangeLog
xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-latest.xsd">
<changeSet id="019-create-batch-consumptions-table" author="effigenix">
<createTable tableName="batch_consumptions">
<!-- UUID string ids, assigned by the domain layer (no DB generation). -->
<column name="id" type="varchar(36)">
<constraints primaryKey="true" nullable="false"/>
</column>
<column name="batch_id" type="varchar(36)">
<constraints nullable="false"
foreignKeyName="fk_consumption_batch"
references="batches(id)"/>
</column>
<!-- Genealogy link: the consumed batch. Plain column, no FK constraint. -->
<column name="input_batch_id" type="varchar(36)">
<constraints nullable="false"/>
</column>
<column name="article_id" type="varchar(36)">
<constraints nullable="false"/>
</column>
<!-- Quantity split into amount + unit, matching the Quantity value object. -->
<column name="quantity_used_amount" type="decimal(19,6)">
<constraints nullable="false"/>
</column>
<column name="quantity_used_unit" type="varchar(10)">
<constraints nullable="false"/>
</column>
<column name="consumed_at" type="timestamptz">
<constraints nullable="false"/>
</column>
</createTable>
<!-- Mirrors domain invariant 8: no duplicate input batch per producing batch. -->
<addUniqueConstraint tableName="batch_consumptions"
columnNames="batch_id, input_batch_id"
constraintName="uq_consumption_batch_input"/>
<createIndex tableName="batch_consumptions" indexName="idx_consumption_batch_id">
<column name="batch_id"/>
</createIndex>
<createIndex tableName="batch_consumptions" indexName="idx_consumption_input_batch_id">
<column name="input_batch_id"/>
</createIndex>
</changeSet>
</databaseChangeLog>

View file

@ -23,5 +23,6 @@
<include file="db/changelog/changes/016-create-batch-number-sequences-table.xml"/>
<include file="db/changelog/changes/017-timestamps-to-timestamptz.xml"/>
<include file="db/changelog/changes/018-add-article-id-to-recipes.xml"/>
<include file="db/changelog/changes/019-create-batch-consumptions-table.xml"/>
</databaseChangeLog>