1
0
Fork 0
mirror of https://github.com/s-frick/effigenix.git synced 2026-03-28 08:29:36 +01:00

refactor(production): UnitOfWork-Pattern + JdbcClient-Migration für Production-BC

Production-BC von JPA auf JdbcClient migriert und UnitOfWork-Port
eingeführt, der Transaktionen explizit steuert und bei Result.failure
zurückrollt — löst das Problem, dass @Transactional bei funktionalem
Error-Handling keinen Rollback auslöst.

- UnitOfWork-Interface (shared) + SpringUnitOfWork-Implementierung
- JdbcProductionOrderRepository, JdbcRecipeRepository, JdbcBatchRepository,
  JdbcBatchNumberGenerator ersetzen JPA-Pendants
- 17 JPA-Dateien entfernt (Entities, Mapper, Spring Data Interfaces)
- Alle Production-Use-Cases nutzen UnitOfWork statt @Transactional
- Liquibase-Changelogs H2-kompatibel gemacht (dbms-Attribute)
- Tests auf Liquibase-Schema umgestellt (ddl-auto: none)
This commit is contained in:
Sebastian Frick 2026-02-25 00:18:44 +01:00
parent bfae3eff73
commit e5bc5690da
64 changed files with 1248 additions and 1585 deletions

View file

@ -3,19 +3,20 @@ package de.effigenix.application.production;
import de.effigenix.application.production.command.ActivateRecipeCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.persistence.UnitOfWork;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
@Transactional
public class ActivateRecipe {
private final RecipeRepository recipeRepository;
private final AuthorizationPort authorizationPort;
private final UnitOfWork unitOfWork;
public ActivateRecipe(RecipeRepository recipeRepository, AuthorizationPort authorizationPort) {
public ActivateRecipe(RecipeRepository recipeRepository, AuthorizationPort authorizationPort, UnitOfWork unitOfWork) {
this.recipeRepository = recipeRepository;
this.authorizationPort = authorizationPort;
this.unitOfWork = unitOfWork;
}
public Result<RecipeError, Recipe> execute(ActivateRecipeCommand cmd, ActorId performedBy) {
@ -42,12 +43,13 @@ public class ActivateRecipe {
case Result.Success(var ignored) -> { }
}
switch (recipeRepository.save(recipe)) {
case Result.Failure(var err) ->
{ return Result.failure(new RecipeError.RepositoryFailure(err.message())); }
case Result.Success(var ignored) -> { }
}
return Result.success(recipe);
return unitOfWork.executeAtomically(() -> {
switch (recipeRepository.save(recipe)) {
case Result.Failure(var err) ->
{ return Result.failure(new RecipeError.RepositoryFailure(err.message())); }
case Result.Success(var ignored) -> { }
}
return Result.success(recipe);
});
}
}

View file

@ -3,19 +3,21 @@ package de.effigenix.application.production;
import de.effigenix.application.production.command.AddProductionStepCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.persistence.UnitOfWork;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
@Transactional
public class AddProductionStep {
private final RecipeRepository recipeRepository;
private final AuthorizationPort authorizationPort;
private final UnitOfWork unitOfWork;
public AddProductionStep(RecipeRepository recipeRepository, AuthorizationPort authorizationPort) {
public AddProductionStep(RecipeRepository recipeRepository, AuthorizationPort authorizationPort,
UnitOfWork unitOfWork) {
this.recipeRepository = recipeRepository;
this.authorizationPort = authorizationPort;
this.unitOfWork = unitOfWork;
}
public Result<RecipeError, Recipe> execute(AddProductionStepCommand cmd, ActorId performedBy) {
@ -47,12 +49,13 @@ public class AddProductionStep {
case Result.Success(var ignored) -> { }
}
switch (recipeRepository.save(recipe)) {
case Result.Failure(var err) ->
{ return Result.failure(new RecipeError.RepositoryFailure(err.message())); }
case Result.Success(var ignored) -> { }
}
return Result.success(recipe);
return unitOfWork.executeAtomically(() -> {
switch (recipeRepository.save(recipe)) {
case Result.Failure(var err) ->
{ return Result.failure(new RecipeError.RepositoryFailure(err.message())); }
case Result.Success(var ignored) -> { }
}
return Result.success(recipe);
});
}
}

View file

@ -3,22 +3,23 @@ package de.effigenix.application.production;
import de.effigenix.application.production.command.AddRecipeIngredientCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.persistence.UnitOfWork;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
@Transactional
public class AddRecipeIngredient {
private final RecipeRepository recipeRepository;
private final AuthorizationPort authorizationPort;
private final RecipeCycleChecker cycleChecker;
private final UnitOfWork unitOfWork;
public AddRecipeIngredient(RecipeRepository recipeRepository, AuthorizationPort authorizationPort,
RecipeCycleChecker cycleChecker) {
RecipeCycleChecker cycleChecker, UnitOfWork unitOfWork) {
this.recipeRepository = recipeRepository;
this.authorizationPort = authorizationPort;
this.cycleChecker = cycleChecker;
this.unitOfWork = unitOfWork;
}
public Result<RecipeError, Recipe> execute(AddRecipeIngredientCommand cmd, ActorId performedBy) {
@ -57,12 +58,13 @@ public class AddRecipeIngredient {
case Result.Success(var ignored) -> { }
}
switch (recipeRepository.save(recipe)) {
case Result.Failure(var err) ->
{ return Result.failure(new RecipeError.RepositoryFailure(err.message())); }
case Result.Success(var ignored) -> { }
}
return Result.success(recipe);
return unitOfWork.executeAtomically(() -> {
switch (recipeRepository.save(recipe)) {
case Result.Failure(var err) ->
{ return Result.failure(new RecipeError.RepositoryFailure(err.message())); }
case Result.Success(var ignored) -> { }
}
return Result.success(recipe);
});
}
}

View file

@ -3,19 +3,20 @@ package de.effigenix.application.production;
import de.effigenix.application.production.command.ArchiveRecipeCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.persistence.UnitOfWork;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
@Transactional
public class ArchiveRecipe {
private final RecipeRepository recipeRepository;
private final AuthorizationPort authorizationPort;
private final UnitOfWork unitOfWork;
public ArchiveRecipe(RecipeRepository recipeRepository, AuthorizationPort authorizationPort) {
public ArchiveRecipe(RecipeRepository recipeRepository, AuthorizationPort authorizationPort, UnitOfWork unitOfWork) {
this.recipeRepository = recipeRepository;
this.authorizationPort = authorizationPort;
this.unitOfWork = unitOfWork;
}
public Result<RecipeError, Recipe> execute(ArchiveRecipeCommand cmd, ActorId performedBy) {
@ -42,12 +43,13 @@ public class ArchiveRecipe {
case Result.Success(var ignored) -> { }
}
switch (recipeRepository.save(recipe)) {
case Result.Failure(var err) ->
{ return Result.failure(new RecipeError.RepositoryFailure(err.message())); }
case Result.Success(var ignored) -> { }
}
return Result.success(recipe);
return unitOfWork.executeAtomically(() -> {
switch (recipeRepository.save(recipe)) {
case Result.Failure(var err) ->
{ return Result.failure(new RecipeError.RepositoryFailure(err.message())); }
case Result.Success(var ignored) -> { }
}
return Result.success(recipe);
});
}
}

View file

@ -3,19 +3,20 @@ package de.effigenix.application.production;
import de.effigenix.application.production.command.CancelBatchCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.persistence.UnitOfWork;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
@Transactional
public class CancelBatch {
private final BatchRepository batchRepository;
private final AuthorizationPort authorizationPort;
private final UnitOfWork unitOfWork;
public CancelBatch(BatchRepository batchRepository, AuthorizationPort authorizationPort) {
public CancelBatch(BatchRepository batchRepository, AuthorizationPort authorizationPort, UnitOfWork unitOfWork) {
this.batchRepository = batchRepository;
this.authorizationPort = authorizationPort;
this.unitOfWork = unitOfWork;
}
public Result<BatchError, Batch> execute(CancelBatchCommand cmd, ActorId performedBy) {
@ -45,13 +46,14 @@ public class CancelBatch {
case Result.Success(var ignored) -> { }
}
switch (batchRepository.save(batch)) {
case Result.Failure(var err) -> {
return Result.failure(new BatchError.RepositoryFailure(err.message()));
return unitOfWork.executeAtomically(() -> {
switch (batchRepository.save(batch)) {
case Result.Failure(var err) -> {
return Result.failure(new BatchError.RepositoryFailure(err.message()));
}
case Result.Success(var ignored) -> { }
}
case Result.Success(var ignored) -> { }
}
return Result.success(batch);
return Result.success(batch);
});
}
}

View file

@ -3,19 +3,20 @@ package de.effigenix.application.production;
import de.effigenix.application.production.command.CompleteBatchCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.persistence.UnitOfWork;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
@Transactional
public class CompleteBatch {
private final BatchRepository batchRepository;
private final AuthorizationPort authorizationPort;
private final UnitOfWork unitOfWork;
public CompleteBatch(BatchRepository batchRepository, AuthorizationPort authorizationPort) {
public CompleteBatch(BatchRepository batchRepository, AuthorizationPort authorizationPort, UnitOfWork unitOfWork) {
this.batchRepository = batchRepository;
this.authorizationPort = authorizationPort;
this.unitOfWork = unitOfWork;
}
public Result<BatchError, Batch> execute(CompleteBatchCommand cmd, ActorId performedBy) {
@ -51,13 +52,14 @@ public class CompleteBatch {
case Result.Success(var ignored) -> { }
}
switch (batchRepository.save(batch)) {
case Result.Failure(var err) -> {
return Result.failure(new BatchError.RepositoryFailure(err.message()));
return unitOfWork.executeAtomically(() -> {
switch (batchRepository.save(batch)) {
case Result.Failure(var err) -> {
return Result.failure(new BatchError.RepositoryFailure(err.message()));
}
case Result.Success(var ignored) -> { }
}
case Result.Success(var ignored) -> { }
}
return Result.success(batch);
return Result.success(batch);
});
}
}

View file

@ -3,25 +3,27 @@ package de.effigenix.application.production;
import de.effigenix.application.production.command.CreateProductionOrderCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.persistence.UnitOfWork;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
@Transactional
public class CreateProductionOrder {
private final ProductionOrderRepository productionOrderRepository;
private final RecipeRepository recipeRepository;
private final AuthorizationPort authorizationPort;
private final UnitOfWork unitOfWork;
public CreateProductionOrder(
ProductionOrderRepository productionOrderRepository,
RecipeRepository recipeRepository,
AuthorizationPort authorizationPort
AuthorizationPort authorizationPort,
UnitOfWork unitOfWork
) {
this.productionOrderRepository = productionOrderRepository;
this.recipeRepository = recipeRepository;
this.authorizationPort = authorizationPort;
this.unitOfWork = unitOfWork;
}
public Result<ProductionOrderError, ProductionOrder> execute(CreateProductionOrderCommand cmd, ActorId performedBy) {
@ -64,14 +66,14 @@ public class CreateProductionOrder {
case Result.Success(var val) -> order = val;
}
// Persist
switch (productionOrderRepository.save(order)) {
case Result.Failure(var err) -> {
return Result.failure(new ProductionOrderError.RepositoryFailure(err.message()));
return unitOfWork.executeAtomically(() -> {
switch (productionOrderRepository.save(order)) {
case Result.Failure(var err) -> {
return Result.failure(new ProductionOrderError.RepositoryFailure(err.message()));
}
case Result.Success(var ignored) -> { }
}
case Result.Success(var ignored) -> { }
}
return Result.success(order);
return Result.success(order);
});
}
}

View file

@ -3,19 +3,20 @@ package de.effigenix.application.production;
import de.effigenix.application.production.command.CreateRecipeCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.persistence.UnitOfWork;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
@Transactional
public class CreateRecipe {
private final RecipeRepository recipeRepository;
private final AuthorizationPort authorizationPort;
private final UnitOfWork unitOfWork;
public CreateRecipe(RecipeRepository recipeRepository, AuthorizationPort authorizationPort) {
public CreateRecipe(RecipeRepository recipeRepository, AuthorizationPort authorizationPort, UnitOfWork unitOfWork) {
this.recipeRepository = recipeRepository;
this.authorizationPort = authorizationPort;
this.unitOfWork = unitOfWork;
}
public Result<RecipeError, Recipe> execute(CreateRecipeCommand cmd, ActorId performedBy) {
@ -46,12 +47,13 @@ public class CreateRecipe {
}
}
switch (recipeRepository.save(recipe)) {
case Result.Failure(var err) ->
{ return Result.failure(new RecipeError.RepositoryFailure(err.message())); }
case Result.Success(var ignored) -> { }
}
return Result.success(recipe);
return unitOfWork.executeAtomically(() -> {
switch (recipeRepository.save(recipe)) {
case Result.Failure(var err) ->
{ return Result.failure(new RecipeError.RepositoryFailure(err.message())); }
case Result.Success(var ignored) -> { }
}
return Result.success(recipe);
});
}
}

View file

@ -4,9 +4,7 @@ import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
@Transactional(readOnly = true)
public class FindBatchByNumber {
private final BatchRepository batchRepository;

View file

@ -4,9 +4,7 @@ import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
@Transactional(readOnly = true)
public class GetBatch {
private final BatchRepository batchRepository;

View file

@ -4,9 +4,7 @@ import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
@Transactional(readOnly = true)
public class GetRecipe {
private final RecipeRepository recipeRepository;

View file

@ -4,12 +4,10 @@ import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
import java.time.LocalDate;
import java.util.List;
@Transactional(readOnly = true)
public class ListBatches {
private final BatchRepository batchRepository;

View file

@ -4,11 +4,9 @@ import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
@Transactional(readOnly = true)
public class ListRecipes {
private final RecipeRepository recipeRepository;

View file

@ -3,28 +3,30 @@ package de.effigenix.application.production;
import de.effigenix.application.production.command.PlanBatchCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.persistence.UnitOfWork;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
@Transactional
public class PlanBatch {
private final BatchRepository batchRepository;
private final RecipeRepository recipeRepository;
private final BatchNumberGenerator batchNumberGenerator;
private final AuthorizationPort authorizationPort;
private final UnitOfWork unitOfWork;
public PlanBatch(
BatchRepository batchRepository,
RecipeRepository recipeRepository,
BatchNumberGenerator batchNumberGenerator,
AuthorizationPort authorizationPort
AuthorizationPort authorizationPort,
UnitOfWork unitOfWork
) {
this.batchRepository = batchRepository;
this.recipeRepository = recipeRepository;
this.batchNumberGenerator = batchNumberGenerator;
this.authorizationPort = authorizationPort;
this.unitOfWork = unitOfWork;
}
public Result<BatchError, Batch> execute(PlanBatchCommand cmd, ActorId performedBy) {
@ -73,14 +75,14 @@ public class PlanBatch {
case Result.Success(var val) -> batch = val;
}
// Persist
switch (batchRepository.save(batch)) {
case Result.Failure(var err) -> {
return Result.failure(new BatchError.RepositoryFailure(err.message()));
return unitOfWork.executeAtomically(() -> {
switch (batchRepository.save(batch)) {
case Result.Failure(var err) -> {
return Result.failure(new BatchError.RepositoryFailure(err.message()));
}
case Result.Success(var ignored) -> { }
}
case Result.Success(var ignored) -> { }
}
return Result.success(batch);
return Result.success(batch);
});
}
}

View file

@ -3,19 +3,21 @@ package de.effigenix.application.production;
import de.effigenix.application.production.command.RecordConsumptionCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.persistence.UnitOfWork;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
@Transactional
public class RecordConsumption {
private final BatchRepository batchRepository;
private final AuthorizationPort authorizationPort;
private final UnitOfWork unitOfWork;
public RecordConsumption(BatchRepository batchRepository, AuthorizationPort authorizationPort) {
public RecordConsumption(BatchRepository batchRepository, AuthorizationPort authorizationPort,
UnitOfWork unitOfWork) {
this.batchRepository = batchRepository;
this.authorizationPort = authorizationPort;
this.unitOfWork = unitOfWork;
}
public Result<BatchError, Consumption> execute(RecordConsumptionCommand cmd, ActorId performedBy) {
@ -51,13 +53,14 @@ public class RecordConsumption {
case Result.Success(var val) -> consumption = val;
}
switch (batchRepository.save(batch)) {
case Result.Failure(var err) -> {
return Result.failure(new BatchError.RepositoryFailure(err.message()));
return unitOfWork.executeAtomically(() -> {
switch (batchRepository.save(batch)) {
case Result.Failure(var err) -> {
return Result.failure(new BatchError.RepositoryFailure(err.message()));
}
case Result.Success(var ignored) -> { }
}
case Result.Success(var ignored) -> { }
}
return Result.success(consumption);
return Result.success(consumption);
});
}
}

View file

@ -3,25 +3,27 @@ package de.effigenix.application.production;
import de.effigenix.application.production.command.ReleaseProductionOrderCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.persistence.UnitOfWork;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
@Transactional
public class ReleaseProductionOrder {
private final ProductionOrderRepository productionOrderRepository;
private final RecipeRepository recipeRepository;
private final AuthorizationPort authorizationPort;
private final UnitOfWork unitOfWork;
public ReleaseProductionOrder(
ProductionOrderRepository productionOrderRepository,
RecipeRepository recipeRepository,
AuthorizationPort authorizationPort
AuthorizationPort authorizationPort,
UnitOfWork unitOfWork
) {
this.productionOrderRepository = productionOrderRepository;
this.recipeRepository = recipeRepository;
this.authorizationPort = authorizationPort;
this.unitOfWork = unitOfWork;
}
public Result<ProductionOrderError, ProductionOrder> execute(ReleaseProductionOrderCommand cmd, ActorId performedBy) {
@ -68,14 +70,14 @@ public class ReleaseProductionOrder {
case Result.Success(var ignored) -> { }
}
// Persist
switch (productionOrderRepository.save(order)) {
case Result.Failure(var err) -> {
return Result.failure(new ProductionOrderError.RepositoryFailure(err.message()));
return unitOfWork.executeAtomically(() -> {
switch (productionOrderRepository.save(order)) {
case Result.Failure(var err) -> {
return Result.failure(new ProductionOrderError.RepositoryFailure(err.message()));
}
case Result.Success(var ignored) -> { }
}
case Result.Success(var ignored) -> { }
}
return Result.success(order);
return Result.success(order);
});
}
}

View file

@ -3,19 +3,21 @@ package de.effigenix.application.production;
import de.effigenix.application.production.command.RemoveProductionStepCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.persistence.UnitOfWork;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
@Transactional
public class RemoveProductionStep {
private final RecipeRepository recipeRepository;
private final AuthorizationPort authorizationPort;
private final UnitOfWork unitOfWork;
public RemoveProductionStep(RecipeRepository recipeRepository, AuthorizationPort authorizationPort) {
public RemoveProductionStep(RecipeRepository recipeRepository, AuthorizationPort authorizationPort,
UnitOfWork unitOfWork) {
this.recipeRepository = recipeRepository;
this.authorizationPort = authorizationPort;
this.unitOfWork = unitOfWork;
}
public Result<RecipeError, Recipe> execute(RemoveProductionStepCommand cmd, ActorId performedBy) {
@ -42,12 +44,13 @@ public class RemoveProductionStep {
case Result.Success(var ignored) -> { }
}
switch (recipeRepository.save(recipe)) {
case Result.Failure(var err) ->
{ return Result.failure(new RecipeError.RepositoryFailure(err.message())); }
case Result.Success(var ignored) -> { }
}
return Result.success(recipe);
return unitOfWork.executeAtomically(() -> {
switch (recipeRepository.save(recipe)) {
case Result.Failure(var err) ->
{ return Result.failure(new RecipeError.RepositoryFailure(err.message())); }
case Result.Success(var ignored) -> { }
}
return Result.success(recipe);
});
}
}

View file

@ -3,19 +3,21 @@ package de.effigenix.application.production;
import de.effigenix.application.production.command.RemoveRecipeIngredientCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.persistence.UnitOfWork;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
@Transactional
public class RemoveRecipeIngredient {
private final RecipeRepository recipeRepository;
private final AuthorizationPort authorizationPort;
private final UnitOfWork unitOfWork;
public RemoveRecipeIngredient(RecipeRepository recipeRepository, AuthorizationPort authorizationPort) {
public RemoveRecipeIngredient(RecipeRepository recipeRepository, AuthorizationPort authorizationPort,
UnitOfWork unitOfWork) {
this.recipeRepository = recipeRepository;
this.authorizationPort = authorizationPort;
this.unitOfWork = unitOfWork;
}
public Result<RecipeError, Recipe> execute(RemoveRecipeIngredientCommand cmd, ActorId performedBy) {
@ -42,12 +44,13 @@ public class RemoveRecipeIngredient {
case Result.Success(var ignored) -> { }
}
switch (recipeRepository.save(recipe)) {
case Result.Failure(var err) ->
{ return Result.failure(new RecipeError.RepositoryFailure(err.message())); }
case Result.Success(var ignored) -> { }
}
return Result.success(recipe);
return unitOfWork.executeAtomically(() -> {
switch (recipeRepository.save(recipe)) {
case Result.Failure(var err) ->
{ return Result.failure(new RecipeError.RepositoryFailure(err.message())); }
case Result.Success(var ignored) -> { }
}
return Result.success(recipe);
});
}
}

View file

@ -3,19 +3,20 @@ package de.effigenix.application.production;
import de.effigenix.application.production.command.StartBatchCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.persistence.UnitOfWork;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
@Transactional
public class StartBatch {
private final BatchRepository batchRepository;
private final AuthorizationPort authorizationPort;
private final UnitOfWork unitOfWork;
public StartBatch(BatchRepository batchRepository, AuthorizationPort authorizationPort) {
public StartBatch(BatchRepository batchRepository, AuthorizationPort authorizationPort, UnitOfWork unitOfWork) {
this.batchRepository = batchRepository;
this.authorizationPort = authorizationPort;
this.unitOfWork = unitOfWork;
}
public Result<BatchError, Batch> execute(StartBatchCommand cmd, ActorId performedBy) {
@ -43,13 +44,14 @@ public class StartBatch {
case Result.Success(var ignored) -> { }
}
switch (batchRepository.save(batch)) {
case Result.Failure(var err) -> {
return Result.failure(new BatchError.RepositoryFailure(err.message()));
return unitOfWork.executeAtomically(() -> {
switch (batchRepository.save(batch)) {
case Result.Failure(var err) -> {
return Result.failure(new BatchError.RepositoryFailure(err.message()));
}
case Result.Success(var ignored) -> { }
}
case Result.Success(var ignored) -> { }
}
return Result.success(batch);
return Result.success(batch);
});
}
}

View file

@ -3,25 +3,27 @@ package de.effigenix.application.production;
import de.effigenix.application.production.command.StartProductionOrderCommand;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.persistence.UnitOfWork;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.transaction.annotation.Transactional;
@Transactional
public class StartProductionOrder {
private final ProductionOrderRepository productionOrderRepository;
private final BatchRepository batchRepository;
private final AuthorizationPort authorizationPort;
private final UnitOfWork unitOfWork;
public StartProductionOrder(
ProductionOrderRepository productionOrderRepository,
BatchRepository batchRepository,
AuthorizationPort authorizationPort
AuthorizationPort authorizationPort,
UnitOfWork unitOfWork
) {
this.productionOrderRepository = productionOrderRepository;
this.batchRepository = batchRepository;
this.authorizationPort = authorizationPort;
this.unitOfWork = unitOfWork;
}
public Result<ProductionOrderError, ProductionOrder> execute(StartProductionOrderCommand cmd, ActorId performedBy) {
@ -71,13 +73,13 @@ public class StartProductionOrder {
"Batch recipe '" + batch.recipeId().value() + "' does not match order recipe '" + order.recipeId().value() + "'"));
}
// Start production on order (RELEASED → IN_PROGRESS, assigns batchId)
// Start production on order (RELEASED -> IN_PROGRESS, assigns batchId)
switch (order.startProduction(batchId)) {
case Result.Failure(var err) -> { return Result.failure(err); }
case Result.Success(var ignored) -> { }
}
// Start production on batch (PLANNED → IN_PRODUCTION)
// Start production on batch (PLANNED -> IN_PRODUCTION)
switch (batch.startProduction()) {
case Result.Failure(var err) -> {
return Result.failure(new ProductionOrderError.ValidationFailure(err.message()));
@ -85,21 +87,23 @@ public class StartProductionOrder {
case Result.Success(var ignored) -> { }
}
// Persist both
switch (productionOrderRepository.save(order)) {
case Result.Failure(var err) -> {
return Result.failure(new ProductionOrderError.RepositoryFailure(err.message()));
// Persist both atomically
return unitOfWork.executeAtomically(() -> {
switch (productionOrderRepository.save(order)) {
case Result.Failure(var err) -> {
return Result.failure(new ProductionOrderError.RepositoryFailure(err.message()));
}
case Result.Success(var ignored) -> { }
}
case Result.Success(var ignored) -> { }
}
switch (batchRepository.save(batch)) {
case Result.Failure(var err) -> {
return Result.failure(new ProductionOrderError.RepositoryFailure(err.message()));
switch (batchRepository.save(batch)) {
case Result.Failure(var err) -> {
return Result.failure(new ProductionOrderError.RepositoryFailure(err.message()));
}
case Result.Success(var ignored) -> { }
}
case Result.Success(var ignored) -> { }
}
return Result.success(order);
return Result.success(order);
});
}
}

View file

@ -25,6 +25,7 @@ import de.effigenix.domain.production.BatchNumberGenerator;
import de.effigenix.domain.production.BatchRepository;
import de.effigenix.domain.production.ProductionOrderRepository;
import de.effigenix.domain.production.RecipeRepository;
import de.effigenix.shared.persistence.UnitOfWork;
import de.effigenix.shared.security.AuthorizationPort;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@ -33,8 +34,9 @@ import org.springframework.context.annotation.Configuration;
public class ProductionUseCaseConfiguration {
@Bean
public CreateRecipe createRecipe(RecipeRepository recipeRepository, AuthorizationPort authorizationPort) {
return new CreateRecipe(recipeRepository, authorizationPort);
public CreateRecipe createRecipe(RecipeRepository recipeRepository, AuthorizationPort authorizationPort,
UnitOfWork unitOfWork) {
return new CreateRecipe(recipeRepository, authorizationPort, unitOfWork);
}
@Bean
@ -44,23 +46,26 @@ public class ProductionUseCaseConfiguration {
@Bean
public AddRecipeIngredient addRecipeIngredient(RecipeRepository recipeRepository, AuthorizationPort authorizationPort,
RecipeCycleChecker recipeCycleChecker) {
return new AddRecipeIngredient(recipeRepository, authorizationPort, recipeCycleChecker);
RecipeCycleChecker recipeCycleChecker, UnitOfWork unitOfWork) {
return new AddRecipeIngredient(recipeRepository, authorizationPort, recipeCycleChecker, unitOfWork);
}
@Bean
public RemoveRecipeIngredient removeRecipeIngredient(RecipeRepository recipeRepository, AuthorizationPort authorizationPort) {
return new RemoveRecipeIngredient(recipeRepository, authorizationPort);
public RemoveRecipeIngredient removeRecipeIngredient(RecipeRepository recipeRepository, AuthorizationPort authorizationPort,
UnitOfWork unitOfWork) {
return new RemoveRecipeIngredient(recipeRepository, authorizationPort, unitOfWork);
}
@Bean
public AddProductionStep addProductionStep(RecipeRepository recipeRepository, AuthorizationPort authorizationPort) {
return new AddProductionStep(recipeRepository, authorizationPort);
public AddProductionStep addProductionStep(RecipeRepository recipeRepository, AuthorizationPort authorizationPort,
UnitOfWork unitOfWork) {
return new AddProductionStep(recipeRepository, authorizationPort, unitOfWork);
}
@Bean
public RemoveProductionStep removeProductionStep(RecipeRepository recipeRepository, AuthorizationPort authorizationPort) {
return new RemoveProductionStep(recipeRepository, authorizationPort);
public RemoveProductionStep removeProductionStep(RecipeRepository recipeRepository, AuthorizationPort authorizationPort,
UnitOfWork unitOfWork) {
return new RemoveProductionStep(recipeRepository, authorizationPort, unitOfWork);
}
@Bean
@ -74,19 +79,22 @@ public class ProductionUseCaseConfiguration {
}
@Bean
public ActivateRecipe activateRecipe(RecipeRepository recipeRepository, AuthorizationPort authorizationPort) {
return new ActivateRecipe(recipeRepository, authorizationPort);
public ActivateRecipe activateRecipe(RecipeRepository recipeRepository, AuthorizationPort authorizationPort,
UnitOfWork unitOfWork) {
return new ActivateRecipe(recipeRepository, authorizationPort, unitOfWork);
}
@Bean
public ArchiveRecipe archiveRecipe(RecipeRepository recipeRepository, AuthorizationPort authorizationPort) {
return new ArchiveRecipe(recipeRepository, authorizationPort);
public ArchiveRecipe archiveRecipe(RecipeRepository recipeRepository, AuthorizationPort authorizationPort,
UnitOfWork unitOfWork) {
return new ArchiveRecipe(recipeRepository, authorizationPort, unitOfWork);
}
@Bean
public PlanBatch planBatch(BatchRepository batchRepository, RecipeRepository recipeRepository,
BatchNumberGenerator batchNumberGenerator, AuthorizationPort authorizationPort) {
return new PlanBatch(batchRepository, recipeRepository, batchNumberGenerator, authorizationPort);
BatchNumberGenerator batchNumberGenerator, AuthorizationPort authorizationPort,
UnitOfWork unitOfWork) {
return new PlanBatch(batchRepository, recipeRepository, batchNumberGenerator, authorizationPort, unitOfWork);
}
@Bean
@ -106,43 +114,50 @@ public class ProductionUseCaseConfiguration {
}
@Bean
public StartBatch startBatch(BatchRepository batchRepository, AuthorizationPort authorizationPort) {
return new StartBatch(batchRepository, authorizationPort);
public StartBatch startBatch(BatchRepository batchRepository, AuthorizationPort authorizationPort,
UnitOfWork unitOfWork) {
return new StartBatch(batchRepository, authorizationPort, unitOfWork);
}
@Bean
public RecordConsumption recordConsumption(BatchRepository batchRepository, AuthorizationPort authorizationPort) {
return new RecordConsumption(batchRepository, authorizationPort);
public RecordConsumption recordConsumption(BatchRepository batchRepository, AuthorizationPort authorizationPort,
UnitOfWork unitOfWork) {
return new RecordConsumption(batchRepository, authorizationPort, unitOfWork);
}
@Bean
public CompleteBatch completeBatch(BatchRepository batchRepository, AuthorizationPort authorizationPort) {
return new CompleteBatch(batchRepository, authorizationPort);
public CompleteBatch completeBatch(BatchRepository batchRepository, AuthorizationPort authorizationPort,
UnitOfWork unitOfWork) {
return new CompleteBatch(batchRepository, authorizationPort, unitOfWork);
}
@Bean
public CancelBatch cancelBatch(BatchRepository batchRepository, AuthorizationPort authorizationPort) {
return new CancelBatch(batchRepository, authorizationPort);
public CancelBatch cancelBatch(BatchRepository batchRepository, AuthorizationPort authorizationPort,
UnitOfWork unitOfWork) {
return new CancelBatch(batchRepository, authorizationPort, unitOfWork);
}
@Bean
public CreateProductionOrder createProductionOrder(ProductionOrderRepository productionOrderRepository,
RecipeRepository recipeRepository,
AuthorizationPort authorizationPort) {
return new CreateProductionOrder(productionOrderRepository, recipeRepository, authorizationPort);
AuthorizationPort authorizationPort,
UnitOfWork unitOfWork) {
return new CreateProductionOrder(productionOrderRepository, recipeRepository, authorizationPort, unitOfWork);
}
@Bean
public ReleaseProductionOrder releaseProductionOrder(ProductionOrderRepository productionOrderRepository,
RecipeRepository recipeRepository,
AuthorizationPort authorizationPort) {
return new ReleaseProductionOrder(productionOrderRepository, recipeRepository, authorizationPort);
AuthorizationPort authorizationPort,
UnitOfWork unitOfWork) {
return new ReleaseProductionOrder(productionOrderRepository, recipeRepository, authorizationPort, unitOfWork);
}
@Bean
public StartProductionOrder startProductionOrder(ProductionOrderRepository productionOrderRepository,
BatchRepository batchRepository,
AuthorizationPort authorizationPort) {
return new StartProductionOrder(productionOrderRepository, batchRepository, authorizationPort);
AuthorizationPort authorizationPort,
UnitOfWork unitOfWork) {
return new StartProductionOrder(productionOrderRepository, batchRepository, authorizationPort, unitOfWork);
}
}

View file

@ -0,0 +1,37 @@
package de.effigenix.infrastructure.config;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.persistence.UnitOfWork;
import org.springframework.stereotype.Component;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.DefaultTransactionDefinition;
import java.util.function.Supplier;
@Component
public class SpringUnitOfWork implements UnitOfWork {

    private final PlatformTransactionManager txManager;

    public SpringUnitOfWork(PlatformTransactionManager txManager) {
        this.txManager = txManager;
    }

    /**
     * Runs {@code work} inside a fresh transaction. A {@code Result.failure}
     * rolls back (functional error handling does not throw, so Spring's
     * exception-based rollback would never fire); a success commits.
     *
     * <p>Structured like Spring's {@code TransactionTemplate}: the
     * rollback-on-throw handler wraps ONLY the unit of work, so an exception
     * thrown by {@code commit()} itself is not followed by a second
     * {@code rollback()} on an already-completed transaction (which would
     * raise {@code IllegalTransactionStateException} and mask the real cause).
     * It also catches {@code Error}, not just {@code Exception}, so e.g. an
     * {@code OutOfMemoryError} or assertion failure cannot leak an open
     * transaction. ({@code Supplier.get()} cannot throw checked exceptions.)
     */
    @Override
    public <E, T> Result<E, T> executeAtomically(Supplier<Result<E, T>> work) {
        var status = txManager.getTransaction(new DefaultTransactionDefinition());
        Result<E, T> result;
        try {
            result = work.get();
        } catch (RuntimeException | Error e) {
            txManager.rollback(status);
            throw e;
        }
        if (result.isFailure()) {
            txManager.rollback(status);
        } else {
            txManager.commit(status);
        }
        return result;
    }
}

View file

@ -3,10 +3,9 @@ package de.effigenix.infrastructure.production.persistence;
import de.effigenix.domain.production.BatchError;
import de.effigenix.domain.production.BatchNumber;
import de.effigenix.domain.production.BatchNumberGenerator;
import de.effigenix.infrastructure.production.persistence.entity.BatchNumberSequenceEntity;
import de.effigenix.infrastructure.production.persistence.repository.BatchNumberSequenceJpaRepository;
import de.effigenix.shared.common.Result;
import org.springframework.context.annotation.Profile;
import org.springframework.jdbc.core.simple.JdbcClient;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
@ -14,31 +13,44 @@ import java.time.LocalDate;
@Component
@Profile("!no-db")
public class JpaBatchNumberGenerator implements BatchNumberGenerator {
public class JdbcBatchNumberGenerator implements BatchNumberGenerator {
private final BatchNumberSequenceJpaRepository sequenceRepository;
private final JdbcClient jdbc;
public JpaBatchNumberGenerator(BatchNumberSequenceJpaRepository sequenceRepository) {
this.sequenceRepository = sequenceRepository;
public JdbcBatchNumberGenerator(JdbcClient jdbc) {
this.jdbc = jdbc;
}
@Override
@Transactional
public Result<BatchError, BatchNumber> generateNext(LocalDate date) {
try {
var sequence = sequenceRepository.findByProductionDate(date);
var existing = jdbc.sql(
"SELECT last_sequence FROM batch_number_sequences WHERE production_date = :date FOR UPDATE")
.param("date", date)
.query(Integer.class)
.optional();
int nextSequence;
if (sequence.isPresent()) {
nextSequence = sequence.get().getLastSequence() + 1;
sequence.get().setLastSequence(nextSequence);
if (existing.isPresent()) {
nextSequence = existing.get() + 1;
jdbc.sql("UPDATE batch_number_sequences SET last_sequence = :seq WHERE production_date = :date")
.param("seq", nextSequence)
.param("date", date)
.update();
} else {
nextSequence = 1;
sequenceRepository.save(new BatchNumberSequenceEntity(date, 1));
jdbc.sql("INSERT INTO batch_number_sequences (production_date, last_sequence) VALUES (:date, :seq)")
.param("date", date)
.param("seq", 1)
.update();
}
if (nextSequence > 999) {
return Result.failure(new BatchError.ValidationFailure(
"Maximum batch number sequence (999) reached for date " + date));
}
return Result.success(BatchNumber.generate(date, nextSequence));
} catch (Exception e) {
return Result.failure(new BatchError.RepositoryFailure(e.getMessage()));

View file

@ -0,0 +1,370 @@
package de.effigenix.infrastructure.production.persistence;
import de.effigenix.domain.masterdata.ArticleId;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Quantity;
import de.effigenix.shared.common.RepositoryError;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.common.UnitOfMeasure;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Profile;
import org.springframework.jdbc.core.simple.JdbcClient;
import org.springframework.stereotype.Repository;
import java.math.BigDecimal;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.time.LocalDate;
import java.time.OffsetDateTime;
import java.util.List;
import java.util.Optional;
@Repository
@Profile("!no-db")
public class JdbcBatchRepository implements BatchRepository {

    private static final Logger logger = LoggerFactory.getLogger(JdbcBatchRepository.class);

    private final JdbcClient jdbc;

    public JdbcBatchRepository(JdbcClient jdbc) {
        this.jdbc = jdbc;
    }

    /** Loads one batch by id, fully hydrated with its consumptions. */
    @Override
    public Result<RepositoryError, Optional<Batch>> findById(BatchId id) {
        try {
            var batchOpt = jdbc.sql("SELECT * FROM batches WHERE id = :id")
                    .param("id", id.value())
                    .query(this::mapBatchRow)
                    .optional();
            if (batchOpt.isEmpty()) {
                return Result.success(Optional.empty());
            }
            var consumptions = loadConsumptions(id.value());
            return Result.success(Optional.of(reconstituteWithConsumptions(batchOpt.get(), consumptions)));
        } catch (Exception e) {
            logger.trace("Database error in findById", e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }

    /** All batches, newest first, each hydrated with consumptions (N+1 per batch). */
    @Override
    public Result<RepositoryError, List<Batch>> findAll() {
        try {
            var batches = jdbc.sql("SELECT * FROM batches ORDER BY created_at DESC")
                    .query(this::mapBatchRow)
                    .list()
                    .stream()
                    .map(this::loadWithConsumptions)
                    .toList();
            return Result.success(batches);
        } catch (Exception e) {
            logger.trace("Database error in findAll", e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }

    /** Lookup by unique batch number, hydrated with consumptions. */
    @Override
    public Result<RepositoryError, Optional<Batch>> findByBatchNumber(BatchNumber batchNumber) {
        try {
            var batchOpt = jdbc.sql("SELECT * FROM batches WHERE batch_number = :batchNumber")
                    .param("batchNumber", batchNumber.value())
                    .query(this::mapBatchRow)
                    .optional();
            if (batchOpt.isEmpty()) {
                return Result.success(Optional.empty());
            }
            var consumptions = loadConsumptions(batchOpt.get().id().value());
            return Result.success(Optional.of(reconstituteWithConsumptions(batchOpt.get(), consumptions)));
        } catch (Exception e) {
            logger.trace("Database error in findByBatchNumber", e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }

    /** Batches in the given status, newest first, hydrated with consumptions. */
    @Override
    public Result<RepositoryError, List<Batch>> findByStatus(BatchStatus status) {
        try {
            var batches = jdbc.sql("SELECT * FROM batches WHERE status = :status ORDER BY created_at DESC")
                    .param("status", status.name())
                    .query(this::mapBatchRow)
                    .list()
                    .stream()
                    .map(this::loadWithConsumptions)
                    .toList();
            return Result.success(batches);
        } catch (Exception e) {
            logger.trace("Database error in findByStatus", e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }

    /** Batches produced on the given date, newest first, hydrated with consumptions. */
    @Override
    public Result<RepositoryError, List<Batch>> findByProductionDate(LocalDate date) {
        try {
            var batches = jdbc.sql("SELECT * FROM batches WHERE production_date = :date ORDER BY created_at DESC")
                    .param("date", date)
                    .query(this::mapBatchRow)
                    .list()
                    .stream()
                    .map(this::loadWithConsumptions)
                    .toList();
            return Result.success(batches);
        } catch (Exception e) {
            logger.trace("Database error in findByProductionDate", e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }

    /** Batches whose recipe is one of {@code recipeIds}; empty input yields an empty list. */
    @Override
    public Result<RepositoryError, List<Batch>> findByRecipeIds(List<RecipeId> recipeIds) {
        try {
            // Guard: binding an empty collection to "IN (:ids)" produces an
            // invalid "IN ()" statement and is rejected by the parameter
            // binding — short-circuit to the semantically correct empty result.
            if (recipeIds.isEmpty()) {
                return Result.success(List.of());
            }
            List<String> ids = recipeIds.stream().map(RecipeId::value).toList();
            var batches = jdbc.sql("SELECT * FROM batches WHERE recipe_id IN (:ids) ORDER BY created_at DESC")
                    .param("ids", ids)
                    .query(this::mapBatchRow)
                    .list()
                    .stream()
                    .map(this::loadWithConsumptions)
                    .toList();
            return Result.success(batches);
        } catch (Exception e) {
            logger.trace("Database error in findByRecipeIds", e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }

    /** All batches WITHOUT consumptions (summary projection, no N+1 queries). */
    @Override
    public Result<RepositoryError, List<Batch>> findAllSummary() {
        try {
            var batches = jdbc.sql("SELECT * FROM batches ORDER BY created_at DESC")
                    .query(this::mapBatchRow)
                    .list();
            return Result.success(batches);
        } catch (Exception e) {
            logger.trace("Database error in findAllSummary", e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }

    /** Summary projection (no consumptions) filtered by status. */
    @Override
    public Result<RepositoryError, List<Batch>> findByStatusSummary(BatchStatus status) {
        try {
            var batches = jdbc.sql("SELECT * FROM batches WHERE status = :status ORDER BY created_at DESC")
                    .param("status", status.name())
                    .query(this::mapBatchRow)
                    .list();
            return Result.success(batches);
        } catch (Exception e) {
            logger.trace("Database error in findByStatusSummary", e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }

    /** Summary projection (no consumptions) filtered by production date. */
    @Override
    public Result<RepositoryError, List<Batch>> findByProductionDateSummary(LocalDate date) {
        try {
            var batches = jdbc.sql("SELECT * FROM batches WHERE production_date = :date ORDER BY created_at DESC")
                    .param("date", date)
                    .query(this::mapBatchRow)
                    .list();
            return Result.success(batches);
        } catch (Exception e) {
            logger.trace("Database error in findByProductionDateSummary", e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }

    /** Summary projection filtered by recipe ids; empty input yields an empty list. */
    @Override
    public Result<RepositoryError, List<Batch>> findByRecipeIdsSummary(List<RecipeId> recipeIds) {
        try {
            // Same empty-"IN ()" guard as findByRecipeIds.
            if (recipeIds.isEmpty()) {
                return Result.success(List.of());
            }
            List<String> ids = recipeIds.stream().map(RecipeId::value).toList();
            var batches = jdbc.sql("SELECT * FROM batches WHERE recipe_id IN (:ids) ORDER BY created_at DESC")
                    .param("ids", ids)
                    .query(this::mapBatchRow)
                    .list();
            return Result.success(batches);
        } catch (Exception e) {
            logger.trace("Database error in findByRecipeIdsSummary", e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }

    /**
     * Upsert with optimistic locking: try the version-guarded UPDATE first;
     * when no row matched, distinguish "stale version" (row exists → concurrent
     * modification) from "new aggregate" (INSERT at version 0). New
     * consumptions are appended afterwards.
     */
    @Override
    public Result<RepositoryError, Void> save(Batch batch) {
        try {
            int rows = jdbc.sql("""
                    UPDATE batches
                    SET status = :status, actual_quantity_amount = :actualQuantityAmount,
                        actual_quantity_unit = :actualQuantityUnit, waste_amount = :wasteAmount,
                        waste_unit = :wasteUnit, remarks = :remarks, updated_at = :updatedAt,
                        completed_at = :completedAt, cancellation_reason = :cancellationReason,
                        cancelled_at = :cancelledAt, version = version + 1
                    WHERE id = :id AND version = :version
                    """)
                    .param("id", batch.id().value())
                    .param("status", batch.status().name())
                    .param("actualQuantityAmount", batch.actualQuantity() != null ? batch.actualQuantity().amount() : null)
                    .param("actualQuantityUnit", batch.actualQuantity() != null ? batch.actualQuantity().uom().name() : null)
                    .param("wasteAmount", batch.waste() != null ? batch.waste().amount() : null)
                    .param("wasteUnit", batch.waste() != null ? batch.waste().uom().name() : null)
                    .param("remarks", batch.remarks())
                    .param("updatedAt", batch.updatedAt())
                    .param("completedAt", batch.completedAt())
                    .param("cancellationReason", batch.cancellationReason())
                    .param("cancelledAt", batch.cancelledAt())
                    .param("version", batch.version())
                    .update();
            if (rows == 0) {
                boolean exists = jdbc.sql("SELECT COUNT(*) FROM batches WHERE id = :id")
                        .param("id", batch.id().value())
                        .query(Integer.class)
                        .single() > 0;
                if (exists) {
                    // Row present but version mismatch → someone else won the race.
                    return Result.failure(new RepositoryError.ConcurrentModification(
                            "Batch was modified by another transaction"));
                }
                jdbc.sql("""
                        INSERT INTO batches
                        (id, batch_number, recipe_id, status, planned_quantity_amount, planned_quantity_unit,
                         production_date, best_before_date, actual_quantity_amount, actual_quantity_unit,
                         waste_amount, waste_unit, remarks, created_at, updated_at,
                         completed_at, cancellation_reason, cancelled_at, version)
                        VALUES (:id, :batchNumber, :recipeId, :status, :plannedQuantityAmount, :plannedQuantityUnit,
                                :productionDate, :bestBeforeDate, :actualQuantityAmount, :actualQuantityUnit,
                                :wasteAmount, :wasteUnit, :remarks, :createdAt, :updatedAt,
                                :completedAt, :cancellationReason, :cancelledAt, 0)
                        """)
                        .param("id", batch.id().value())
                        .param("batchNumber", batch.batchNumber().value())
                        .param("recipeId", batch.recipeId().value())
                        .param("status", batch.status().name())
                        .param("plannedQuantityAmount", batch.plannedQuantity().amount())
                        .param("plannedQuantityUnit", batch.plannedQuantity().uom().name())
                        .param("productionDate", batch.productionDate())
                        .param("bestBeforeDate", batch.bestBeforeDate())
                        .param("actualQuantityAmount", batch.actualQuantity() != null ? batch.actualQuantity().amount() : null)
                        .param("actualQuantityUnit", batch.actualQuantity() != null ? batch.actualQuantity().uom().name() : null)
                        .param("wasteAmount", batch.waste() != null ? batch.waste().amount() : null)
                        .param("wasteUnit", batch.waste() != null ? batch.waste().uom().name() : null)
                        .param("remarks", batch.remarks())
                        .param("createdAt", batch.createdAt())
                        .param("updatedAt", batch.updatedAt())
                        .param("completedAt", batch.completedAt())
                        .param("cancellationReason", batch.cancellationReason())
                        .param("cancelledAt", batch.cancelledAt())
                        .update();
            }
            saveConsumptions(batch);
            return Result.success(null);
        } catch (Exception e) {
            logger.trace("Database error in save", e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }

    /**
     * Appends consumptions not yet persisted. Consumptions are treated as
     * append-only facts: existing rows are never updated or deleted here.
     */
    private void saveConsumptions(Batch batch) {
        var existingIds = jdbc.sql("SELECT id FROM batch_consumptions WHERE batch_id = :batchId")
                .param("batchId", batch.id().value())
                .query((rs, rowNum) -> rs.getString("id"))
                .list();
        var existingIdSet = new java.util.HashSet<>(existingIds);
        for (Consumption c : batch.consumptions()) {
            if (!existingIdSet.contains(c.id().value())) {
                jdbc.sql("""
                        INSERT INTO batch_consumptions
                        (id, batch_id, input_batch_id, article_id, quantity_used_amount, quantity_used_unit, consumed_at)
                        VALUES (:id, :batchId, :inputBatchId, :articleId, :quantityUsedAmount, :quantityUsedUnit, :consumedAt)
                        """)
                        .param("id", c.id().value())
                        .param("batchId", batch.id().value())
                        .param("inputBatchId", c.inputBatchId().value())
                        .param("articleId", c.articleId().value())
                        .param("quantityUsedAmount", c.quantityUsed().amount())
                        .param("quantityUsedUnit", c.quantityUsed().uom().name())
                        .param("consumedAt", c.consumedAt())
                        .update();
            }
        }
    }

    /** Child rows for one batch, in consumption order. */
    private List<Consumption> loadConsumptions(String batchId) {
        return jdbc.sql("SELECT * FROM batch_consumptions WHERE batch_id = :batchId ORDER BY consumed_at")
                .param("batchId", batchId)
                .query(this::mapConsumptionRow)
                .list();
    }

    /** Hydrates a summary row (mapped with an empty consumption list) into a full aggregate. */
    private Batch loadWithConsumptions(Batch summary) {
        var consumptions = loadConsumptions(summary.id().value());
        return reconstituteWithConsumptions(summary, consumptions);
    }

    /** Rebuilds the aggregate with the given consumptions, copying all other state verbatim. */
    private Batch reconstituteWithConsumptions(Batch batch, List<Consumption> consumptions) {
        return Batch.reconstitute(
                batch.id(), batch.batchNumber(), batch.recipeId(), batch.status(),
                batch.plannedQuantity(), batch.actualQuantity(), batch.waste(),
                batch.remarks(), batch.productionDate(), batch.bestBeforeDate(),
                batch.createdAt(), batch.updatedAt(), batch.completedAt(),
                batch.cancellationReason(), batch.cancelledAt(), batch.version(),
                consumptions
        );
    }

    /** Maps one batches row; nullable quantity pairs collapse to null Quantity. Consumptions start empty. */
    private Batch mapBatchRow(ResultSet rs, int rowNum) throws SQLException {
        BigDecimal actualQtyAmount = rs.getBigDecimal("actual_quantity_amount");
        Quantity actualQuantity = actualQtyAmount != null
                ? Quantity.reconstitute(actualQtyAmount, UnitOfMeasure.valueOf(rs.getString("actual_quantity_unit")))
                : null;
        BigDecimal wasteAmount = rs.getBigDecimal("waste_amount");
        Quantity waste = wasteAmount != null
                ? Quantity.reconstitute(wasteAmount, UnitOfMeasure.valueOf(rs.getString("waste_unit")))
                : null;
        return Batch.reconstitute(
                BatchId.of(rs.getString("id")),
                new BatchNumber(rs.getString("batch_number")),
                RecipeId.of(rs.getString("recipe_id")),
                BatchStatus.valueOf(rs.getString("status")),
                Quantity.reconstitute(
                        rs.getBigDecimal("planned_quantity_amount"),
                        UnitOfMeasure.valueOf(rs.getString("planned_quantity_unit"))
                ),
                actualQuantity,
                waste,
                rs.getString("remarks"),
                rs.getObject("production_date", LocalDate.class),
                rs.getObject("best_before_date", LocalDate.class),
                rs.getObject("created_at", OffsetDateTime.class),
                rs.getObject("updated_at", OffsetDateTime.class),
                rs.getObject("completed_at", OffsetDateTime.class),
                rs.getString("cancellation_reason"),
                rs.getObject("cancelled_at", OffsetDateTime.class),
                rs.getLong("version"),
                List.of()
        );
    }

    /** Maps one batch_consumptions row. */
    private Consumption mapConsumptionRow(ResultSet rs, int rowNum) throws SQLException {
        return Consumption.reconstitute(
                ConsumptionId.of(rs.getString("id")),
                BatchId.of(rs.getString("input_batch_id")),
                ArticleId.of(rs.getString("article_id")),
                Quantity.reconstitute(
                        rs.getBigDecimal("quantity_used_amount"),
                        UnitOfMeasure.valueOf(rs.getString("quantity_used_unit"))
                ),
                rs.getObject("consumed_at", OffsetDateTime.class)
        );
    }
}

View file

@ -0,0 +1,134 @@
package de.effigenix.infrastructure.production.persistence;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Quantity;
import de.effigenix.shared.common.RepositoryError;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.common.UnitOfMeasure;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Profile;
import org.springframework.jdbc.core.simple.JdbcClient;
import org.springframework.stereotype.Repository;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.time.OffsetDateTime;
import java.util.List;
import java.util.Optional;
/**
 * JdbcClient-based adapter for {@link ProductionOrderRepository}.
 * All data-access failures are returned as {@code Result.failure} with a
 * {@link RepositoryError}; exceptions never escape to callers.
 */
@Repository
@Profile("!no-db")
public class JdbcProductionOrderRepository implements ProductionOrderRepository {
private static final Logger logger = LoggerFactory.getLogger(JdbcProductionOrderRepository.class);
private final JdbcClient jdbc;
public JdbcProductionOrderRepository(JdbcClient jdbc) {
this.jdbc = jdbc;
}
/** Loads one production order by id; empty Optional when the id is unknown. */
@Override
public Result<RepositoryError, Optional<ProductionOrder>> findById(ProductionOrderId id) {
try {
var order = jdbc.sql("SELECT * FROM production_orders WHERE id = :id")
.param("id", id.value())
.query(this::mapRow)
.optional();
return Result.success(order);
} catch (Exception e) {
logger.trace("Database error in findById", e);
return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
}
}
/** All production orders, newest first. */
@Override
public Result<RepositoryError, List<ProductionOrder>> findAll() {
try {
var orders = jdbc.sql("SELECT * FROM production_orders ORDER BY created_at DESC")
.query(this::mapRow)
.list();
return Result.success(orders);
} catch (Exception e) {
logger.trace("Database error in findAll", e);
return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
}
}
/**
 * Upsert with optimistic locking. The version-guarded UPDATE runs first;
 * when it matches no row, a row that nevertheless exists means a stale
 * version (concurrent modification), otherwise the order is new and is
 * INSERTed at version 0.
 */
@Override
public Result<RepositoryError, Void> save(ProductionOrder order) {
try {
// Version check and increment happen atomically in the WHERE/SET clause.
int rows = jdbc.sql("""
UPDATE production_orders
SET status = :status, batch_id = :batchId, priority = :priority,
notes = :notes, updated_at = :updatedAt, version = version + 1
WHERE id = :id AND version = :version
""")
.param("id", order.id().value())
.param("status", order.status().name())
.param("batchId", order.batchId() != null ? order.batchId().value() : null)
.param("priority", order.priority().name())
.param("notes", order.notes())
.param("updatedAt", order.updatedAt())
.param("version", order.version())
.update();
if (rows == 0) {
boolean exists = jdbc.sql("SELECT COUNT(*) FROM production_orders WHERE id = :id")
.param("id", order.id().value())
.query(Integer.class)
.single() > 0;
if (exists) {
// Row present but version differs -> another transaction won the race.
return Result.failure(new RepositoryError.ConcurrentModification(
"Production order was modified by another transaction"));
}
jdbc.sql("""
INSERT INTO production_orders
(id, recipe_id, status, planned_quantity_amount, planned_quantity_unit,
planned_date, priority, batch_id, notes, created_at, updated_at, version)
VALUES (:id, :recipeId, :status, :plannedQuantityAmount, :plannedQuantityUnit,
:plannedDate, :priority, :batchId, :notes, :createdAt, :updatedAt, 0)
""")
.param("id", order.id().value())
.param("recipeId", order.recipeId().value())
.param("status", order.status().name())
.param("plannedQuantityAmount", order.plannedQuantity().amount())
.param("plannedQuantityUnit", order.plannedQuantity().uom().name())
.param("plannedDate", order.plannedDate())
.param("priority", order.priority().name())
.param("batchId", order.batchId() != null ? order.batchId().value() : null)
.param("notes", order.notes())
.param("createdAt", order.createdAt())
.param("updatedAt", order.updatedAt())
.update();
}
return Result.success(null);
} catch (Exception e) {
logger.trace("Database error in save", e);
return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
}
}
/** Maps one production_orders row to the domain aggregate; batch_id is nullable. */
private ProductionOrder mapRow(ResultSet rs, int rowNum) throws SQLException {
return ProductionOrder.reconstitute(
ProductionOrderId.of(rs.getString("id")),
RecipeId.of(rs.getString("recipe_id")),
ProductionOrderStatus.valueOf(rs.getString("status")),
rs.getString("batch_id") != null ? BatchId.of(rs.getString("batch_id")) : null,
Quantity.reconstitute(
rs.getBigDecimal("planned_quantity_amount"),
UnitOfMeasure.valueOf(rs.getString("planned_quantity_unit"))
),
rs.getObject("planned_date", java.time.LocalDate.class),
Priority.valueOf(rs.getString("priority")),
rs.getString("notes"),
rs.getObject("created_at", OffsetDateTime.class),
rs.getObject("updated_at", OffsetDateTime.class),
rs.getLong("version")
);
}
}

View file

@ -0,0 +1,297 @@
package de.effigenix.infrastructure.production.persistence;
import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Quantity;
import de.effigenix.shared.common.RepositoryError;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.common.UnitOfMeasure;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Profile;
import org.springframework.jdbc.core.simple.JdbcClient;
import org.springframework.stereotype.Repository;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.time.OffsetDateTime;
import java.util.List;
import java.util.Optional;
@Repository
@Profile("!no-db")
public class JdbcRecipeRepository implements RecipeRepository {
private static final Logger logger = LoggerFactory.getLogger(JdbcRecipeRepository.class);
private final JdbcClient jdbc;
public JdbcRecipeRepository(JdbcClient jdbc) {
this.jdbc = jdbc;
}
/** Loads one recipe by id, hydrated with ingredients and steps; empty when unknown. */
@Override
public Result<RepositoryError, Optional<Recipe>> findById(RecipeId id) {
    try {
        // Hydrate children only when a row was actually found.
        var hydrated = jdbc.sql("SELECT * FROM recipes WHERE id = :id")
                .param("id", id.value())
                .query(this::mapRecipeRow)
                .optional()
                .map(this::loadChildren);
        return Result.success(hydrated);
    } catch (Exception e) {
        logger.trace("Database error in findById", e);
        return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
    }
}
/** All recipes ordered by name and version, each hydrated with its children. */
@Override
public Result<RepositoryError, List<Recipe>> findAll() {
    try {
        var summaries = jdbc.sql("SELECT * FROM recipes ORDER BY name, version")
                .query(this::mapRecipeRow)
                .list();
        var hydrated = new java.util.ArrayList<Recipe>(summaries.size());
        for (var summary : summaries) {
            hydrated.add(loadChildren(summary));
        }
        return Result.success(List.copyOf(hydrated));
    } catch (Exception e) {
        logger.trace("Database error in findAll", e);
        return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
    }
}
/**
 * Upserts the recipe header, then rewrites its child tables.
 * Update-then-insert: an UPDATE that matches no row means the recipe is new.
 * NOTE(review): unlike the batch/order repositories there is no optimistic
 * lock here — "version" appears to be the recipe's business version, not a
 * concurrency token; confirm that lost updates are acceptable for recipes.
 */
@Override
public Result<RepositoryError, Void> save(Recipe recipe) {
try {
int rows = jdbc.sql("""
UPDATE recipes
SET name = :name, version = :version, type = :type, description = :description,
yield_percentage = :yieldPercentage, shelf_life_days = :shelfLifeDays,
output_quantity = :outputQuantity, output_uom = :outputUom,
article_id = :articleId, status = :status, updated_at = :updatedAt
WHERE id = :id
""")
.param("id", recipe.id().value())
.param("name", recipe.name().value())
.param("version", recipe.version())
.param("type", recipe.type().name())
.param("description", recipe.description())
.param("yieldPercentage", recipe.yieldPercentage().value())
.param("shelfLifeDays", recipe.shelfLifeDays())
.param("outputQuantity", recipe.outputQuantity().amount())
.param("outputUom", recipe.outputQuantity().uom().name())
.param("articleId", recipe.articleId())
.param("status", recipe.status().name())
.param("updatedAt", recipe.updatedAt())
.update();
if (rows == 0) {
// No existing row -> first save of this aggregate.
jdbc.sql("""
INSERT INTO recipes
(id, name, version, type, description, yield_percentage, shelf_life_days,
output_quantity, output_uom, article_id, status, created_at, updated_at)
VALUES (:id, :name, :version, :type, :description, :yieldPercentage, :shelfLifeDays,
:outputQuantity, :outputUom, :articleId, :status, :createdAt, :updatedAt)
""")
.param("id", recipe.id().value())
.param("name", recipe.name().value())
.param("version", recipe.version())
.param("type", recipe.type().name())
.param("description", recipe.description())
.param("yieldPercentage", recipe.yieldPercentage().value())
.param("shelfLifeDays", recipe.shelfLifeDays())
.param("outputQuantity", recipe.outputQuantity().amount())
.param("outputUom", recipe.outputQuantity().uom().name())
.param("articleId", recipe.articleId())
.param("status", recipe.status().name())
.param("createdAt", recipe.createdAt())
.param("updatedAt", recipe.updatedAt())
.update();
}
// Children are synced by full delete-and-reinsert (see helpers below).
saveIngredients(recipe);
saveProductionSteps(recipe);
return Result.success(null);
} catch (Exception e) {
logger.trace("Database error in save", e);
return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
}
}
/** Deletes the recipe row; child rows are expected to cascade at the schema level. */
@Override
public Result<RepositoryError, Void> delete(Recipe recipe) {
    try {
        var recipeId = recipe.id().value();
        jdbc.sql("DELETE FROM recipes WHERE id = :id").param("id", recipeId).update();
        return Result.success(null);
    } catch (Exception e) {
        logger.trace("Database error in delete", e);
        return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
    }
}
/** True when a recipe with this exact (name, version) pair is already stored. */
@Override
public Result<RepositoryError, Boolean> existsByNameAndVersion(String name, int version) {
    try {
        var matches = jdbc.sql("SELECT COUNT(*) FROM recipes WHERE name = :name AND version = :version")
                .param("name", name)
                .param("version", version)
                .query(Integer.class)
                .single();
        return Result.success(matches > 0);
    } catch (Exception e) {
        logger.trace("Database error in existsByNameAndVersion", e);
        return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
    }
}
/** Recipes in the given lifecycle status, ordered by name and version, fully hydrated. */
@Override
public Result<RepositoryError, List<Recipe>> findByStatus(RecipeStatus status) {
    try {
        var summaries = jdbc.sql("SELECT * FROM recipes WHERE status = :status ORDER BY name, version")
                .param("status", status.name())
                .query(this::mapRecipeRow)
                .list();
        var hydrated = new java.util.ArrayList<Recipe>(summaries.size());
        for (var summary : summaries) {
            hydrated.add(loadChildren(summary));
        }
        return Result.success(List.copyOf(hydrated));
    } catch (Exception e) {
        logger.trace("Database error in findByStatus", e);
        return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
    }
}
/** Recipes producing the given article, ordered by name and version, fully hydrated. */
@Override
public Result<RepositoryError, List<Recipe>> findByArticleId(String articleId) {
    try {
        var summaries = jdbc.sql("SELECT * FROM recipes WHERE article_id = :articleId ORDER BY name, version")
                .param("articleId", articleId)
                .query(this::mapRecipeRow)
                .list();
        var hydrated = new java.util.ArrayList<Recipe>(summaries.size());
        for (var summary : summaries) {
            hydrated.add(loadChildren(summary));
        }
        return Result.success(List.copyOf(hydrated));
    } catch (Exception e) {
        logger.trace("Database error in findByArticleId", e);
        return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
    }
}
/**
 * Syncs ingredient rows by full delete-and-reinsert — simple and correct for
 * small child sets, at the cost of churning ids/rows on every save.
 * Must run inside the caller's transaction to avoid a window with no children.
 */
private void saveIngredients(Recipe recipe) {
jdbc.sql("DELETE FROM recipe_ingredients WHERE recipe_id = :recipeId")
.param("recipeId", recipe.id().value())
.update();
for (Ingredient ingredient : recipe.ingredients()) {
jdbc.sql("""
INSERT INTO recipe_ingredients
(id, recipe_id, position, article_id, quantity, uom, sub_recipe_id, substitutable)
VALUES (:id, :recipeId, :position, :articleId, :quantity, :uom, :subRecipeId, :substitutable)
""")
.param("id", ingredient.id().value())
.param("recipeId", recipe.id().value())
.param("position", ingredient.position())
.param("articleId", ingredient.articleId())
.param("quantity", ingredient.quantity().amount())
.param("uom", ingredient.quantity().uom().name())
.param("subRecipeId", ingredient.subRecipeId())
.param("substitutable", ingredient.substitutable())
.update();
}
}
/**
 * Persists the recipe's production steps with a delete-and-reinsert strategy:
 * all existing rows for the recipe are removed, then the current in-memory
 * state is written back row by row.
 */
private void saveProductionSteps(Recipe recipe) {
    var recipeId = recipe.id().value();
    jdbc.sql("DELETE FROM recipe_production_steps WHERE recipe_id = :recipeId")
            .param("recipeId", recipeId)
            .update();
    var insertSql = """
            INSERT INTO recipe_production_steps
            (id, recipe_id, step_number, description, duration_minutes, temperature_celsius)
            VALUES (:id, :recipeId, :stepNumber, :description, :durationMinutes, :temperatureCelsius)
            """;
    for (var step : recipe.productionSteps()) {
        jdbc.sql(insertSql)
                .param("id", step.id().value())
                .param("recipeId", recipeId)
                .param("stepNumber", step.stepNumber())
                .param("description", step.description())
                .param("durationMinutes", step.durationMinutes())
                .param("temperatureCelsius", step.temperatureCelsius())
                .update();
    }
}
/**
 * Returns a copy of the given recipe with its child collections (ingredients,
 * production steps) loaded from their tables. The scalar recipe fields are
 * carried over unchanged.
 */
private Recipe loadChildren(Recipe recipe) {
    var recipeId = recipe.id().value();
    // The two child queries are independent of each other.
    var steps = jdbc.sql(
            "SELECT * FROM recipe_production_steps WHERE recipe_id = :recipeId ORDER BY step_number ASC")
            .param("recipeId", recipeId)
            .query(this::mapStepRow)
            .list();
    var ingredients = jdbc.sql(
            "SELECT * FROM recipe_ingredients WHERE recipe_id = :recipeId ORDER BY position ASC")
            .param("recipeId", recipeId)
            .query(this::mapIngredientRow)
            .list();
    return Recipe.reconstitute(
            recipe.id(), recipe.name(), recipe.version(), recipe.type(),
            recipe.description(), recipe.yieldPercentage(), recipe.shelfLifeDays(),
            recipe.outputQuantity(), recipe.articleId(), recipe.status(),
            ingredients, steps, recipe.createdAt(), recipe.updatedAt()
    );
}
/**
 * Maps one {@code recipes} row to a {@link Recipe} aggregate.
 * Child collections are deliberately left empty here; {@code loadChildren}
 * fills them in for callers that need the full aggregate.
 */
private Recipe mapRecipeRow(ResultSet rs, int rowNum) throws SQLException {
return Recipe.reconstitute(
RecipeId.of(rs.getString("id")),
new RecipeName(rs.getString("name")),
rs.getInt("version"),
RecipeType.valueOf(rs.getString("type")),
rs.getString("description"),
new YieldPercentage(rs.getInt("yield_percentage")),
// getObject preserves SQL NULL as null (getInt would coerce it to 0).
rs.getObject("shelf_life_days", Integer.class),
Quantity.reconstitute(
rs.getBigDecimal("output_quantity"),
UnitOfMeasure.valueOf(rs.getString("output_uom"))
),
rs.getString("article_id"),
RecipeStatus.valueOf(rs.getString("status")),
List.of(),
List.of(),
rs.getObject("created_at", OffsetDateTime.class),
rs.getObject("updated_at", OffsetDateTime.class)
);
}
/** Maps one {@code recipe_ingredients} row to an {@link Ingredient} value. */
private Ingredient mapIngredientRow(ResultSet rs, int rowNum) throws SQLException {
return Ingredient.reconstitute(
IngredientId.of(rs.getString("id")),
rs.getInt("position"),
rs.getString("article_id"),
Quantity.reconstitute(
rs.getBigDecimal("quantity"),
UnitOfMeasure.valueOf(rs.getString("uom"))
),
// sub_recipe_id is nullable in the schema; getString returns null for NULL.
rs.getString("sub_recipe_id"),
rs.getBoolean("substitutable")
);
}
/** Maps one {@code recipe_production_steps} row to a {@link ProductionStep} value. */
private ProductionStep mapStepRow(ResultSet rs, int rowNum) throws SQLException {
return ProductionStep.reconstitute(
ProductionStepId.of(rs.getString("id")),
rs.getInt("step_number"),
rs.getString("description"),
// getObject preserves SQL NULL as null for the optional columns.
rs.getObject("duration_minutes", Integer.class),
rs.getObject("temperature_celsius", Integer.class)
);
}
}

View file

@ -1,135 +0,0 @@
package de.effigenix.infrastructure.production.persistence.entity;
import jakarta.persistence.*;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.OffsetDateTime;
import java.util.ArrayList;
import java.util.List;
/**
 * JPA entity for the {@code batches} table.
 * <p>
 * Pure persistence model with no domain behavior: conversion to/from the
 * {@code Batch} aggregate is done in {@code BatchMapper}.
 */
@Entity
@Table(name = "batches")
public class BatchEntity {
@Id
@Column(name = "id", nullable = false, length = 36)
private String id;
// Optimistic-locking version, managed by the JPA provider.
@Version
@Column(name = "version", nullable = false)
private Long version;
@Column(name = "batch_number", nullable = false, unique = true, length = 20)
private String batchNumber;
@Column(name = "recipe_id", nullable = false, length = 36)
private String recipeId;
@Column(name = "status", nullable = false, length = 20)
private String status;
// Quantities are stored as (amount, unit) column pairs.
@Column(name = "planned_quantity_amount", nullable = false, precision = 19, scale = 6)
private BigDecimal plannedQuantityAmount;
@Column(name = "planned_quantity_unit", nullable = false, length = 10)
private String plannedQuantityUnit;
@Column(name = "production_date", nullable = false)
private LocalDate productionDate;
@Column(name = "best_before_date", nullable = false)
private LocalDate bestBeforeDate;
// Nullable: only set once the batch has been completed.
@Column(name = "actual_quantity_amount", precision = 19, scale = 6)
private BigDecimal actualQuantityAmount;
@Column(name = "actual_quantity_unit", length = 10)
private String actualQuantityUnit;
@Column(name = "waste_amount", precision = 19, scale = 6)
private BigDecimal wasteAmount;
@Column(name = "waste_unit", length = 10)
private String wasteUnit;
@Column(name = "remarks", length = 500)
private String remarks;
@Column(name = "created_at", nullable = false, updatable = false)
private OffsetDateTime createdAt;
@Column(name = "updated_at", nullable = false)
private OffsetDateTime updatedAt;
@Column(name = "completed_at")
private OffsetDateTime completedAt;
// Nullable: only set when the batch was cancelled.
@Column(name = "cancellation_reason", length = 500)
private String cancellationReason;
@Column(name = "cancelled_at")
private OffsetDateTime cancelledAt;
// Child rows are owned by the batch: cascaded and orphan-removed.
@OneToMany(mappedBy = "batch", cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.LAZY)
private List<ConsumptionEntity> consumptions = new ArrayList<>();
// No-arg constructor required by JPA; not for application use.
protected BatchEntity() {}
public BatchEntity(
String id,
String batchNumber,
String recipeId,
String status,
BigDecimal plannedQuantityAmount,
String plannedQuantityUnit,
LocalDate productionDate,
LocalDate bestBeforeDate,
OffsetDateTime createdAt,
OffsetDateTime updatedAt
) {
this.id = id;
this.batchNumber = batchNumber;
this.recipeId = recipeId;
this.status = status;
this.plannedQuantityAmount = plannedQuantityAmount;
this.plannedQuantityUnit = plannedQuantityUnit;
this.productionDate = productionDate;
this.bestBeforeDate = bestBeforeDate;
this.createdAt = createdAt;
this.updatedAt = updatedAt;
}
public String getId() { return id; }
public Long getVersion() { return version; }
public String getBatchNumber() { return batchNumber; }
public String getRecipeId() { return recipeId; }
public String getStatus() { return status; }
public BigDecimal getPlannedQuantityAmount() { return plannedQuantityAmount; }
public String getPlannedQuantityUnit() { return plannedQuantityUnit; }
public LocalDate getProductionDate() { return productionDate; }
public LocalDate getBestBeforeDate() { return bestBeforeDate; }
public OffsetDateTime getCreatedAt() { return createdAt; }
public OffsetDateTime getUpdatedAt() { return updatedAt; }
public List<ConsumptionEntity> getConsumptions() { return consumptions; }
public BigDecimal getActualQuantityAmount() { return actualQuantityAmount; }
public String getActualQuantityUnit() { return actualQuantityUnit; }
public BigDecimal getWasteAmount() { return wasteAmount; }
public String getWasteUnit() { return wasteUnit; }
public String getRemarks() { return remarks; }
public OffsetDateTime getCompletedAt() { return completedAt; }
public String getCancellationReason() { return cancellationReason; }
public OffsetDateTime getCancelledAt() { return cancelledAt; }
// Setters cover only the fields that change after creation.
public void setStatus(String status) { this.status = status; }
public void setUpdatedAt(OffsetDateTime updatedAt) { this.updatedAt = updatedAt; }
public void setActualQuantityAmount(BigDecimal actualQuantityAmount) { this.actualQuantityAmount = actualQuantityAmount; }
public void setActualQuantityUnit(String actualQuantityUnit) { this.actualQuantityUnit = actualQuantityUnit; }
public void setWasteAmount(BigDecimal wasteAmount) { this.wasteAmount = wasteAmount; }
public void setWasteUnit(String wasteUnit) { this.wasteUnit = wasteUnit; }
public void setRemarks(String remarks) { this.remarks = remarks; }
public void setCompletedAt(OffsetDateTime completedAt) { this.completedAt = completedAt; }
public void setCancellationReason(String cancellationReason) { this.cancellationReason = cancellationReason; }
public void setCancelledAt(OffsetDateTime cancelledAt) { this.cancelledAt = cancelledAt; }
}

View file

@ -1,37 +0,0 @@
package de.effigenix.infrastructure.production.persistence.entity;
import jakarta.persistence.*;
import java.time.LocalDate;
/**
 * JPA entity for the {@code batch_number_sequences} table: one row per
 * production date holding the last sequence number handed out for that day.
 */
@Entity
@Table(name = "batch_number_sequences")
public class BatchNumberSequenceEntity {
// The production date itself is the primary key (one counter per day).
@Id
@Column(name = "production_date", nullable = false)
private LocalDate productionDate;
@Column(name = "last_sequence", nullable = false)
private int lastSequence;
// No-arg constructor required by JPA; not for application use.
protected BatchNumberSequenceEntity() {
}
public BatchNumberSequenceEntity(LocalDate productionDate, int lastSequence) {
this.productionDate = productionDate;
this.lastSequence = lastSequence;
}
public LocalDate getProductionDate() {
return productionDate;
}
public int getLastSequence() {
return lastSequence;
}
public void setLastSequence(int lastSequence) {
this.lastSequence = lastSequence;
}
}

View file

@ -1,55 +0,0 @@
package de.effigenix.infrastructure.production.persistence.entity;
import jakarta.persistence.*;
import java.math.BigDecimal;
import java.time.OffsetDateTime;
/**
 * JPA entity for the {@code batch_consumptions} table: records that a batch
 * consumed a quantity of an input batch/article. Owned by {@link BatchEntity}
 * (see its {@code @OneToMany} mapping); rows are immutable after creation —
 * there are no setters.
 */
@Entity
@Table(name = "batch_consumptions")
public class ConsumptionEntity {
@Id
@Column(name = "id", nullable = false, length = 36)
private String id;
// Owning side of the batch association.
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "batch_id", nullable = false)
private BatchEntity batch;
@Column(name = "input_batch_id", nullable = false, length = 36)
private String inputBatchId;
@Column(name = "article_id", nullable = false, length = 36)
private String articleId;
// Quantity stored as (amount, unit) column pair.
@Column(name = "quantity_used_amount", nullable = false, precision = 19, scale = 6)
private BigDecimal quantityUsedAmount;
@Column(name = "quantity_used_unit", nullable = false, length = 10)
private String quantityUsedUnit;
@Column(name = "consumed_at", nullable = false)
private OffsetDateTime consumedAt;
// No-arg constructor required by JPA; not for application use.
protected ConsumptionEntity() {}
public ConsumptionEntity(String id, BatchEntity batch, String inputBatchId, String articleId,
BigDecimal quantityUsedAmount, String quantityUsedUnit, OffsetDateTime consumedAt) {
this.id = id;
this.batch = batch;
this.inputBatchId = inputBatchId;
this.articleId = articleId;
this.quantityUsedAmount = quantityUsedAmount;
this.quantityUsedUnit = quantityUsedUnit;
this.consumedAt = consumedAt;
}
public String getId() { return id; }
public BatchEntity getBatch() { return batch; }
public String getInputBatchId() { return inputBatchId; }
public String getArticleId() { return articleId; }
public BigDecimal getQuantityUsedAmount() { return quantityUsedAmount; }
public String getQuantityUsedUnit() { return quantityUsedUnit; }
public OffsetDateTime getConsumedAt() { return consumedAt; }
}

View file

@ -1,68 +0,0 @@
package de.effigenix.infrastructure.production.persistence.entity;
import jakarta.persistence.*;
import java.math.BigDecimal;
/**
 * JPA entity for the {@code recipe_ingredients} table: one ingredient line of
 * a recipe. Owned by {@code RecipeEntity} (see its {@code @OneToMany} mapping).
 */
@Entity
@Table(name = "recipe_ingredients")
public class IngredientEntity {
@Id
@Column(name = "id", nullable = false, length = 36)
private String id;
// Owning side of the recipe association.
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "recipe_id", nullable = false)
private RecipeEntity recipe;
// Ordering of the ingredient within the recipe.
@Column(name = "position", nullable = false)
private int position;
@Column(name = "article_id", nullable = false, length = 36)
private String articleId;
@Column(name = "quantity", nullable = false, precision = 19, scale = 6)
private BigDecimal quantity;
@Column(name = "uom", nullable = false, length = 20)
private String uom;
// Nullable: set when the ingredient is itself produced by a sub-recipe.
@Column(name = "sub_recipe_id", length = 36)
private String subRecipeId;
@Column(name = "substitutable", nullable = false)
private boolean substitutable;
// No-arg constructor required by JPA; not for application use.
protected IngredientEntity() {}
public IngredientEntity(String id, RecipeEntity recipe, int position, String articleId,
BigDecimal quantity, String uom, String subRecipeId, boolean substitutable) {
this.id = id;
this.recipe = recipe;
this.position = position;
this.articleId = articleId;
this.quantity = quantity;
this.uom = uom;
this.subRecipeId = subRecipeId;
this.substitutable = substitutable;
}
public String getId() { return id; }
public RecipeEntity getRecipe() { return recipe; }
public int getPosition() { return position; }
public String getArticleId() { return articleId; }
public BigDecimal getQuantity() { return quantity; }
public String getUom() { return uom; }
public String getSubRecipeId() { return subRecipeId; }
public boolean isSubstitutable() { return substitutable; }
public void setId(String id) { this.id = id; }
public void setRecipe(RecipeEntity recipe) { this.recipe = recipe; }
public void setPosition(int position) { this.position = position; }
public void setArticleId(String articleId) { this.articleId = articleId; }
public void setQuantity(BigDecimal quantity) { this.quantity = quantity; }
public void setUom(String uom) { this.uom = uom; }
public void setSubRecipeId(String subRecipeId) { this.subRecipeId = subRecipeId; }
public void setSubstitutable(boolean substitutable) { this.substitutable = substitutable; }
}

View file

@ -1,95 +0,0 @@
package de.effigenix.infrastructure.production.persistence.entity;
import jakarta.persistence.*;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.OffsetDateTime;
/**
 * JPA entity for the {@code production_orders} table.
 * <p>
 * Pure persistence model with no domain behavior: conversion to/from the
 * {@code ProductionOrder} aggregate is done in {@code ProductionOrderMapper}.
 */
@Entity
@Table(name = "production_orders")
public class ProductionOrderEntity {
@Id
@Column(name = "id", nullable = false, length = 36)
private String id;
// Optimistic-locking version, managed by the JPA provider.
@Version
@Column(name = "version", nullable = false)
private Long version;
@Column(name = "recipe_id", nullable = false, length = 36)
private String recipeId;
@Column(name = "status", nullable = false, length = 20)
private String status;
// Quantity stored as (amount, unit) column pair.
@Column(name = "planned_quantity_amount", nullable = false, precision = 19, scale = 6)
private BigDecimal plannedQuantityAmount;
@Column(name = "planned_quantity_unit", nullable = false, length = 10)
private String plannedQuantityUnit;
@Column(name = "planned_date", nullable = false)
private LocalDate plannedDate;
@Column(name = "priority", nullable = false, length = 10)
private String priority;
// Nullable: set once a batch has been created for this order.
@Column(name = "batch_id", length = 36)
private String batchId;
@Column(name = "notes", length = 1000)
private String notes;
@Column(name = "created_at", nullable = false, updatable = false)
private OffsetDateTime createdAt;
@Column(name = "updated_at", nullable = false)
private OffsetDateTime updatedAt;
// No-arg constructor required by JPA; not for application use.
protected ProductionOrderEntity() {}
public ProductionOrderEntity(
String id,
String recipeId,
String status,
BigDecimal plannedQuantityAmount,
String plannedQuantityUnit,
LocalDate plannedDate,
String priority,
String notes,
OffsetDateTime createdAt,
OffsetDateTime updatedAt
) {
this.id = id;
this.recipeId = recipeId;
this.status = status;
this.plannedQuantityAmount = plannedQuantityAmount;
this.plannedQuantityUnit = plannedQuantityUnit;
this.plannedDate = plannedDate;
this.priority = priority;
this.notes = notes;
this.createdAt = createdAt;
this.updatedAt = updatedAt;
}
public String getId() { return id; }
public Long getVersion() { return version; }
public String getRecipeId() { return recipeId; }
public String getStatus() { return status; }
public BigDecimal getPlannedQuantityAmount() { return plannedQuantityAmount; }
public String getPlannedQuantityUnit() { return plannedQuantityUnit; }
public LocalDate getPlannedDate() { return plannedDate; }
public String getPriority() { return priority; }
public String getBatchId() { return batchId; }
public String getNotes() { return notes; }
public OffsetDateTime getCreatedAt() { return createdAt; }
public OffsetDateTime getUpdatedAt() { return updatedAt; }
// Setters cover only the fields that change after creation.
public void setStatus(String status) { this.status = status; }
public void setBatchId(String batchId) { this.batchId = batchId; }
public void setUpdatedAt(OffsetDateTime updatedAt) { this.updatedAt = updatedAt; }
public void setNotes(String notes) { this.notes = notes; }
public void setPriority(String priority) { this.priority = priority; }
}

View file

@ -1,55 +0,0 @@
package de.effigenix.infrastructure.production.persistence.entity;
import jakarta.persistence.*;
/**
 * JPA entity for the {@code recipe_production_steps} table: one numbered step
 * of a recipe. Step numbers are unique per recipe (enforced by the table's
 * unique constraint). Owned by {@code RecipeEntity}.
 */
@Entity
@Table(name = "recipe_production_steps",
uniqueConstraints = @UniqueConstraint(name = "uq_recipe_step_number", columnNames = {"recipe_id", "step_number"}))
public class ProductionStepEntity {
@Id
@Column(name = "id", nullable = false, length = 36)
private String id;
// Owning side of the recipe association.
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "recipe_id", nullable = false)
private RecipeEntity recipe;
@Column(name = "step_number", nullable = false)
private int stepNumber;
@Column(name = "description", nullable = false, length = 500)
private String description;
// Nullable: not every step has a duration or temperature.
@Column(name = "duration_minutes")
private Integer durationMinutes;
@Column(name = "temperature_celsius")
private Integer temperatureCelsius;
// No-arg constructor required by JPA; not for application use.
protected ProductionStepEntity() {}
public ProductionStepEntity(String id, RecipeEntity recipe, int stepNumber, String description,
Integer durationMinutes, Integer temperatureCelsius) {
this.id = id;
this.recipe = recipe;
this.stepNumber = stepNumber;
this.description = description;
this.durationMinutes = durationMinutes;
this.temperatureCelsius = temperatureCelsius;
}
public String getId() { return id; }
public RecipeEntity getRecipe() { return recipe; }
public int getStepNumber() { return stepNumber; }
public String getDescription() { return description; }
public Integer getDurationMinutes() { return durationMinutes; }
public Integer getTemperatureCelsius() { return temperatureCelsius; }
public void setId(String id) { this.id = id; }
public void setRecipe(RecipeEntity recipe) { this.recipe = recipe; }
public void setStepNumber(int stepNumber) { this.stepNumber = stepNumber; }
public void setDescription(String description) { this.description = description; }
public void setDurationMinutes(Integer durationMinutes) { this.durationMinutes = durationMinutes; }
public void setTemperatureCelsius(Integer temperatureCelsius) { this.temperatureCelsius = temperatureCelsius; }
}

View file

@ -1,114 +0,0 @@
package de.effigenix.infrastructure.production.persistence.entity;
import jakarta.persistence.*;
import java.math.BigDecimal;
import java.time.OffsetDateTime;
import java.util.ArrayList;
import java.util.List;
/**
 * JPA entity for the {@code recipes} table. Recipes are versioned: the
 * (name, version) pair is unique (table constraint below).
 * <p>
 * Pure persistence model with no domain behavior: conversion to/from the
 * {@code Recipe} aggregate is done in {@code RecipeMapper}.
 */
@Entity
@Table(name = "recipes",
uniqueConstraints = @UniqueConstraint(name = "uq_recipe_name_version", columnNames = {"name", "version"}))
public class RecipeEntity {
@Id
@Column(name = "id", nullable = false, length = 36)
private String id;
@Column(name = "name", nullable = false, length = 200)
private String name;
// Domain recipe version, not a JPA @Version lock column.
@Column(name = "version", nullable = false)
private int version;
@Column(name = "type", nullable = false, length = 30)
private String type;
@Column(name = "description", length = 2000)
private String description;
@Column(name = "yield_percentage", nullable = false)
private int yieldPercentage;
// Nullable: shelf life may be unspecified.
@Column(name = "shelf_life_days")
private Integer shelfLifeDays;
// Output quantity stored as (amount, unit-of-measure) column pair.
@Column(name = "output_quantity", nullable = false, precision = 19, scale = 6)
private BigDecimal outputQuantity;
@Column(name = "output_uom", nullable = false, length = 20)
private String outputUom;
@Column(name = "article_id", nullable = false, length = 36)
private String articleId;
@Column(name = "status", nullable = false, length = 20)
private String status;
@Column(name = "created_at", nullable = false, updatable = false)
private OffsetDateTime createdAt;
@Column(name = "updated_at", nullable = false)
private OffsetDateTime updatedAt;
// Child collections are owned by the recipe (cascade + orphan removal) and
// loaded eagerly in a deterministic order.
@OneToMany(mappedBy = "recipe", cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER)
@OrderBy("position ASC")
private List<IngredientEntity> ingredients = new ArrayList<>();
@OneToMany(mappedBy = "recipe", cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER)
@OrderBy("stepNumber ASC")
private List<ProductionStepEntity> productionSteps = new ArrayList<>();
// No-arg constructor required by JPA; not for application use.
protected RecipeEntity() {}
public RecipeEntity(String id, String name, int version, String type, String description,
int yieldPercentage, Integer shelfLifeDays, BigDecimal outputQuantity,
String outputUom, String articleId, String status, OffsetDateTime createdAt, OffsetDateTime updatedAt) {
this.id = id;
this.name = name;
this.version = version;
this.type = type;
this.description = description;
this.yieldPercentage = yieldPercentage;
this.shelfLifeDays = shelfLifeDays;
this.outputQuantity = outputQuantity;
this.outputUom = outputUom;
this.articleId = articleId;
this.status = status;
this.createdAt = createdAt;
this.updatedAt = updatedAt;
}
public String getId() { return id; }
public String getName() { return name; }
public int getVersion() { return version; }
public String getType() { return type; }
public String getDescription() { return description; }
public int getYieldPercentage() { return yieldPercentage; }
public Integer getShelfLifeDays() { return shelfLifeDays; }
public BigDecimal getOutputQuantity() { return outputQuantity; }
public String getOutputUom() { return outputUom; }
public String getArticleId() { return articleId; }
public String getStatus() { return status; }
public OffsetDateTime getCreatedAt() { return createdAt; }
public OffsetDateTime getUpdatedAt() { return updatedAt; }
public List<IngredientEntity> getIngredients() { return ingredients; }
public List<ProductionStepEntity> getProductionSteps() { return productionSteps; }
public void setId(String id) { this.id = id; }
public void setName(String name) { this.name = name; }
public void setVersion(int version) { this.version = version; }
public void setType(String type) { this.type = type; }
public void setDescription(String description) { this.description = description; }
public void setYieldPercentage(int yieldPercentage) { this.yieldPercentage = yieldPercentage; }
public void setShelfLifeDays(Integer shelfLifeDays) { this.shelfLifeDays = shelfLifeDays; }
public void setOutputQuantity(BigDecimal outputQuantity) { this.outputQuantity = outputQuantity; }
public void setOutputUom(String outputUom) { this.outputUom = outputUom; }
public void setArticleId(String articleId) { this.articleId = articleId; }
public void setStatus(String status) { this.status = status; }
public void setCreatedAt(OffsetDateTime createdAt) { this.createdAt = createdAt; }
public void setUpdatedAt(OffsetDateTime updatedAt) { this.updatedAt = updatedAt; }
public void setIngredients(List<IngredientEntity> ingredients) { this.ingredients = ingredients; }
public void setProductionSteps(List<ProductionStepEntity> productionSteps) { this.productionSteps = productionSteps; }
}

View file

@ -1,174 +0,0 @@
package de.effigenix.infrastructure.production.persistence.mapper;
import de.effigenix.domain.masterdata.ArticleId;
import de.effigenix.domain.production.*;
import de.effigenix.infrastructure.production.persistence.entity.BatchEntity;
import de.effigenix.infrastructure.production.persistence.entity.ConsumptionEntity;
import de.effigenix.shared.common.Quantity;
import de.effigenix.shared.common.UnitOfMeasure;
import org.springframework.stereotype.Component;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * Converts between the {@code Batch} domain aggregate and {@link BatchEntity}.
 * <p>
 * All reconstitution calls are positional; the argument order must match the
 * domain factory signatures exactly.
 */
@Component
public class BatchMapper {
/** Builds a fresh entity (including consumption children) from a new aggregate. */
public BatchEntity toEntity(Batch batch) {
var entity = new BatchEntity(
batch.id().value(),
batch.batchNumber().value(),
batch.recipeId().value(),
batch.status().name(),
batch.plannedQuantity().amount(),
batch.plannedQuantity().uom().name(),
batch.productionDate(),
batch.bestBeforeDate(),
batch.createdAt(),
batch.updatedAt()
);
// Optional value objects are flattened into nullable column pairs.
if (batch.actualQuantity() != null) {
entity.setActualQuantityAmount(batch.actualQuantity().amount());
entity.setActualQuantityUnit(batch.actualQuantity().uom().name());
}
if (batch.waste() != null) {
entity.setWasteAmount(batch.waste().amount());
entity.setWasteUnit(batch.waste().uom().name());
}
entity.setRemarks(batch.remarks());
entity.setCompletedAt(batch.completedAt());
entity.setCancellationReason(batch.cancellationReason());
entity.setCancelledAt(batch.cancelledAt());
for (Consumption c : batch.consumptions()) {
entity.getConsumptions().add(toConsumptionEntity(c, entity));
}
return entity;
}
/**
 * Copies the aggregate's mutable state onto an already-managed entity.
 * Consumptions are append-only: new ones are added, existing rows are
 * never modified or removed here.
 */
public void updateEntity(BatchEntity entity, Batch batch) {
entity.setStatus(batch.status().name());
entity.setUpdatedAt(batch.updatedAt());
// Null value objects clear both columns of the pair.
if (batch.actualQuantity() != null) {
entity.setActualQuantityAmount(batch.actualQuantity().amount());
entity.setActualQuantityUnit(batch.actualQuantity().uom().name());
} else {
entity.setActualQuantityAmount(null);
entity.setActualQuantityUnit(null);
}
if (batch.waste() != null) {
entity.setWasteAmount(batch.waste().amount());
entity.setWasteUnit(batch.waste().uom().name());
} else {
entity.setWasteAmount(null);
entity.setWasteUnit(null);
}
entity.setRemarks(batch.remarks());
entity.setCompletedAt(batch.completedAt());
entity.setCancellationReason(batch.cancellationReason());
entity.setCancelledAt(batch.cancelledAt());
// Add only consumptions not yet present (matched by id).
Set<String> existingIds = entity.getConsumptions().stream()
.map(ConsumptionEntity::getId)
.collect(Collectors.toSet());
for (Consumption c : batch.consumptions()) {
if (!existingIds.contains(c.id().value())) {
entity.getConsumptions().add(toConsumptionEntity(c, entity));
}
}
}
/** Reconstitutes the full aggregate, including all consumption children. */
public Batch toDomain(BatchEntity entity) {
List<Consumption> consumptions = entity.getConsumptions().stream()
.map(ce -> Consumption.reconstitute(
ConsumptionId.of(ce.getId()),
BatchId.of(ce.getInputBatchId()),
ArticleId.of(ce.getArticleId()),
Quantity.reconstitute(
ce.getQuantityUsedAmount(),
UnitOfMeasure.valueOf(ce.getQuantityUsedUnit())
),
ce.getConsumedAt()
))
.toList();
// Nullable column pairs map back to optional value objects.
Quantity actualQuantity = entity.getActualQuantityAmount() != null
? Quantity.reconstitute(entity.getActualQuantityAmount(), UnitOfMeasure.valueOf(entity.getActualQuantityUnit()))
: null;
Quantity waste = entity.getWasteAmount() != null
? Quantity.reconstitute(entity.getWasteAmount(), UnitOfMeasure.valueOf(entity.getWasteUnit()))
: null;
return Batch.reconstitute(
BatchId.of(entity.getId()),
new BatchNumber(entity.getBatchNumber()),
RecipeId.of(entity.getRecipeId()),
BatchStatus.valueOf(entity.getStatus()),
Quantity.reconstitute(
entity.getPlannedQuantityAmount(),
UnitOfMeasure.valueOf(entity.getPlannedQuantityUnit())
),
actualQuantity,
waste,
entity.getRemarks(),
entity.getProductionDate(),
entity.getBestBeforeDate(),
entity.getCreatedAt(),
entity.getUpdatedAt(),
entity.getCompletedAt(),
entity.getCancellationReason(),
entity.getCancelledAt(),
entity.getVersion(),
consumptions
);
}
/**
 * Reconstitutes the aggregate WITHOUT its consumptions (empty list) — a
 * lighter read that avoids touching the lazily-loaded child collection.
 */
public Batch toDomainSummary(BatchEntity entity) {
Quantity actualQuantity = entity.getActualQuantityAmount() != null
? Quantity.reconstitute(entity.getActualQuantityAmount(), UnitOfMeasure.valueOf(entity.getActualQuantityUnit()))
: null;
Quantity waste = entity.getWasteAmount() != null
? Quantity.reconstitute(entity.getWasteAmount(), UnitOfMeasure.valueOf(entity.getWasteUnit()))
: null;
return Batch.reconstitute(
BatchId.of(entity.getId()),
new BatchNumber(entity.getBatchNumber()),
RecipeId.of(entity.getRecipeId()),
BatchStatus.valueOf(entity.getStatus()),
Quantity.reconstitute(
entity.getPlannedQuantityAmount(),
UnitOfMeasure.valueOf(entity.getPlannedQuantityUnit())
),
actualQuantity,
waste,
entity.getRemarks(),
entity.getProductionDate(),
entity.getBestBeforeDate(),
entity.getCreatedAt(),
entity.getUpdatedAt(),
entity.getCompletedAt(),
entity.getCancellationReason(),
entity.getCancelledAt(),
entity.getVersion(),
List.of()
);
}
/** Maps one consumption, wiring it to its owning batch entity. */
private ConsumptionEntity toConsumptionEntity(Consumption c, BatchEntity parent) {
return new ConsumptionEntity(
c.id().value(),
parent,
c.inputBatchId().value(),
c.articleId().value(),
c.quantityUsed().amount(),
c.quantityUsed().uom().name(),
c.consumedAt()
);
}
}

View file

@ -1,53 +0,0 @@
package de.effigenix.infrastructure.production.persistence.mapper;
import de.effigenix.domain.production.*;
import de.effigenix.infrastructure.production.persistence.entity.ProductionOrderEntity;
import de.effigenix.shared.common.Quantity;
import de.effigenix.shared.common.UnitOfMeasure;
import org.springframework.stereotype.Component;
/**
 * Converts between the {@code ProductionOrder} domain aggregate and
 * {@link ProductionOrderEntity}. Reconstitution is positional; the argument
 * order must match the domain factory signature exactly.
 */
@Component
public class ProductionOrderMapper {
/** Builds a fresh entity from a new aggregate (batchId is set via updateEntity). */
public ProductionOrderEntity toEntity(ProductionOrder order) {
return new ProductionOrderEntity(
order.id().value(),
order.recipeId().value(),
order.status().name(),
order.plannedQuantity().amount(),
order.plannedQuantity().uom().name(),
order.plannedDate(),
order.priority().name(),
order.notes(),
order.createdAt(),
order.updatedAt()
);
}
/** Copies the aggregate's mutable state onto an already-managed entity. */
public void updateEntity(ProductionOrderEntity entity, ProductionOrder order) {
entity.setStatus(order.status().name());
// batchId is optional on the aggregate; null clears the column.
entity.setBatchId(order.batchId() != null ? order.batchId().value() : null);
entity.setPriority(order.priority().name());
entity.setNotes(order.notes());
entity.setUpdatedAt(order.updatedAt());
}
/** Reconstitutes the aggregate from persisted state. */
public ProductionOrder toDomain(ProductionOrderEntity entity) {
return ProductionOrder.reconstitute(
ProductionOrderId.of(entity.getId()),
RecipeId.of(entity.getRecipeId()),
ProductionOrderStatus.valueOf(entity.getStatus()),
entity.getBatchId() != null ? BatchId.of(entity.getBatchId()) : null,
Quantity.reconstitute(
entity.getPlannedQuantityAmount(),
UnitOfMeasure.valueOf(entity.getPlannedQuantityUnit())
),
entity.getPlannedDate(),
Priority.valueOf(entity.getPriority()),
entity.getNotes(),
entity.getCreatedAt(),
entity.getUpdatedAt(),
entity.getVersion()
);
}
}

View file

@ -1,124 +0,0 @@
package de.effigenix.infrastructure.production.persistence.mapper;
import de.effigenix.domain.production.*;
import de.effigenix.infrastructure.production.persistence.entity.IngredientEntity;
import de.effigenix.infrastructure.production.persistence.entity.ProductionStepEntity;
import de.effigenix.infrastructure.production.persistence.entity.RecipeEntity;
import de.effigenix.shared.common.Quantity;
import de.effigenix.shared.common.UnitOfMeasure;
import org.springframework.stereotype.Component;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Converts between the {@code Recipe} domain aggregate (with its ingredients
 * and production steps) and {@link RecipeEntity}. Reconstitution calls are
 * positional; the argument order must match the domain factory signatures.
 */
@Component
public class RecipeMapper {
/** Builds a fresh entity tree (recipe + children) from the aggregate. */
public RecipeEntity toEntity(Recipe recipe) {
var entity = new RecipeEntity(
recipe.id().value(),
recipe.name().value(),
recipe.version(),
recipe.type().name(),
recipe.description(),
recipe.yieldPercentage().value(),
recipe.shelfLifeDays(),
recipe.outputQuantity().amount(),
recipe.outputQuantity().uom().name(),
recipe.articleId(),
recipe.status().name(),
recipe.createdAt(),
recipe.updatedAt()
);
// Children need a back-reference to the owning entity for the JPA mapping.
List<IngredientEntity> ingredientEntities = recipe.ingredients().stream()
.map(i -> toIngredientEntity(i, entity))
.collect(Collectors.toList());
entity.setIngredients(ingredientEntities);
List<ProductionStepEntity> stepEntities = recipe.productionSteps().stream()
.map(s -> toProductionStepEntity(s, entity))
.collect(Collectors.toList());
entity.setProductionSteps(stepEntities);
return entity;
}
/** Reconstitutes the full aggregate, including both child collections. */
public Recipe toDomain(RecipeEntity entity) {
List<Ingredient> ingredients = entity.getIngredients().stream()
.map(this::toDomainIngredient)
.collect(Collectors.toList());
List<ProductionStep> productionSteps = entity.getProductionSteps().stream()
.map(this::toDomainProductionStep)
.collect(Collectors.toList());
return Recipe.reconstitute(
RecipeId.of(entity.getId()),
new RecipeName(entity.getName()),
entity.getVersion(),
RecipeType.valueOf(entity.getType()),
entity.getDescription(),
new YieldPercentage(entity.getYieldPercentage()),
entity.getShelfLifeDays(),
Quantity.reconstitute(
entity.getOutputQuantity(),
UnitOfMeasure.valueOf(entity.getOutputUom())
),
entity.getArticleId(),
RecipeStatus.valueOf(entity.getStatus()),
ingredients,
productionSteps,
entity.getCreatedAt(),
entity.getUpdatedAt()
);
}
/** Maps one ingredient, wiring it to its owning recipe entity. */
private IngredientEntity toIngredientEntity(Ingredient ingredient, RecipeEntity recipe) {
return new IngredientEntity(
ingredient.id().value(),
recipe,
ingredient.position(),
ingredient.articleId(),
ingredient.quantity().amount(),
ingredient.quantity().uom().name(),
ingredient.subRecipeId(),
ingredient.substitutable()
);
}
/** Maps one production step, wiring it to its owning recipe entity. */
private ProductionStepEntity toProductionStepEntity(ProductionStep step, RecipeEntity recipe) {
return new ProductionStepEntity(
step.id().value(),
recipe,
step.stepNumber(),
step.description(),
step.durationMinutes(),
step.temperatureCelsius()
);
}
private ProductionStep toDomainProductionStep(ProductionStepEntity entity) {
return ProductionStep.reconstitute(
ProductionStepId.of(entity.getId()),
entity.getStepNumber(),
entity.getDescription(),
entity.getDurationMinutes(),
entity.getTemperatureCelsius()
);
}
private Ingredient toDomainIngredient(IngredientEntity entity) {
return Ingredient.reconstitute(
IngredientId.of(entity.getId()),
entity.getPosition(),
entity.getArticleId(),
Quantity.reconstitute(
entity.getQuantity(),
UnitOfMeasure.valueOf(entity.getUom())
),
entity.getSubRecipeId(),
entity.isSubstitutable()
);
}
}

View file

@ -1,19 +0,0 @@
package de.effigenix.infrastructure.production.persistence.repository;

import de.effigenix.infrastructure.production.persistence.entity.BatchEntity;
import org.springframework.data.jpa.repository.JpaRepository;

import java.time.LocalDate;
import java.util.List;
import java.util.Optional;

/**
 * Spring Data JPA repository for {@link BatchEntity}, keyed by its String id.
 *
 * <p>All finder methods are derived queries: Spring Data generates the SQL
 * from the method name (property path + operator), so no {@code @Query}
 * annotations are needed. Status is stored as the enum's String name.
 */
public interface BatchJpaRepository extends JpaRepository<BatchEntity, String> {
    // Batch numbers are unique business identifiers, hence Optional.
    Optional<BatchEntity> findByBatchNumber(String batchNumber);
    List<BatchEntity> findByStatus(String status);
    List<BatchEntity> findByProductionDate(LocalDate productionDate);
    // "In" suffix generates a WHERE recipe_id IN (...) query.
    List<BatchEntity> findByRecipeIdIn(List<String> recipeIds);
}

View file

@ -1,15 +0,0 @@
package de.effigenix.infrastructure.production.persistence.repository;

import de.effigenix.infrastructure.production.persistence.entity.BatchNumberSequenceEntity;
import jakarta.persistence.LockModeType;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Lock;

import java.time.LocalDate;
import java.util.Optional;

/**
 * Spring Data JPA repository for the per-day batch-number sequence row,
 * keyed by production date.
 */
public interface BatchNumberSequenceJpaRepository extends JpaRepository<BatchNumberSequenceEntity, LocalDate> {
    /**
     * Loads the sequence row for a production date under a pessimistic write
     * lock (SELECT ... FOR UPDATE), serializing concurrent batch-number
     * generation for the same day so no duplicate numbers are issued.
     * Requires an active transaction to hold the lock.
     */
    @Lock(LockModeType.PESSIMISTIC_WRITE)
    Optional<BatchNumberSequenceEntity> findByProductionDate(LocalDate productionDate);
}

View file

@ -1,184 +0,0 @@
package de.effigenix.infrastructure.production.persistence.repository;

import de.effigenix.domain.production.*;
import de.effigenix.infrastructure.production.persistence.mapper.BatchMapper;
import de.effigenix.shared.common.RepositoryError;
import de.effigenix.shared.common.Result;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Profile;
import org.springframework.orm.ObjectOptimisticLockingFailureException;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;

import java.time.LocalDate;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

/**
 * JPA-backed implementation of {@link BatchRepository}.
 *
 * <p>All operations return a {@link Result} instead of throwing: database
 * exceptions are caught and folded into {@link RepositoryError.DatabaseError},
 * and optimistic-lock conflicts on {@link #save(Batch)} become
 * {@link RepositoryError.ConcurrentModification}. Reads run in a read-only
 * transaction (class-level annotation); {@code save} opens a writable one.
 *
 * <p>The previous version repeated an identical try/catch in every read
 * method; that boilerplate is now centralized in {@link #guarded}.
 */
@Repository
@Profile("!no-db")
@Transactional(readOnly = true)
public class JpaBatchRepository implements BatchRepository {

    private static final Logger logger = LoggerFactory.getLogger(JpaBatchRepository.class);

    private final BatchJpaRepository jpaRepository;
    private final BatchMapper mapper;

    public JpaBatchRepository(BatchJpaRepository jpaRepository, BatchMapper mapper) {
        this.jpaRepository = jpaRepository;
        this.mapper = mapper;
    }

    /**
     * Runs a read query and converts any thrown exception into a
     * {@code Result.failure(DatabaseError)}, logging the stack trace at
     * TRACE level (errors are expected to surface via the Result, not logs).
     *
     * @param operation name used in the log message, e.g. "findById"
     * @param query     the actual repository call
     */
    private <T> Result<RepositoryError, T> guarded(String operation,
                                                   java.util.function.Supplier<T> query) {
        try {
            return Result.success(query.get());
        } catch (Exception e) {
            logger.trace("Database error in {}", operation, e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }

    @Override
    public Result<RepositoryError, Optional<Batch>> findById(BatchId id) {
        return guarded("findById", () ->
                jpaRepository.findById(id.value()).map(mapper::toDomain));
    }

    @Override
    public Result<RepositoryError, List<Batch>> findAll() {
        return guarded("findAll", () -> jpaRepository.findAll().stream()
                .map(mapper::toDomain)
                .collect(Collectors.toList()));
    }

    @Override
    public Result<RepositoryError, Optional<Batch>> findByBatchNumber(BatchNumber batchNumber) {
        return guarded("findByBatchNumber", () ->
                jpaRepository.findByBatchNumber(batchNumber.value()).map(mapper::toDomain));
    }

    @Override
    public Result<RepositoryError, List<Batch>> findByStatus(BatchStatus status) {
        return guarded("findByStatus", () -> jpaRepository.findByStatus(status.name()).stream()
                .map(mapper::toDomain)
                .collect(Collectors.toList()));
    }

    @Override
    public Result<RepositoryError, List<Batch>> findByProductionDate(LocalDate date) {
        return guarded("findByProductionDate", () -> jpaRepository.findByProductionDate(date).stream()
                .map(mapper::toDomain)
                .collect(Collectors.toList()));
    }

    @Override
    public Result<RepositoryError, List<Batch>> findByRecipeIds(List<RecipeId> recipeIds) {
        return guarded("findByRecipeIds", () -> {
            List<String> ids = recipeIds.stream().map(RecipeId::value).toList();
            return jpaRepository.findByRecipeIdIn(ids).stream()
                    .map(mapper::toDomain)
                    .collect(Collectors.toList());
        });
    }

    // The *Summary variants use mapper::toDomainSummary, which presumably
    // skips heavy child collections — confirm against BatchMapper.

    @Override
    public Result<RepositoryError, List<Batch>> findAllSummary() {
        return guarded("findAllSummary", () -> jpaRepository.findAll().stream()
                .map(mapper::toDomainSummary)
                .collect(Collectors.toList()));
    }

    @Override
    public Result<RepositoryError, List<Batch>> findByStatusSummary(BatchStatus status) {
        return guarded("findByStatusSummary", () -> jpaRepository.findByStatus(status.name()).stream()
                .map(mapper::toDomainSummary)
                .collect(Collectors.toList()));
    }

    @Override
    public Result<RepositoryError, List<Batch>> findByProductionDateSummary(LocalDate date) {
        return guarded("findByProductionDateSummary", () -> jpaRepository.findByProductionDate(date).stream()
                .map(mapper::toDomainSummary)
                .collect(Collectors.toList()));
    }

    @Override
    public Result<RepositoryError, List<Batch>> findByRecipeIdsSummary(List<RecipeId> recipeIds) {
        return guarded("findByRecipeIdsSummary", () -> {
            List<String> ids = recipeIds.stream().map(RecipeId::value).toList();
            return jpaRepository.findByRecipeIdIn(ids).stream()
                    .map(mapper::toDomainSummary)
                    .collect(Collectors.toList());
        });
    }

    /**
     * Upserts a batch: an existing row is updated in place (dirty checking
     * flushes the managed entity), a new batch is inserted. Optimistic-lock
     * conflicts are reported as {@code ConcurrentModification} so callers can
     * retry; {@code save} is handled separately from {@link #guarded} because
     * of this extra catch clause.
     */
    @Override
    @Transactional
    public Result<RepositoryError, Void> save(Batch batch) {
        try {
            var existing = jpaRepository.findById(batch.id().value());
            if (existing.isPresent()) {
                mapper.updateEntity(existing.get(), batch);
            } else {
                jpaRepository.save(mapper.toEntity(batch));
            }
            return Result.success(null);
        } catch (ObjectOptimisticLockingFailureException e) {
            logger.warn("Optimistic locking failure for batch {}", batch.id().value());
            return Result.failure(new RepositoryError.ConcurrentModification(
                    "Batch was modified by another transaction"));
        } catch (Exception e) {
            logger.trace("Database error in save", e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }
}

View file

@ -1,79 +0,0 @@
package de.effigenix.infrastructure.production.persistence.repository;

import de.effigenix.domain.production.ProductionOrder;
import de.effigenix.domain.production.ProductionOrderId;
import de.effigenix.domain.production.ProductionOrderRepository;
import de.effigenix.infrastructure.production.persistence.mapper.ProductionOrderMapper;
import de.effigenix.shared.common.RepositoryError;
import de.effigenix.shared.common.Result;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Profile;
import org.springframework.orm.ObjectOptimisticLockingFailureException;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;

import java.util.List;
import java.util.Optional;

/**
 * JPA-backed implementation of {@link ProductionOrderRepository}.
 *
 * <p>Database failures never escape as exceptions: every method folds them
 * into a {@code Result.failure} carrying a {@link RepositoryError}. Reads use
 * the class-level read-only transaction; {@link #save} opens a writable one
 * and maps optimistic-lock conflicts to {@code ConcurrentModification}.
 */
@Repository
@Profile("!no-db")
@Transactional(readOnly = true)
public class JpaProductionOrderRepository implements ProductionOrderRepository {

    private static final Logger logger =
            LoggerFactory.getLogger(JpaProductionOrderRepository.class);

    private final ProductionOrderJpaRepository jpaRepository;
    private final ProductionOrderMapper mapper;

    public JpaProductionOrderRepository(ProductionOrderJpaRepository jpaRepository,
                                        ProductionOrderMapper mapper) {
        this.jpaRepository = jpaRepository;
        this.mapper = mapper;
    }

    /** Looks up a single order by id; an absent row yields an empty Optional, not an error. */
    @Override
    public Result<RepositoryError, Optional<ProductionOrder>> findById(ProductionOrderId id) {
        try {
            var entity = jpaRepository.findById(id.value());
            return Result.success(entity.map(mapper::toDomain));
        } catch (Exception e) {
            logger.trace("Database error in findById", e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }

    /** Loads and maps every production order. */
    @Override
    public Result<RepositoryError, List<ProductionOrder>> findAll() {
        try {
            var orders = jpaRepository.findAll().stream()
                    .map(mapper::toDomain)
                    .toList();
            return Result.success(orders);
        } catch (Exception e) {
            logger.trace("Database error in findAll", e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }

    /**
     * Upserts an order: if a row with the same id exists, the managed entity
     * is updated in place (JPA dirty checking persists it on commit);
     * otherwise a fresh entity is inserted. Concurrent modification surfaces
     * as {@code ConcurrentModification} so callers can retry.
     */
    @Override
    @Transactional
    public Result<RepositoryError, Void> save(ProductionOrder order) {
        try {
            jpaRepository.findById(order.id().value()).ifPresentOrElse(
                    entity -> mapper.updateEntity(entity, order),
                    () -> jpaRepository.save(mapper.toEntity(order)));
            return Result.success(null);
        } catch (ObjectOptimisticLockingFailureException e) {
            logger.warn("Optimistic locking failure for production order {}", order.id().value());
            return Result.failure(new RepositoryError.ConcurrentModification(
                    "Production order was modified by another transaction"));
        } catch (Exception e) {
            logger.trace("Database error in save", e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }
}

View file

@ -1,116 +0,0 @@
package de.effigenix.infrastructure.production.persistence.repository;

import de.effigenix.domain.production.*;
import de.effigenix.infrastructure.production.persistence.mapper.RecipeMapper;
import de.effigenix.shared.common.RepositoryError;
import de.effigenix.shared.common.Result;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;

import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

/**
 * JPA-backed implementation of {@link RecipeRepository}.
 *
 * <p>All methods return a {@link Result}; database exceptions are caught and
 * folded into {@link RepositoryError.DatabaseError}. Reads run under the
 * class-level read-only transaction; {@code save}/{@code delete} open
 * writable ones.
 *
 * <p>NOTE(review): unlike the batch/order repositories, {@code save} here
 * does a plain overwrite with no optimistic-lock catch — confirm whether
 * concurrent recipe edits need ConcurrentModification handling too.
 */
@Repository
@Profile("!no-db")
@Transactional(readOnly = true)
public class JpaRecipeRepository implements RecipeRepository {
    private static final Logger logger = LoggerFactory.getLogger(JpaRecipeRepository.class);

    private final RecipeJpaRepository jpaRepository;
    private final RecipeMapper mapper;

    public JpaRecipeRepository(RecipeJpaRepository jpaRepository, RecipeMapper mapper) {
        this.jpaRepository = jpaRepository;
        this.mapper = mapper;
    }

    /** Looks up one recipe by id; absence yields an empty Optional, not an error. */
    @Override
    public Result<RepositoryError, Optional<Recipe>> findById(RecipeId id) {
        try {
            Optional<Recipe> result = jpaRepository.findById(id.value())
                    .map(mapper::toDomain);
            return Result.success(result);
        } catch (Exception e) {
            // Errors surface via the Result, so the stack trace only goes to TRACE.
            logger.trace("Database error in findById", e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }

    /** Loads and maps every recipe. */
    @Override
    public Result<RepositoryError, List<Recipe>> findAll() {
        try {
            List<Recipe> result = jpaRepository.findAll().stream()
                    .map(mapper::toDomain)
                    .collect(Collectors.toList());
            return Result.success(result);
        } catch (Exception e) {
            logger.trace("Database error in findAll", e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }

    /** Persists the recipe; maps the aggregate to a fresh entity and saves it. */
    @Override
    @Transactional
    public Result<RepositoryError, Void> save(Recipe recipe) {
        try {
            jpaRepository.save(mapper.toEntity(recipe));
            return Result.success(null);
        } catch (Exception e) {
            logger.trace("Database error in save", e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }

    /** Deletes by id; deleteById on a missing row's behavior follows Spring Data semantics. */
    @Override
    @Transactional
    public Result<RepositoryError, Void> delete(Recipe recipe) {
        try {
            jpaRepository.deleteById(recipe.id().value());
            return Result.success(null);
        } catch (Exception e) {
            logger.trace("Database error in delete", e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }

    /** Finds recipes whose stored status column equals the enum's name. */
    @Override
    public Result<RepositoryError, List<Recipe>> findByStatus(RecipeStatus status) {
        try {
            List<Recipe> result = jpaRepository.findByStatus(status.name()).stream()
                    .map(mapper::toDomain)
                    .collect(Collectors.toList());
            return Result.success(result);
        } catch (Exception e) {
            logger.trace("Database error in findByStatus", e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }

    /** Finds all recipe versions that produce the given article. */
    @Override
    public Result<RepositoryError, List<Recipe>> findByArticleId(String articleId) {
        try {
            List<Recipe> result = jpaRepository.findByArticleId(articleId).stream()
                    .map(mapper::toDomain)
                    .collect(Collectors.toList());
            return Result.success(result);
        } catch (Exception e) {
            logger.trace("Database error in findByArticleId", e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }

    /** Existence check used to enforce the (name, version) uniqueness rule. */
    @Override
    public Result<RepositoryError, Boolean> existsByNameAndVersion(String name, int version) {
        try {
            return Result.success(jpaRepository.existsByNameAndVersion(name, version));
        } catch (Exception e) {
            logger.trace("Database error in existsByNameAndVersion", e);
            return Result.failure(new RepositoryError.DatabaseError(e.getMessage()));
        }
    }
}

View file

@ -1,7 +0,0 @@
package de.effigenix.infrastructure.production.persistence.repository;

import de.effigenix.infrastructure.production.persistence.entity.ProductionOrderEntity;
import org.springframework.data.jpa.repository.JpaRepository;

/**
 * Spring Data JPA repository for {@link ProductionOrderEntity}, keyed by its
 * String id. Only the inherited CRUD operations are used; no derived queries.
 */
public interface ProductionOrderJpaRepository extends JpaRepository<ProductionOrderEntity, String> {
}

View file

@ -1,15 +0,0 @@
package de.effigenix.infrastructure.production.persistence.repository;

import de.effigenix.infrastructure.production.persistence.entity.RecipeEntity;
import org.springframework.data.jpa.repository.JpaRepository;

import java.util.List;

/**
 * Spring Data JPA repository for {@link RecipeEntity}, keyed by its String id.
 *
 * <p>All finders are derived queries generated from the method names.
 * Status is stored as the enum's String name.
 */
public interface RecipeJpaRepository extends JpaRepository<RecipeEntity, String> {
    List<RecipeEntity> findByStatus(String status);
    // Backs the domain rule that a (name, version) pair is unique.
    boolean existsByNameAndVersion(String name, int version);
    List<RecipeEntity> findByArticleId(String articleId);
}

View file

@ -0,0 +1,9 @@
package de.effigenix.shared.persistence;

import de.effigenix.shared.common.Result;

import java.util.function.Supplier;

/**
 * Port for explicit, application-controlled transaction boundaries.
 *
 * <p>Exists because declarative {@code @Transactional} only rolls back on
 * exceptions, while this codebase reports errors functionally via
 * {@link Result}: per the introducing change, implementations (e.g. a
 * Spring-backed one) run {@code work} in a transaction and roll back when it
 * returns {@code Result.failure} — commit only on success.
 */
public interface UnitOfWork {
    /**
     * Executes {@code work} atomically.
     *
     * @param work the transactional unit; its returned Result decides
     *             commit (success) vs rollback (failure)
     * @return the Result produced by {@code work}
     */
    <E, T> Result<E, T> executeAtomically(Supplier<Result<E, T>> work);
}

View file

@ -56,7 +56,9 @@
<constraints nullable="false"/>
</column>
<column name="issuer" type="VARCHAR(200)"/>
<column name="valid_from" type="DATE"/>
<column name="valid_from" type="DATE">
<constraints nullable="false"/>
</column>
<column name="valid_until" type="DATE"/>
</createTable>
<addPrimaryKey tableName="quality_certificates"

View file

@ -5,7 +5,7 @@
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-latest.xsd">
<changeSet id="017-timestamps-to-timestamptz" author="effigenix">
<changeSet id="017-timestamps-to-timestamptz" author="effigenix" dbms="postgresql">
<comment>Migrate all TIMESTAMP columns to TIMESTAMP WITH TIME ZONE for consistent timezone handling</comment>
<!-- users -->
@ -33,4 +33,26 @@
<sql>ALTER TABLE recipes ALTER COLUMN updated_at TYPE TIMESTAMP WITH TIME ZONE;</sql>
</changeSet>
<changeSet id="017-timestamps-to-timestamptz" author="effigenix" dbms="h2">
<comment>H2: Migrate all TIMESTAMP columns to TIMESTAMP WITH TIME ZONE</comment>
<sql>ALTER TABLE users ALTER COLUMN created_at SET DATA TYPE TIMESTAMP WITH TIME ZONE;</sql>
<sql>ALTER TABLE users ALTER COLUMN last_login SET DATA TYPE TIMESTAMP WITH TIME ZONE;</sql>
<sql>ALTER TABLE audit_logs ALTER COLUMN timestamp SET DATA TYPE TIMESTAMP WITH TIME ZONE;</sql>
<sql>ALTER TABLE audit_logs ALTER COLUMN created_at SET DATA TYPE TIMESTAMP WITH TIME ZONE;</sql>
<sql>ALTER TABLE articles ALTER COLUMN created_at SET DATA TYPE TIMESTAMP WITH TIME ZONE;</sql>
<sql>ALTER TABLE articles ALTER COLUMN updated_at SET DATA TYPE TIMESTAMP WITH TIME ZONE;</sql>
<sql>ALTER TABLE suppliers ALTER COLUMN created_at SET DATA TYPE TIMESTAMP WITH TIME ZONE;</sql>
<sql>ALTER TABLE suppliers ALTER COLUMN updated_at SET DATA TYPE TIMESTAMP WITH TIME ZONE;</sql>
<sql>ALTER TABLE customers ALTER COLUMN created_at SET DATA TYPE TIMESTAMP WITH TIME ZONE;</sql>
<sql>ALTER TABLE customers ALTER COLUMN updated_at SET DATA TYPE TIMESTAMP WITH TIME ZONE;</sql>
<sql>ALTER TABLE recipes ALTER COLUMN created_at SET DATA TYPE TIMESTAMP WITH TIME ZONE;</sql>
<sql>ALTER TABLE recipes ALTER COLUMN updated_at SET DATA TYPE TIMESTAMP WITH TIME ZONE;</sql>
</changeSet>
</databaseChangeLog>

View file

@ -17,7 +17,7 @@
</preConditions>
<comment>Add PRODUCTION_ORDER_READ and PRODUCTION_ORDER_WRITE permissions for ADMIN and PRODUCTION_MANAGER roles (skipped if already present from 002)</comment>
<sql>
<sql dbms="postgresql">
INSERT INTO role_permissions (role_id, permission) VALUES
('c0a80121-0000-0000-0000-000000000001', 'PRODUCTION_ORDER_READ'),
('c0a80121-0000-0000-0000-000000000001', 'PRODUCTION_ORDER_WRITE'),
@ -25,6 +25,16 @@
('c0a80121-0000-0000-0000-000000000002', 'PRODUCTION_ORDER_WRITE')
ON CONFLICT DO NOTHING;
</sql>
<sql dbms="h2">
MERGE INTO role_permissions (role_id, permission) KEY (role_id, permission) VALUES
('c0a80121-0000-0000-0000-000000000001', 'PRODUCTION_ORDER_READ');
MERGE INTO role_permissions (role_id, permission) KEY (role_id, permission) VALUES
('c0a80121-0000-0000-0000-000000000001', 'PRODUCTION_ORDER_WRITE');
MERGE INTO role_permissions (role_id, permission) KEY (role_id, permission) VALUES
('c0a80121-0000-0000-0000-000000000002', 'PRODUCTION_ORDER_READ');
MERGE INTO role_permissions (role_id, permission) KEY (role_id, permission) VALUES
('c0a80121-0000-0000-0000-000000000002', 'PRODUCTION_ORDER_WRITE');
</sql>
</changeSet>
</databaseChangeLog>

View file

@ -5,9 +5,14 @@
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-latest.xsd">
<changeSet id="027-add-released-status-to-production-orders" author="effigenix">
<changeSet id="027-add-released-status-to-production-orders" author="effigenix" dbms="postgresql">
<sql>ALTER TABLE production_orders DROP CONSTRAINT chk_production_order_status;</sql>
<sql>ALTER TABLE production_orders ADD CONSTRAINT chk_production_order_status CHECK (status IN ('PLANNED', 'RELEASED', 'IN_PROGRESS', 'COMPLETED', 'CANCELLED'));</sql>
</changeSet>
<changeSet id="027-add-released-status-to-production-orders" author="effigenix" dbms="h2">
<sql>ALTER TABLE production_orders DROP CONSTRAINT IF EXISTS chk_production_order_status;</sql>
<sql>ALTER TABLE production_orders ADD CONSTRAINT chk_production_order_status CHECK (status IN ('PLANNED', 'RELEASED', 'IN_PROGRESS', 'COMPLETED', 'CANCELLED'));</sql>
</changeSet>
</databaseChangeLog>

View file

@ -7,6 +7,7 @@ import de.effigenix.shared.common.Result;
import de.effigenix.shared.common.UnitOfMeasure;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import de.effigenix.shared.persistence.UnitOfWork;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
@ -19,6 +20,7 @@ import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.List;
import java.util.Optional;
import java.util.function.Supplier;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
@ -30,14 +32,16 @@ class ActivateRecipeTest {
@Mock private RecipeRepository recipeRepository;
@Mock private AuthorizationPort authPort;
@Mock private UnitOfWork unitOfWork;
private ActivateRecipe activateRecipe;
private ActorId performedBy;
@BeforeEach
void setUp() {
activateRecipe = new ActivateRecipe(recipeRepository, authPort);
activateRecipe = new ActivateRecipe(recipeRepository, authPort, unitOfWork);
performedBy = ActorId.of("admin-user");
lenient().when(unitOfWork.executeAtomically(any())).thenAnswer(inv -> ((Supplier<?>) inv.getArgument(0)).get());
}
private Recipe draftRecipeWithIngredient() {

View file

@ -5,6 +5,7 @@ import de.effigenix.domain.production.*;
import de.effigenix.shared.common.Result;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import de.effigenix.shared.persistence.UnitOfWork;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
@ -14,6 +15,7 @@ import org.mockito.junit.jupiter.MockitoExtension;
import java.util.List;
import java.util.Optional;
import java.util.function.Supplier;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
@ -26,14 +28,16 @@ class AddRecipeIngredientTest {
@Mock private RecipeRepository recipeRepository;
@Mock private AuthorizationPort authPort;
@Mock private RecipeCycleChecker cycleChecker;
@Mock private UnitOfWork unitOfWork;
private AddRecipeIngredient addRecipeIngredient;
private ActorId performedBy;
@BeforeEach
void setUp() {
addRecipeIngredient = new AddRecipeIngredient(recipeRepository, authPort, cycleChecker);
addRecipeIngredient = new AddRecipeIngredient(recipeRepository, authPort, cycleChecker, unitOfWork);
performedBy = ActorId.of("admin-user");
lenient().when(unitOfWork.executeAtomically(any())).thenAnswer(inv -> ((Supplier<?>) inv.getArgument(0)).get());
}
private Recipe draftRecipe() {

View file

@ -7,6 +7,7 @@ import de.effigenix.shared.common.Result;
import de.effigenix.shared.common.UnitOfMeasure;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import de.effigenix.shared.persistence.UnitOfWork;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
@ -19,6 +20,7 @@ import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.List;
import java.util.Optional;
import java.util.function.Supplier;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
@ -30,14 +32,16 @@ class ArchiveRecipeTest {
@Mock private RecipeRepository recipeRepository;
@Mock private AuthorizationPort authPort;
@Mock private UnitOfWork unitOfWork;
private ArchiveRecipe archiveRecipe;
private ActorId performedBy;
@BeforeEach
void setUp() {
archiveRecipe = new ArchiveRecipe(recipeRepository, authPort);
archiveRecipe = new ArchiveRecipe(recipeRepository, authPort, unitOfWork);
performedBy = ActorId.of("admin-user");
lenient().when(unitOfWork.executeAtomically(any())).thenAnswer(inv -> ((Supplier<?>) inv.getArgument(0)).get());
}
private Recipe activeRecipe() {

View file

@ -8,6 +8,7 @@ import de.effigenix.shared.common.Result;
import de.effigenix.shared.common.UnitOfMeasure;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import de.effigenix.shared.persistence.UnitOfWork;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
@ -21,6 +22,7 @@ import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.List;
import java.util.Optional;
import java.util.function.Supplier;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
@ -31,14 +33,16 @@ class CancelBatchTest {
@Mock private BatchRepository batchRepository;
@Mock private AuthorizationPort authPort;
@Mock private UnitOfWork unitOfWork;
private CancelBatch cancelBatch;
private ActorId performedBy;
@BeforeEach
void setUp() {
cancelBatch = new CancelBatch(batchRepository, authPort);
cancelBatch = new CancelBatch(batchRepository, authPort, unitOfWork);
performedBy = ActorId.of("admin-user");
lenient().when(unitOfWork.executeAtomically(any())).thenAnswer(inv -> ((Supplier<?>) inv.getArgument(0)).get());
}
private Batch plannedBatch(String id) {

View file

@ -8,6 +8,7 @@ import de.effigenix.shared.common.Result;
import de.effigenix.shared.common.UnitOfMeasure;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import de.effigenix.shared.persistence.UnitOfWork;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
@ -21,6 +22,7 @@ import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.List;
import java.util.Optional;
import java.util.function.Supplier;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
@ -31,14 +33,16 @@ class CompleteBatchTest {
@Mock private BatchRepository batchRepository;
@Mock private AuthorizationPort authPort;
@Mock private UnitOfWork unitOfWork;
private CompleteBatch completeBatch;
private ActorId performedBy;
@BeforeEach
void setUp() {
completeBatch = new CompleteBatch(batchRepository, authPort);
completeBatch = new CompleteBatch(batchRepository, authPort, unitOfWork);
performedBy = ActorId.of("admin-user");
lenient().when(unitOfWork.executeAtomically(any())).thenAnswer(inv -> ((Supplier<?>) inv.getArgument(0)).get());
}
private Batch inProductionBatchWithConsumption(String id) {

View file

@ -8,6 +8,7 @@ import de.effigenix.shared.common.Result;
import de.effigenix.shared.common.UnitOfMeasure;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import de.effigenix.shared.persistence.UnitOfWork;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
@ -21,6 +22,7 @@ import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.List;
import java.util.Optional;
import java.util.function.Supplier;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
@ -33,6 +35,7 @@ class CreateProductionOrderTest {
@Mock private ProductionOrderRepository productionOrderRepository;
@Mock private RecipeRepository recipeRepository;
@Mock private AuthorizationPort authPort;
@Mock private UnitOfWork unitOfWork;
private CreateProductionOrder createProductionOrder;
private ActorId performedBy;
@ -41,8 +44,9 @@ class CreateProductionOrderTest {
@BeforeEach
void setUp() {
createProductionOrder = new CreateProductionOrder(productionOrderRepository, recipeRepository, authPort);
createProductionOrder = new CreateProductionOrder(productionOrderRepository, recipeRepository, authPort, unitOfWork);
performedBy = ActorId.of("admin-user");
lenient().when(unitOfWork.executeAtomically(any())).thenAnswer(inv -> ((Supplier<?>) inv.getArgument(0)).get());
}
private CreateProductionOrderCommand validCommand() {

View file

@ -8,6 +8,7 @@ import de.effigenix.shared.common.Result;
import de.effigenix.shared.common.UnitOfMeasure;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import de.effigenix.shared.persistence.UnitOfWork;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
@ -21,6 +22,7 @@ import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.List;
import java.util.Optional;
import java.util.function.Supplier;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
@ -34,6 +36,7 @@ class PlanBatchTest {
@Mock private RecipeRepository recipeRepository;
@Mock private BatchNumberGenerator batchNumberGenerator;
@Mock private AuthorizationPort authPort;
@Mock private UnitOfWork unitOfWork;
private PlanBatch planBatch;
private ActorId performedBy;
@ -44,8 +47,9 @@ class PlanBatchTest {
@BeforeEach
void setUp() {
planBatch = new PlanBatch(batchRepository, recipeRepository, batchNumberGenerator, authPort);
planBatch = new PlanBatch(batchRepository, recipeRepository, batchNumberGenerator, authPort, unitOfWork);
performedBy = ActorId.of("admin-user");
lenient().when(unitOfWork.executeAtomically(any())).thenAnswer(inv -> ((Supplier<?>) inv.getArgument(0)).get());
}
private PlanBatchCommand validCommand() {

View file

@ -8,6 +8,7 @@ import de.effigenix.shared.common.Result;
import de.effigenix.shared.common.UnitOfMeasure;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import de.effigenix.shared.persistence.UnitOfWork;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
@ -21,6 +22,7 @@ import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.List;
import java.util.Optional;
import java.util.function.Supplier;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
@ -31,14 +33,16 @@ class RecordConsumptionTest {
@Mock private BatchRepository batchRepository;
@Mock private AuthorizationPort authPort;
@Mock private UnitOfWork unitOfWork;
private RecordConsumption recordConsumption;
private ActorId performedBy;
@BeforeEach
void setUp() {
recordConsumption = new RecordConsumption(batchRepository, authPort);
recordConsumption = new RecordConsumption(batchRepository, authPort, unitOfWork);
performedBy = ActorId.of("admin-user");
lenient().when(unitOfWork.executeAtomically(any())).thenAnswer(inv -> ((Supplier<?>) inv.getArgument(0)).get());
}
private Batch inProductionBatch(String id) {

View file

@ -8,6 +8,7 @@ import de.effigenix.shared.common.Result;
import de.effigenix.shared.common.UnitOfMeasure;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import de.effigenix.shared.persistence.UnitOfWork;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
@ -21,6 +22,7 @@ import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.List;
import java.util.Optional;
import java.util.function.Supplier;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
@ -33,6 +35,7 @@ class ReleaseProductionOrderTest {
@Mock private ProductionOrderRepository productionOrderRepository;
@Mock private RecipeRepository recipeRepository;
@Mock private AuthorizationPort authPort;
@Mock private UnitOfWork unitOfWork;
private ReleaseProductionOrder releaseProductionOrder;
private ActorId performedBy;
@ -41,8 +44,9 @@ class ReleaseProductionOrderTest {
@BeforeEach
void setUp() {
releaseProductionOrder = new ReleaseProductionOrder(productionOrderRepository, recipeRepository, authPort);
releaseProductionOrder = new ReleaseProductionOrder(productionOrderRepository, recipeRepository, authPort, unitOfWork);
performedBy = ActorId.of("admin-user");
lenient().when(unitOfWork.executeAtomically(any())).thenAnswer(inv -> ((Supplier<?>) inv.getArgument(0)).get());
}
private ReleaseProductionOrderCommand validCommand() {

View file

@ -8,6 +8,7 @@ import de.effigenix.shared.common.Result;
import de.effigenix.shared.common.UnitOfMeasure;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import de.effigenix.shared.persistence.UnitOfWork;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
@ -21,6 +22,7 @@ import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.List;
import java.util.Optional;
import java.util.function.Supplier;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
@ -31,14 +33,16 @@ class StartBatchTest {
@Mock private BatchRepository batchRepository;
@Mock private AuthorizationPort authPort;
@Mock private UnitOfWork unitOfWork;
private StartBatch startBatch;
private ActorId performedBy;
@BeforeEach
void setUp() {
startBatch = new StartBatch(batchRepository, authPort);
startBatch = new StartBatch(batchRepository, authPort, unitOfWork);
performedBy = ActorId.of("admin-user");
lenient().when(unitOfWork.executeAtomically(any())).thenAnswer(inv -> ((Supplier<?>) inv.getArgument(0)).get());
}
private Batch plannedBatch(String id) {

View file

@ -8,6 +8,7 @@ import de.effigenix.shared.common.Result;
import de.effigenix.shared.common.UnitOfMeasure;
import de.effigenix.shared.security.ActorId;
import de.effigenix.shared.security.AuthorizationPort;
import de.effigenix.shared.persistence.UnitOfWork;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
@ -21,6 +22,7 @@ import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.List;
import java.util.Optional;
import java.util.function.Supplier;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
@ -33,6 +35,7 @@ class StartProductionOrderTest {
@Mock private ProductionOrderRepository productionOrderRepository;
@Mock private BatchRepository batchRepository;
@Mock private AuthorizationPort authPort;
@Mock private UnitOfWork unitOfWork;
private StartProductionOrder startProductionOrder;
private ActorId performedBy;
@ -41,8 +44,9 @@ class StartProductionOrderTest {
@BeforeEach
void setUp() {
startProductionOrder = new StartProductionOrder(productionOrderRepository, batchRepository, authPort);
startProductionOrder = new StartProductionOrder(productionOrderRepository, batchRepository, authPort, unitOfWork);
performedBy = ActorId.of("admin-user");
lenient().when(unitOfWork.executeAtomically(any())).thenAnswer(inv -> ((Supplier<?>) inv.getArgument(0)).get());
}
private StartProductionOrderCommand validCommand() {

View file

@ -3,6 +3,10 @@ package de.effigenix.infrastructure;
import com.fasterxml.jackson.databind.ObjectMapper;
import de.effigenix.domain.usermanagement.RoleName;
import de.effigenix.domain.usermanagement.UserStatus;
import de.effigenix.infrastructure.masterdata.persistence.entity.ArticleEntity;
import de.effigenix.infrastructure.masterdata.persistence.entity.ProductCategoryEntity;
import de.effigenix.infrastructure.masterdata.persistence.repository.ArticleJpaRepository;
import de.effigenix.infrastructure.masterdata.persistence.repository.ProductCategoryJpaRepository;
import de.effigenix.infrastructure.usermanagement.persistence.entity.RoleEntity;
import de.effigenix.infrastructure.usermanagement.persistence.entity.UserEntity;
import de.effigenix.infrastructure.usermanagement.persistence.repository.RoleJpaRepository;
@ -45,6 +49,12 @@ public abstract class AbstractIntegrationTest {
@Autowired
protected RoleJpaRepository roleRepository;
@Autowired
protected ArticleJpaRepository articleRepository;
@Autowired
protected ProductCategoryJpaRepository productCategoryRepository;
@Value("${jwt.secret}")
protected String jwtSecret;
@ -94,9 +104,11 @@ public abstract class AbstractIntegrationTest {
}
protected RoleEntity createRole(RoleName roleName, String description) {
RoleEntity role = new RoleEntity(
UUID.randomUUID().toString(), roleName, Set.of(), description);
return roleRepository.save(role);
return roleRepository.findByName(roleName).orElseGet(() -> {
RoleEntity role = new RoleEntity(
UUID.randomUUID().toString(), roleName, Set.of(), description);
return roleRepository.save(role);
});
}
protected UserEntity createUser(String username, String email, Set<RoleEntity> roles, String branchId) {
@ -106,4 +118,14 @@ public abstract class AbstractIntegrationTest {
branchId, UserStatus.ACTIVE, OffsetDateTime.now(ZoneOffset.UTC), null);
return userRepository.save(user);
}
// Persists a fresh ProductCategoryEntity and an ACTIVE ArticleEntity referencing it,
// returning the saved article's id. Used by integration tests that need a real
// article row to satisfy FK constraints (e.g. stock creation endpoints).
// NOTE(review): ArticleEntity ctor arg order assumed to be
// (id, name, articleNumber, categoryId, status, createdAt, updatedAt) — matches the
// call site here; confirm against the entity definition.
protected String createArticleId() {
String categoryId = UUID.randomUUID().toString();
// Category name gets a short random suffix to avoid unique-name collisions across tests.
productCategoryRepository.save(new ProductCategoryEntity(categoryId, "TestCat-" + categoryId.substring(0, 8), null));
var now = OffsetDateTime.now(ZoneOffset.UTC);
var article = new ArticleEntity(
UUID.randomUUID().toString(), "TestArticle-" + UUID.randomUUID().toString().substring(0, 8),
"ART-" + UUID.randomUUID().toString().substring(0, 8), categoryId, "ACTIVE", now, now);
return articleRepository.save(article).getId();
}
}

View file

@ -52,13 +52,17 @@ class StockControllerIntegrationTest extends AbstractIntegrationTest {
storageLocationId = createStorageLocation();
}
// Thin alias for the base-class helper: creates and persists a valid article
// (with its category) and returns its id, so this test no longer uses random
// UUIDs that violate the article FK.
private String newArticleId() {
return createArticleId();
}
// ==================== Bestandsposition anlegen Pflichtfelder ====================
@Test
@DisplayName("Bestandsposition mit Pflichtfeldern erstellen → 201")
void createStock_withRequiredFields_returns201() throws Exception {
var request = new CreateStockRequest(
UUID.randomUUID().toString(), storageLocationId, null, null, null);
newArticleId(), storageLocationId, null, null, null);
mockMvc.perform(post("/api/inventory/stocks")
.header("Authorization", "Bearer " + adminToken)
@ -78,7 +82,7 @@ class StockControllerIntegrationTest extends AbstractIntegrationTest {
@DisplayName("Bestandsposition mit allen Feldern erstellen → 201")
void createStock_withAllFields_returns201() throws Exception {
var request = new CreateStockRequest(
UUID.randomUUID().toString(), storageLocationId, "10.5", "KILOGRAM", 30);
newArticleId(), storageLocationId, "10.5", "KILOGRAM", 30);
mockMvc.perform(post("/api/inventory/stocks")
.header("Authorization", "Bearer " + adminToken)
@ -97,7 +101,7 @@ class StockControllerIntegrationTest extends AbstractIntegrationTest {
@Test
@DisplayName("Bestandsposition Duplikat (gleiche articleId+storageLocationId) → 409")
void createStock_duplicate_returns409() throws Exception {
String articleId = UUID.randomUUID().toString();
String articleId = newArticleId();
var request = new CreateStockRequest(articleId, storageLocationId, null, null, null);
mockMvc.perform(post("/api/inventory/stocks")
@ -942,7 +946,7 @@ class StockControllerIntegrationTest extends AbstractIntegrationTest {
@Test
@DisplayName("Bestandspositionen nach articleId filtern → 200")
void listStocks_filterByArticleId() throws Exception {
String articleId = UUID.randomUUID().toString();
String articleId = newArticleId();
createStockForArticle(articleId);
mockMvc.perform(get("/api/inventory/stocks")
@ -1512,7 +1516,7 @@ class StockControllerIntegrationTest extends AbstractIntegrationTest {
private String createStock() throws Exception {
var request = new CreateStockRequest(
UUID.randomUUID().toString(), storageLocationId, null, null, null);
newArticleId(), storageLocationId, null, null, null);
var result = mockMvc.perform(post("/api/inventory/stocks")
.header("Authorization", "Bearer " + adminToken)
@ -1526,7 +1530,7 @@ class StockControllerIntegrationTest extends AbstractIntegrationTest {
private String createStockForLocation(String locationId) throws Exception {
var request = new CreateStockRequest(
UUID.randomUUID().toString(), locationId, null, null, null);
newArticleId(), locationId, null, null, null);
var result = mockMvc.perform(post("/api/inventory/stocks")
.header("Authorization", "Bearer " + adminToken)
@ -1540,7 +1544,7 @@ class StockControllerIntegrationTest extends AbstractIntegrationTest {
private String createStockWithMinimumLevel(String minimumAmount, String unit) throws Exception {
var request = new CreateStockRequest(
UUID.randomUUID().toString(), storageLocationId, minimumAmount, unit, null);
newArticleId(), storageLocationId, minimumAmount, unit, null);
var result = mockMvc.perform(post("/api/inventory/stocks")
.header("Authorization", "Bearer " + adminToken)

View file

@ -408,7 +408,7 @@ class StorageLocationControllerIntegrationTest extends AbstractIntegrationTest {
// Stock an diesem Lagerort anlegen
var stockRequest = new CreateStockRequest(
UUID.randomUUID().toString(), id, null, null, null);
createArticleId(), id, null, null, null);
mockMvc.perform(post("/api/inventory/stocks")
.header("Authorization", "Bearer " + adminToken)
.contentType(MediaType.APPLICATION_JSON)

View file

@ -2,7 +2,6 @@ package de.effigenix.infrastructure.production.web;
import de.effigenix.domain.usermanagement.RoleName;
import de.effigenix.infrastructure.AbstractIntegrationTest;
import de.effigenix.infrastructure.production.persistence.entity.RecipeEntity;
import de.effigenix.infrastructure.production.web.dto.PlanBatchRequest;
import de.effigenix.infrastructure.usermanagement.persistence.entity.RoleEntity;
import de.effigenix.infrastructure.usermanagement.persistence.entity.UserEntity;

View file

@ -8,11 +8,11 @@ spring:
jpa:
database-platform: org.hibernate.dialect.H2Dialect
hibernate:
ddl-auto: create-drop
ddl-auto: none
show-sql: false
liquibase:
enabled: false # Use Hibernate for test schema
enabled: true
jwt:
secret: TestSecretKeyForUnitTestsMin256BitsLongForHS256AlgorithmSecurity