From 66397eb10739cb9ba842e4335a3ad28fc7e2a270 Mon Sep 17 00:00:00 2001
From: EryneKL <97091460+EryneKL@users.noreply.github.com>
Date: Mon, 28 Aug 2023 11:29:52 +0200
Subject: [PATCH 01/23] =?UTF-8?q?Feat=20-=20CDE=20196=20d=C3=A9terminer=20?=
 =?UTF-8?q?=C3=A0=20quel=20moment=20le=20fichier=20de=20log=20est=20g?=
 =?UTF-8?q?=C3=A9n=C3=A9r=C3=A9=20:=20=20=20=20=20=20-=20ajout=20d'un=20no?=
 =?UTF-8?q?m=20de=20topic=20dans=20application.properties=20=20=20=20=20?=
 =?UTF-8?q?=20-=20ajout=20d'une=20m=C3=A9thode=20sendEndOfTraitmentReport(?=
 =?UTF-8?q?)=20dans=20TopicProducer.java=20=20=20=20=20=20-=20ajout=20d'un?=
 =?UTF-8?q?=20appel=20=C3=A0=20la=20m=C3=A9thode=20sendEndOfTraitmentRepor?=
 =?UTF-8?q?t()=20dans=20TopicConsumer.java?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../java/fr/abes/bestppn/kafka/TopicConsumer.java |  1 +
 .../java/fr/abes/bestppn/kafka/TopicProducer.java | 14 ++++++++++++--
 src/main/resources/application.properties        |  2 +-
 3 files changed, 14 insertions(+), 3 deletions(-)

diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java
index 500586e..66151ea 100644
--- a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java
+++ b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java
@@ -94,6 +94,7 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) {
                 kbartToSend.clear();
                 ppnToCreate.clear();
                 mailAttachment.clearKbartDto();
+                producer.sendEndOfTraitmentReport(lignesKbart.headers()); // Appel le producer pour l'envoi du message de fin de traitement.
             } else {
                 LigneKbartDto ligneFromKafka = mapper.readValue(lignesKbart.value(), LigneKbartDto.class);
                 if (ligneFromKafka.isBestPpnEmpty()) {
diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java b/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java
index b7612e4..a18ef20 100644
--- a/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java
+++ b/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java
@@ -31,6 +31,9 @@ public class TopicProducer {
     @Value("${topic.name.target.noticeimprime}")
     private String topicNoticeImprimee;
 
+    @Value("${topic.name.target.endoftraitment}")
+    private String topicEndOfTraitment;
+
     @Autowired
     private KafkaTemplate kafkaTemplate;
 
@@ -46,8 +49,6 @@ public void sendKbart(List kbart, Headers headers) throws JsonPro
         }
     }
 
-
-
     @Transactional(transactionManager = "kafkaTransactionManager")
     public void sendPrintNotice(List ppnKbartProviderDtoList, Headers headers) throws JsonProcessingException {
         for (PpnKbartProviderDto ppnToCreate : ppnKbartProviderDtoList) {
@@ -55,6 +56,15 @@ public void sendPrintNotice(List ppnKbartProviderDtoList, H
         }
     }
 
+    /**
+     * Envoie un message de fin de traitement sur le topic kafka endOfTraitment_kbart2kafka
+     * @param headers le header du message (contient le nom du package et la date)
+     */
+    @Transactional(transactionManager = "kafkaTransactionManager")
+    public void sendEndOfTraitmentReport(Headers headers) {
+        setHeadersAndSend(headers, "OK", topicEndOfTraitment);
+    }
+
     private void setHeadersAndSend(Headers headers, String value, String topic) {
         MessageBuilder messageBuilder = MessageBuilder
                 .withPayload(value)
diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties
index 5bccabd..caebb49 100644
--- a/src/main/resources/application.properties
+++ b/src/main/resources/application.properties
@@ -30,10 +30,10 @@ spring.kafka.producer.transaction-id-prefix=tx-
 spring.kafka.consumer.properties.isolation.level= read_committed
 
 # Topic Kafka
 topic.name.target.kbart=bacon.kbart.withppn.toload
 topic.name.target.noticeimprime=bacon.kbart.sudoc.imprime.tocreate
+topic.name.target.endoftraitment=bestppn.endoftraitment
 topic.name.source.kbart=bacon.kbart.toload
-
 
 spring.jpa.open-in-view=false
 # SpringDoc Swagger

From 164c94a06ad43ec6412b8a03b62cda4fcd8219b0 Mon Sep 17 00:00:00 2001
From: EryneKL <97091460+EryneKL@users.noreply.github.com>
Date: Mon, 28 Aug 2023 15:52:32 +0200
Subject: [PATCH 02/23] =?UTF-8?q?Feat=20-=20CDE=20196=20d=C3=A9terminer=20?=
 =?UTF-8?q?=C3=A0=20quel=20moment=20le=20fichier=20de=20log=20est=20g?=
 =?UTF-8?q?=C3=A9n=C3=A9r=C3=A9=20:=20=20=20=20=20=20-=20ajout=20d'un=20no?=
 =?UTF-8?q?m=20de=20topic=20dans=20application.properties=20=20=20=20=20?=
 =?UTF-8?q?=20-=20ajout=20d'une=20m=C3=A9thode=20sendEndOfTraitmentReport(?=
 =?UTF-8?q?)=20dans=20TopicProducer.java=20=20=20=20=20=20-=20ajout=20d'un?=
 =?UTF-8?q?=20appel=20=C3=A0=20la=20m=C3=A9thode=20sendEndOfTraitmentRepor?=
 =?UTF-8?q?t()=20dans=20TopicConsumer.java?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java
index 66151ea..c1c3ca0 100644
--- a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java
+++ b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java
@@ -94,7 +94,8 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) {
                 kbartToSend.clear();
                 ppnToCreate.clear();
                 mailAttachment.clearKbartDto();
-                producer.sendEndOfTraitmentReport(lignesKbart.headers()); // Appel le producer pour l'envoi du message de fin de traitement.
+                // TODO résoudre l'erreur de l'envoi au producer
+//                producer.sendEndOfTraitmentReport(lignesKbart.headers()); // Appel le producer pour l'envoi du message de fin de traitement.
             } else {
                 LigneKbartDto ligneFromKafka = mapper.readValue(lignesKbart.value(), LigneKbartDto.class);
                 if (ligneFromKafka.isBestPpnEmpty()) {

From 64395656597af134fb34f35672b3a7693a61f1c7 Mon Sep 17 00:00:00 2001
From: EryneKL <97091460+EryneKL@users.noreply.github.com>
Date: Wed, 6 Sep 2023 16:30:31 +0200
Subject: [PATCH 03/23] =?UTF-8?q?Feat=20-=20CDE=20196=20d=C3=A9terminer=20?=
 =?UTF-8?q?=C3=A0=20quel=20moment=20le=20fichier=20de=20log=20est=20genere?=
 =?UTF-8?q?=20:=20=20=20=20=20=20-=20ajout=20de=20l'envoi=20d'un=20message?=
 =?UTF-8?q?=20"OK"=20sur=20un=20topic=20d=C3=A9di=C3=A9=20lorsque=20le=20t?=
 =?UTF-8?q?raitement=20bestPpn=20est=20termin=C3=A9.?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java | 4 ++--
 src/main/java/fr/abes/bestppn/kafka/TopicProducer.java | 2 +-
 src/main/resources/application.properties              | 8 ++++----
 3 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java
index c1c3ca0..cf075c9 100644
--- a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java
+++ b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java
@@ -91,11 +91,11 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) {
                     isOnError = false;
                 }
                 serviceMail.sendMailWithAttachment(filename,mailAttachment);
+                producer.sendEndOfTraitmentReport(lignesKbart.headers()); // Appel le producer pour l'envoi du message de fin de traitement.
+ // TODO récupérer les infos et créer le premier FICHIER de log dans un répertoire temporaire à la racine du projet kbartToSend.clear(); ppnToCreate.clear(); mailAttachment.clearKbartDto(); - // TODO résoudre l'erreur de l'envoi au producer -// producer.sendEndOfTraitmentReport(lignesKbart.headers()); // Appel le producer pour l'envoi du message de fin de traitement. } else { LigneKbartDto ligneFromKafka = mapper.readValue(lignesKbart.value(), LigneKbartDto.class); if (ligneFromKafka.isBestPpnEmpty()) { diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java b/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java index a18ef20..5086652 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java @@ -60,7 +60,7 @@ public void sendPrintNotice(List ppnKbartProviderDtoList, H * Envoie un message de fin de traitement sur le topic kafka endOfTraitment_kbart2kafka * @param headers le header du message (contient le nom du package et la date) */ - @Transactional(transactionManager = "kafkaTransactionManager") + @Transactional(transactionManager = "kafkaTransactionManager") // TODO vérifier la possibilité de retirer le transactional (attention à kafkaTransactionManager() dans KafkaConfig.java) public void sendEndOfTraitmentReport(Headers headers) { setHeadersAndSend(headers, "OK", topicEndOfTraitment); } diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index caebb49..04ee29c 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -28,10 +28,10 @@ spring.kafka.producer.transaction-id-prefix=tx- spring.kafka.consumer.properties.isolation.level= read_committed # Topic Kafka -topic.name.target.kbart=bacon.kbart.withppn.toload -topic.name.target.noticeimprime=bacon.kbart.sudoc.imprime.tocreate -topic.name.target.endoftraitment=bestppn.endoftraitment -topic.name.source.kbart=bacon.kbart.toload +topic.name.target.kbart=bacon.kbart.withppn.toload.TEST.CDE.178 +topic.name.target.noticeimprime=bacon.kbart.sudoc.imprime.tocreate.TEST.CDE.178 +topic.name.target.endoftraitment=bestppn.endoftraitment.TEST.CDE.178 +topic.name.source.kbart=bacon.kbart.toload.TEST.CDE.178 spring.jpa.open-in-view=false From c0940e42269703711ef2c05c1df2ce8b31d5f9b9 Mon Sep 17 00:00:00 2001 From: EryneKL <97091460+EryneKL@users.noreply.github.com> Date: Thu, 7 Sep 2023 14:51:57 +0200 Subject: [PATCH 04/23] =?UTF-8?q?Feat=20-=20CDE-195-creer-consumer-pour-ge?= =?UTF-8?q?nerer-les-2-fichiers-de-log=20:=20=20=20=20=20=20-=20cr=C3=A9at?= =?UTF-8?q?ion=20du=20service=20de=20cr=C3=A9ation=20du=20fichier=20de=20l?= =?UTF-8?q?og?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../fr/abes/bestppn/kafka/TopicConsumer.java | 56 +++++++++++++------ .../fr/abes/bestppn/kafka/TopicProducer.java | 2 +- .../abes/bestppn/service/LogFileService.java | 45 +++++++++++++++ 3 files changed, 84 insertions(+), 19 deletions(-) create mode 100644 src/main/java/fr/abes/bestppn/service/LogFileService.java diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java index 67f1cc3..1b8e949 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java @@ -1,6 +1,8 @@ package fr.abes.bestppn.kafka; import com.fasterxml.jackson.databind.ObjectMapper; +import com.opencsv.exceptions.CsvDataTypeMismatchException; 
+import com.opencsv.exceptions.CsvRequiredFieldEmptyException; import fr.abes.bestppn.dto.PackageKbartDto; import fr.abes.bestppn.dto.kafka.LigneKbartDto; import fr.abes.bestppn.dto.kafka.PpnKbartProviderDto; @@ -13,6 +15,7 @@ import fr.abes.bestppn.repository.bacon.ProviderRepository; import fr.abes.bestppn.service.BestPpnService; import fr.abes.bestppn.service.EmailService; +import fr.abes.bestppn.service.LogFileService; import fr.abes.bestppn.utils.Utils; import jakarta.mail.MessagingException; import lombok.RequiredArgsConstructor; @@ -27,7 +30,10 @@ import java.io.IOException; import java.net.URISyntaxException; +import java.text.DateFormat; +import java.text.SimpleDateFormat; import java.util.ArrayList; +import java.util.Date; import java.util.List; import java.util.Optional; @@ -46,6 +52,10 @@ public class TopicConsumer { @Autowired private EmailService serviceMail; + + @Autowired + private LogFileService logFileService; + private final List kbartToSend = new ArrayList<>(); private final List ppnToCreate = new ArrayList<>(); @@ -59,25 +69,33 @@ public class TopicConsumer { private boolean isOnError = false; private int nbBestPpnFind = 0; + + private int linesWithInputDataErrors = 0; + + private int linesWithErrorsInBestPPNSearch = 0; + /** * Listener Kafka qui écoute un topic et récupère les messages dès qu'ils y arrivent. * @param lignesKbart message kafka récupéré par le Consumer Kafka */ @KafkaListener(topics = {"${topic.name.source.kbart}"}, groupId = "lignesKbart", containerFactory = "kafkaKbartListenerContainerFactory") public void listenKbartFromKafka(ConsumerRecord lignesKbart) { + try { String filename = ""; String currentLine = ""; String totalLine = ""; boolean injectKafka = false; for (Header header : lignesKbart.headers().toArray()) { - if(header.key().equals("FileName")){ + if (header.key().equals("FileName")) { filename = new String(header.value()); - if (filename.contains("_FORCE")) {injectKafka = true;} - ThreadContext.put("package",filename); - } else if(header.key().equals("CurrentLine")){ + if (filename.contains("_FORCE")) { + injectKafka = true; + } + ThreadContext.put("package", filename); + } else if (header.key().equals("CurrentLine")) { currentLine = new String(header.value()); - } else if(header.key().equals("TotalLine")){ + } else if (header.key().equals("TotalLine")) { totalLine = new String(header.value()); } } @@ -85,8 +103,8 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) { String providerName = Utils.extractProvider(filename); Optional providerOpt = providerRepository.findByProvider(providerName); - if(lignesKbart.value().equals("OK") ){ - if( !isOnError ) { + if (lignesKbart.value().equals("OK")) { + if (!isOnError) { if (providerOpt.isPresent()) { Provider provider = providerOpt.get(); ProviderPackageId providerPackageId = new ProviderPackageId(Utils.extractPackageName(filename), Utils.extractDate(filename), provider.getIdtProvider()); @@ -100,36 +118,35 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) { ProviderPackage providerPackage = new ProviderPackage(new ProviderPackageId(Utils.extractPackageName(filename), Utils.extractDate(filename), savedProvider.getIdtProvider()), 'N'); providerPackageRepository.save(providerPackage); } - - // TODO vérifier s'il est pertinent de retirer le "_FORCE" du paramètre FileName du header avant envoi au producer - // fileName = fileName.contains("_FORCE") ? 
fileName.replace("_FORCE", "") : fileName; - producer.sendKbart(kbartToSend, lignesKbart.headers()); producer.sendPrintNotice(ppnToCreate, lignesKbart.headers()); } else { isOnError = false; } - log.info("Nombre de best ppn trouvé : "+ this.nbBestPpnFind +"/"+ nbLine); + log.info("Nombre de best ppn trouvé : " + this.nbBestPpnFind + "/" + nbLine); this.nbBestPpnFind = 0; - serviceMail.sendMailWithAttachment(filename,mailAttachment); +// serviceMail.sendMailWithAttachment(filename, mailAttachment); // TODO réactiver l'envoi du mail !! producer.sendEndOfTraitmentReport(lignesKbart.headers()); // Appel le producer pour l'envoi du message de fin de traitement. - // TODO récupérer les infos et créer le premier FICHIER de log dans un répertoire temporaire à la racine du projet + logFileService.createFileLog(new SimpleDateFormat("yyyy-MM-dd:hh:mm").format(new Date(lignesKbart.timestamp())), filename, Integer.parseInt(totalLine), Integer.parseInt(totalLine) - this.linesWithInputDataErrors - this.linesWithErrorsInBestPPNSearch, this.linesWithInputDataErrors, this.linesWithErrorsInBestPPNSearch); kbartToSend.clear(); ppnToCreate.clear(); mailAttachment.clearKbartDto(); + this.linesWithInputDataErrors = 0; + this.linesWithErrorsInBestPPNSearch = 0; } else { LigneKbartDto ligneFromKafka = mapper.readValue(lignesKbart.value(), LigneKbartDto.class); if (ligneFromKafka.isBestPpnEmpty()) { log.info("Debut du calcul du bestppn sur la ligne : " + nbLine); log.info(ligneFromKafka.toString()); PpnWithDestinationDto ppnWithDestinationDto = service.getBestPpn(ligneFromKafka, providerName, injectKafka); - switch (ppnWithDestinationDto.getDestination()){ + switch (ppnWithDestinationDto.getDestination()) { case BEST_PPN_BACON -> { ligneFromKafka.setBestPpn(ppnWithDestinationDto.getPpn()); this.nbBestPpnFind++; kbartToSend.add(ligneFromKafka); } - case PRINT_PPN_SUDOC -> ppnToCreate.add(new PpnKbartProviderDto(ppnWithDestinationDto.getPpn(),ligneFromKafka,providerName)); + case PRINT_PPN_SUDOC -> + ppnToCreate.add(new PpnKbartProviderDto(ppnWithDestinationDto.getPpn(), ligneFromKafka, providerName)); } } else { log.info("Bestppn déjà existant sur la ligne : " + nbLine + ", le voici : " + ligneFromKafka.getBestPpn()); @@ -141,11 +158,14 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) { isOnError = true; log.error("Erreur dans les données en entrée, provider incorrect"); addLineToMailAttachementWithErrorMessage(e.getMessage()); - } catch (IllegalPpnException | BestPpnException | IOException | URISyntaxException | RestClientException | IllegalArgumentException e) { + linesWithInputDataErrors++; + } catch (IllegalPpnException | BestPpnException | IOException | URISyntaxException | RestClientException | + IllegalArgumentException e) { isOnError = true; log.error(e.getMessage()); addLineToMailAttachementWithErrorMessage(e.getMessage()); - } catch (MessagingException | IllegalPackageException | IllegalDateException e) { + linesWithErrorsInBestPPNSearch++; + } catch (IllegalPackageException | IllegalDateException e) { isOnError = true; log.error(e.getMessage()); addLineToMailAttachementWithErrorMessage(e.getMessage()); diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java b/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java index 5086652..a18ef20 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java @@ -60,7 +60,7 @@ public void sendPrintNotice(List ppnKbartProviderDtoList, H * Envoie un message de fin de traitement 
sur le topic kafka endOfTraitment_kbart2kafka * @param headers le header du message (contient le nom du package et la date) */ - @Transactional(transactionManager = "kafkaTransactionManager") // TODO vérifier la possibilité de retirer le transactional (attention à kafkaTransactionManager() dans KafkaConfig.java) + @Transactional(transactionManager = "kafkaTransactionManager") public void sendEndOfTraitmentReport(Headers headers) { setHeadersAndSend(headers, "OK", topicEndOfTraitment); } diff --git a/src/main/java/fr/abes/bestppn/service/LogFileService.java b/src/main/java/fr/abes/bestppn/service/LogFileService.java new file mode 100644 index 0000000..979af5a --- /dev/null +++ b/src/main/java/fr/abes/bestppn/service/LogFileService.java @@ -0,0 +1,45 @@ +package fr.abes.bestppn.service; + +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Service; + +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.io.Writer; +import java.util.Date; + +@Slf4j +@Service +public class LogFileService { + + public void createFileLog(String timestamp, String fileName, int totalLines, int linesOk, int linesWithInputDataErrors, int linesWithErrorsInBestPPNSearch) throws IOException { + + File fichier = new File(fileName.replaceAll(".tsv", ".log")); + Writer writer = null; + + try { + writer = new FileWriter(fichier); + writer.write(timestamp + " INFO - Total lines : " + totalLines); + writer.write("\n" + timestamp + " INFO - Lines OK : " + linesOk); + writer.write("\n" + timestamp + " INFO - Lines with input data errors : " + linesWithInputDataErrors); + writer.write("\n" + timestamp + " INFO - Lines with errors in bestPpn search : " + linesWithErrorsInBestPPNSearch); + } catch (IOException e) { + throw new IOException(e); + } finally { + if (writer != null) { + try { + writer.close(); + } catch (IOException e) { + throw new IOException(e); + } + } + } + + // TODO placer le fichier de log au bon emplacement + + // Suppression du log temporaire +// Files.deleteIfExists(logPath); + } + +} From 85533b7296ceaa5a071c3512cb4056586a2830c9 Mon Sep 17 00:00:00 2001 From: EryneKL <97091460+EryneKL@users.noreply.github.com> Date: Fri, 8 Sep 2023 12:05:14 +0200 Subject: [PATCH 05/23] =?UTF-8?q?Feat=20-=20CDE-195-creer-consumer-pour-ge?= =?UTF-8?q?nerer-les-2-fichiers-de-log=20:=20=20=20=20=20=20-=20cr=C3=A9at?= =?UTF-8?q?ion=20du=20service=20de=20cr=C3=A9ation=20du=20fichier=20de=20l?= =?UTF-8?q?og?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../fr/abes/bestppn/kafka/TopicConsumer.java | 6 ++- .../abes/bestppn/service/LogFileService.java | 42 +++++++++---------- 2 files changed, 23 insertions(+), 25 deletions(-) diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java index 1b8e949..6f0ed84 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java @@ -125,9 +125,9 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) { } log.info("Nombre de best ppn trouvé : " + this.nbBestPpnFind + "/" + nbLine); this.nbBestPpnFind = 0; -// serviceMail.sendMailWithAttachment(filename, mailAttachment); // TODO réactiver l'envoi du mail !! + serviceMail.sendMailWithAttachment(filename, mailAttachment); producer.sendEndOfTraitmentReport(lignesKbart.headers()); // Appel le producer pour l'envoi du message de fin de traitement. 
- logFileService.createFileLog(new SimpleDateFormat("yyyy-MM-dd:hh:mm").format(new Date(lignesKbart.timestamp())), filename, Integer.parseInt(totalLine), Integer.parseInt(totalLine) - this.linesWithInputDataErrors - this.linesWithErrorsInBestPPNSearch, this.linesWithInputDataErrors, this.linesWithErrorsInBestPPNSearch); + logFileService.createFileLog(filename, Integer.parseInt(totalLine), Integer.parseInt(totalLine) - this.linesWithInputDataErrors - this.linesWithErrorsInBestPPNSearch, this.linesWithInputDataErrors, this.linesWithErrorsInBestPPNSearch); kbartToSend.clear(); ppnToCreate.clear(); mailAttachment.clearKbartDto(); @@ -170,6 +170,8 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) { log.error(e.getMessage()); addLineToMailAttachementWithErrorMessage(e.getMessage()); throw new RuntimeException(e); + } catch (MessagingException e) { + throw new RuntimeException(e); } } diff --git a/src/main/java/fr/abes/bestppn/service/LogFileService.java b/src/main/java/fr/abes/bestppn/service/LogFileService.java index 979af5a..9629660 100644 --- a/src/main/java/fr/abes/bestppn/service/LogFileService.java +++ b/src/main/java/fr/abes/bestppn/service/LogFileService.java @@ -7,39 +7,35 @@ import java.io.FileWriter; import java.io.IOException; import java.io.Writer; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.Date; +import java.util.logging.FileHandler; +import java.util.logging.Logger; +import java.util.logging.SimpleFormatter; @Slf4j @Service public class LogFileService { - public void createFileLog(String timestamp, String fileName, int totalLines, int linesOk, int linesWithInputDataErrors, int linesWithErrorsInBestPPNSearch) throws IOException { + public void createFileLog(String fileName, int totalLines, int linesOk, int linesWithInputDataErrors, int linesWithErrorsInBestPPNSearch) throws IOException { - File fichier = new File(fileName.replaceAll(".tsv", ".log")); - Writer writer = null; + Logger logger = Logger.getLogger("ExecutionReport"); + FileHandler fh; try { - writer = new FileWriter(fichier); - writer.write(timestamp + " INFO - Total lines : " + totalLines); - writer.write("\n" + timestamp + " INFO - Lines OK : " + linesOk); - writer.write("\n" + timestamp + " INFO - Lines with input data errors : " + linesWithInputDataErrors); - writer.write("\n" + timestamp + " INFO - Lines with errors in bestPpn search : " + linesWithErrorsInBestPPNSearch); - } catch (IOException e) { - throw new IOException(e); - } finally { - if (writer != null) { - try { - writer.close(); - } catch (IOException e) { - throw new IOException(e); - } - } + fh = new FileHandler(fileName.replaceAll(".tsv", ".log"), 1000, 1); + logger.addHandler(fh); + SimpleFormatter formatter = new SimpleFormatter(); + fh.setFormatter(formatter); + logger.setUseParentHandlers(false); // désactive l'affiche du log dans le terminal + logger.info("TOTAL LINES : " + totalLines + " / LINES OK : " + linesOk + " / LINES WITH INPUT DATA ERRORS : " + linesWithInputDataErrors + " / LINES WITH ERRORS IN BESTPPN SEARCH : " + linesWithErrorsInBestPPNSearch); + fh.close(); + + // TODO placer le fichier de log au bon emplacement (begonia) + } catch (SecurityException | IOException e) { + e.printStackTrace(); } - - // TODO placer le fichier de log au bon emplacement - - // Suppression du log temporaire -// Files.deleteIfExists(logPath); } } From 35d5e0d4dffe982758c709c8ee1738670c6f208b Mon Sep 17 00:00:00 2001 From: EryneKL <97091460+EryneKL@users.noreply.github.com> Date: Tue, 12 Sep 2023 09:02:30 +0200 
Subject: [PATCH 06/23] =?UTF-8?q?Feat=20-=20CDE-195-creer-consumer-pour-ge?= =?UTF-8?q?nerer-les-2-fichiers-de-log=20:=20=20=20=20=20=20-=20ajout=20d'?= =?UTF-8?q?un=20param=C3=A8tre=20dans=20le=20ThreadContext=20(log4j2)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../fr/abes/bestppn/kafka/TopicConsumer.java | 23 ++++++++----------- .../abes/bestppn/service/LogFileService.java | 8 +------ src/main/resources/log4j2-all.xml | 2 +- 3 files changed, 12 insertions(+), 21 deletions(-) diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java index 6f0ed84..0f3aaae 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java @@ -1,8 +1,6 @@ package fr.abes.bestppn.kafka; import com.fasterxml.jackson.databind.ObjectMapper; -import com.opencsv.exceptions.CsvDataTypeMismatchException; -import com.opencsv.exceptions.CsvRequiredFieldEmptyException; import fr.abes.bestppn.dto.PackageKbartDto; import fr.abes.bestppn.dto.kafka.LigneKbartDto; import fr.abes.bestppn.dto.kafka.PpnKbartProviderDto; @@ -30,10 +28,7 @@ import java.io.IOException; import java.net.URISyntaxException; -import java.text.DateFormat; -import java.text.SimpleDateFormat; import java.util.ArrayList; -import java.util.Date; import java.util.List; import java.util.Optional; @@ -92,9 +87,10 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) { if (filename.contains("_FORCE")) { injectKafka = true; } - ThreadContext.put("package", filename); + ThreadContext.put("package", filename); // Ajoute le nom de fichier au contexte log4j2 pour inscription dans la key kafka } else if (header.key().equals("CurrentLine")) { currentLine = new String(header.value()); + ThreadContext.put("nbrLine", currentLine); // Ajoute le numéro de ligne courante au contexte log4j2 pour inscription dans le header kafka } else if (header.key().equals("TotalLine")) { totalLine = new String(header.value()); } @@ -127,7 +123,7 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) { this.nbBestPpnFind = 0; serviceMail.sendMailWithAttachment(filename, mailAttachment); producer.sendEndOfTraitmentReport(lignesKbart.headers()); // Appel le producer pour l'envoi du message de fin de traitement. 
- logFileService.createFileLog(filename, Integer.parseInt(totalLine), Integer.parseInt(totalLine) - this.linesWithInputDataErrors - this.linesWithErrorsInBestPPNSearch, this.linesWithInputDataErrors, this.linesWithErrorsInBestPPNSearch); + logFileService.createExecutionReport(filename, Integer.parseInt(totalLine), Integer.parseInt(totalLine) - this.linesWithInputDataErrors - this.linesWithErrorsInBestPPNSearch, this.linesWithInputDataErrors, this.linesWithErrorsInBestPPNSearch); kbartToSend.clear(); ppnToCreate.clear(); mailAttachment.clearKbartDto(); @@ -146,7 +142,7 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) { kbartToSend.add(ligneFromKafka); } case PRINT_PPN_SUDOC -> - ppnToCreate.add(new PpnKbartProviderDto(ppnWithDestinationDto.getPpn(), ligneFromKafka, providerName)); + ppnToCreate.add(new PpnKbartProviderDto(ppnWithDestinationDto.getPpn(), ligneFromKafka, providerName)); } } else { log.info("Bestppn déjà existant sur la ligne : " + nbLine + ", le voici : " + ligneFromKafka.getBestPpn()); @@ -159,18 +155,19 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) { log.error("Erreur dans les données en entrée, provider incorrect"); addLineToMailAttachementWithErrorMessage(e.getMessage()); linesWithInputDataErrors++; - } catch (IllegalPpnException | BestPpnException | IOException | URISyntaxException | RestClientException | - IllegalArgumentException e) { + } catch (URISyntaxException | RestClientException | IllegalArgumentException | IOException | + IllegalPackageException | IllegalDateException e) { isOnError = true; log.error(e.getMessage()); addLineToMailAttachementWithErrorMessage(e.getMessage()); - linesWithErrorsInBestPPNSearch++; - } catch (IllegalPackageException | IllegalDateException e) { + linesWithInputDataErrors++; + } catch (IllegalPpnException | BestPpnException e) { isOnError = true; log.error(e.getMessage()); addLineToMailAttachementWithErrorMessage(e.getMessage()); - throw new RuntimeException(e); + linesWithErrorsInBestPPNSearch++; } catch (MessagingException e) { + log.error(e.getMessage()); throw new RuntimeException(e); } } diff --git a/src/main/java/fr/abes/bestppn/service/LogFileService.java b/src/main/java/fr/abes/bestppn/service/LogFileService.java index 9629660..95635c5 100644 --- a/src/main/java/fr/abes/bestppn/service/LogFileService.java +++ b/src/main/java/fr/abes/bestppn/service/LogFileService.java @@ -3,13 +3,7 @@ import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Service; -import java.io.File; -import java.io.FileWriter; import java.io.IOException; -import java.io.Writer; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Date; import java.util.logging.FileHandler; import java.util.logging.Logger; import java.util.logging.SimpleFormatter; @@ -18,7 +12,7 @@ @Service public class LogFileService { - public void createFileLog(String fileName, int totalLines, int linesOk, int linesWithInputDataErrors, int linesWithErrorsInBestPPNSearch) throws IOException { + public void createExecutionReport(String fileName, int totalLines, int linesOk, int linesWithInputDataErrors, int linesWithErrorsInBestPPNSearch) throws IOException { Logger logger = Logger.getLogger("ExecutionReport"); FileHandler fh; diff --git a/src/main/resources/log4j2-all.xml b/src/main/resources/log4j2-all.xml index bab9116..082bd0c 100644 --- a/src/main/resources/log4j2-all.xml +++ b/src/main/resources/log4j2-all.xml @@ -17,7 +17,7 @@ - + ${kafkaServer} From 5c7e80fe9457859d8cd0339174995cdd7697787b Mon Sep 17 00:00:00 
2001 From: EryneKL <97091460+EryneKL@users.noreply.github.com> Date: Wed, 4 Oct 2023 10:28:03 +0200 Subject: [PATCH 07/23] =?UTF-8?q?FEAT=20:=20CDE-195-creer-consumer-pour-ge?= =?UTF-8?q?nerer-les-2-fichiers-de-log=20=20=20=20=20=20-=20ajout=20du=20d?= =?UTF-8?q?=C3=A9placement=20du=20fichier=20de=20log=20dans=20le=20dossier?= =?UTF-8?q?=20temporaire=20=20=20=20=20=20-=20ajout=20d'informations=20dan?= =?UTF-8?q?s=20le=20ThreadContext=20=20=20=20=20=20-=20suppression=20de=20?= =?UTF-8?q?la=20lev=C3=A9e=20de=20BestPpnException=20dans=20sendKbart=20po?= =?UTF-8?q?ur=20cause=20de=20doublon?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../fr/abes/bestppn/kafka/TopicConsumer.java | 5 +-- .../fr/abes/bestppn/kafka/TopicProducer.java | 9 ++-- .../abes/bestppn/service/LogFileService.java | 41 +++++++++++++++---- src/main/resources/application.properties | 8 ++-- .../bestppn/service/BestPpnServiceTest.java | 4 +- 5 files changed, 45 insertions(+), 22 deletions(-) diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java index 0f3aaae..f8188bc 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java @@ -75,7 +75,6 @@ public class TopicConsumer { */ @KafkaListener(topics = {"${topic.name.source.kbart}"}, groupId = "lignesKbart", containerFactory = "kafkaKbartListenerContainerFactory") public void listenKbartFromKafka(ConsumerRecord lignesKbart) { - try { String filename = ""; String currentLine = ""; @@ -87,14 +86,14 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) { if (filename.contains("_FORCE")) { injectKafka = true; } - ThreadContext.put("package", filename); // Ajoute le nom de fichier au contexte log4j2 pour inscription dans la key kafka } else if (header.key().equals("CurrentLine")) { currentLine = new String(header.value()); - ThreadContext.put("nbrLine", currentLine); // Ajoute le numéro de ligne courante au contexte log4j2 pour inscription dans le header kafka } else if (header.key().equals("TotalLine")) { totalLine = new String(header.value()); } } + ThreadContext.put("package", filename + "[line : " + currentLine + "]"); // Ajoute le numéro de ligne courante au contexte log4j2 pour inscription dans le header kafka + String nbLine = currentLine + "/" + totalLine; String providerName = Utils.extractProvider(filename); Optional providerOpt = providerRepository.findByProvider(providerName); diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java b/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java index a18ef20..d5eb867 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java @@ -4,7 +4,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import fr.abes.bestppn.dto.kafka.LigneKbartDto; import fr.abes.bestppn.dto.kafka.PpnKbartProviderDto; -import fr.abes.bestppn.exception.BestPpnException; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.common.header.Header; @@ -39,12 +38,9 @@ public class TopicProducer { private final ObjectMapper mapper; - @Transactional(transactionManager = "kafkaTransactionManager", rollbackFor = {BestPpnException.class, JsonProcessingException.class}) - public void sendKbart(List kbart, Headers headers) throws JsonProcessingException, BestPpnException { + @Transactional(transactionManager = "kafkaTransactionManager", rollbackFor = 
{JsonProcessingException.class}) + public void sendKbart(List kbart, Headers headers) throws JsonProcessingException { for (LigneKbartDto ligne : kbart) { - if( ligne.isBestPpnEmpty()){ - throw new BestPpnException("La ligne " + ligne +" n'a pas de BestPpn."); - } setHeadersAndSend(headers, mapper.writeValueAsString(ligne), topicKbart); } } @@ -63,6 +59,7 @@ public void sendPrintNotice(List ppnKbartProviderDtoList, H @Transactional(transactionManager = "kafkaTransactionManager") public void sendEndOfTraitmentReport(Headers headers) { setHeadersAndSend(headers, "OK", topicEndOfTraitment); + log.info("End of traitment report send."); } private void setHeadersAndSend(Headers headers, String value, String topic) { diff --git a/src/main/java/fr/abes/bestppn/service/LogFileService.java b/src/main/java/fr/abes/bestppn/service/LogFileService.java index 95635c5..bc00bc9 100644 --- a/src/main/java/fr/abes/bestppn/service/LogFileService.java +++ b/src/main/java/fr/abes/bestppn/service/LogFileService.java @@ -4,6 +4,12 @@ import org.springframework.stereotype.Service; import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; +import java.util.Locale; import java.util.logging.FileHandler; import java.util.logging.Logger; import java.util.logging.SimpleFormatter; @@ -12,21 +18,42 @@ @Service public class LogFileService { + /** + * Méthode qui créé le rapport d'execution dans un fichier log indépendant du reste de l'application + * @param fileName le nom du fichier + * @param totalLines le nombre total de lignes pour le fichier concerné + * @param linesOk le nombre de lignes OK pour le fichier concerné + * @param linesWithInputDataErrors le nombre de lignes contenant des erreurs de données + * @param linesWithErrorsInBestPPNSearch le nombre total de lignes contenant des erreurs lors de la recherche du bestPpn + * @throws IOException exception levée + */ public void createExecutionReport(String fileName, int totalLines, int linesOk, int linesWithInputDataErrors, int linesWithErrorsInBestPPNSearch) throws IOException { - - Logger logger = Logger.getLogger("ExecutionReport"); - FileHandler fh; - try { - fh = new FileHandler(fileName.replaceAll(".tsv", ".log"), 1000, 1); + // Création du fichier de log + Logger logger = Logger.getLogger("ExecutionReport"); + FileHandler fh; + Path source = Path.of(fileName.replaceAll(".tsv", ".log")); + fh = new FileHandler(String.valueOf(source), 1000, 1); logger.addHandler(fh); SimpleFormatter formatter = new SimpleFormatter(); fh.setFormatter(formatter); - logger.setUseParentHandlers(false); // désactive l'affiche du log dans le terminal + logger.setUseParentHandlers(false); // désactive l'affichage du log dans le terminal logger.info("TOTAL LINES : " + totalLines + " / LINES OK : " + linesOk + " / LINES WITH INPUT DATA ERRORS : " + linesWithInputDataErrors + " / LINES WITH ERRORS IN BESTPPN SEARCH : " + linesWithErrorsInBestPPNSearch); + + // Fermeture du fichier de log fh.close(); - // TODO placer le fichier de log au bon emplacement (begonia) + // Copie le fichier existant vers le répertoire temporaire en ajoutant sa date de création + assert source != null; + if (Files.exists(source)) { + LocalDateTime time = LocalDateTime.now(); + DateTimeFormatter format = DateTimeFormatter.ofPattern("yyyy-MM-dd-HH-mm-ss", Locale.FRANCE); + String date = format.format(time); + Path target = Path.of("tempLog\\" + date + "_" + source); + + 
Files.move(source, target, StandardCopyOption.REPLACE_EXISTING); + log.info("Fichier de log transféré dans le dossier temporaire."); + } } catch (SecurityException | IOException e) { e.printStackTrace(); } diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 04ee29c..caebb49 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -28,10 +28,10 @@ spring.kafka.producer.transaction-id-prefix=tx- spring.kafka.consumer.properties.isolation.level= read_committed # Topic Kafka -topic.name.target.kbart=bacon.kbart.withppn.toload.TEST.CDE.178 -topic.name.target.noticeimprime=bacon.kbart.sudoc.imprime.tocreate.TEST.CDE.178 -topic.name.target.endoftraitment=bestppn.endoftraitment.TEST.CDE.178 -topic.name.source.kbart=bacon.kbart.toload.TEST.CDE.178 +topic.name.target.kbart=bacon.kbart.withppn.toload +topic.name.target.noticeimprime=bacon.kbart.sudoc.imprime.tocreate +topic.name.target.endoftraitment=bestppn.endoftraitment +topic.name.source.kbart=bacon.kbart.toload spring.jpa.open-in-view=false diff --git a/src/test/java/fr/abes/bestppn/service/BestPpnServiceTest.java b/src/test/java/fr/abes/bestppn/service/BestPpnServiceTest.java index 4194c03..03e9ac2 100644 --- a/src/test/java/fr/abes/bestppn/service/BestPpnServiceTest.java +++ b/src/test/java/fr/abes/bestppn/service/BestPpnServiceTest.java @@ -720,7 +720,7 @@ void bestPpnWithScoreTest3() throws BestPpnException { Set ppnPrintResultList = new HashSet<>(); PpnWithDestinationDto result = bestPpnService.getBestPpnByScore(kbart, ppnElecResultList, ppnPrintResultList, true); - Assertions.assertEquals("100000001, 100000002", result.getPpn()); + Assertions.assertEquals("", result.getPpn()); } @Test @@ -736,7 +736,7 @@ void bestPpnWithScoreTest4() throws BestPpnException { ppnPrintResultList.add("100000002"); PpnWithDestinationDto result = bestPpnService.getBestPpnByScore(kbart, ppnElecResultList, ppnPrintResultList, true); - Assertions.assertEquals("100000001, 100000002", result.getPpn()); + Assertions.assertEquals("", result.getPpn()); } @Test From 112a2d87df9c988b29f79b7910148647cae7ae27 Mon Sep 17 00:00:00 2001 From: EryneKL <97091460+EryneKL@users.noreply.github.com> Date: Wed, 4 Oct 2023 10:44:57 +0200 Subject: [PATCH 08/23] =?UTF-8?q?FEAT=20:=20CDE-195-creer-consumer-pour-ge?= =?UTF-8?q?nerer-les-2-fichiers-de-log=20=20=20=20=20=20-=20ajout=20de=20l?= =?UTF-8?q?a=20cr=C3=A9ation=20=C3=A0=20la=20vol=C3=A9e=20du=20r=C3=A9pert?= =?UTF-8?q?oire=20tempLog?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/main/java/fr/abes/bestppn/service/LogFileService.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/main/java/fr/abes/bestppn/service/LogFileService.java b/src/main/java/fr/abes/bestppn/service/LogFileService.java index bc00bc9..9cbbc30 100644 --- a/src/main/java/fr/abes/bestppn/service/LogFileService.java +++ b/src/main/java/fr/abes/bestppn/service/LogFileService.java @@ -6,6 +6,7 @@ import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.Paths; import java.nio.file.StandardCopyOption; import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; @@ -49,6 +50,9 @@ public void createExecutionReport(String fileName, int totalLines, int linesOk, LocalDateTime time = LocalDateTime.now(); DateTimeFormatter format = DateTimeFormatter.ofPattern("yyyy-MM-dd-HH-mm-ss", Locale.FRANCE); String date = format.format(time); + + String tempLog 
= "tempLog/"; + Files.createDirectory(Paths.get(tempLog)); Path target = Path.of("tempLog\\" + date + "_" + source); Files.move(source, target, StandardCopyOption.REPLACE_EXISTING); From 22a9b83c701e78b12e9ccc2d374d3d526662a8cb Mon Sep 17 00:00:00 2001 From: EryneKL <97091460+EryneKL@users.noreply.github.com> Date: Thu, 5 Oct 2023 11:26:46 +0200 Subject: [PATCH 09/23] =?UTF-8?q?FEAT=20:=20CDE-195-creer-consumer-pour-ge?= =?UTF-8?q?nerer-les-2-fichiers-de-log=20=20=20=20=20=20-=20ajout=20d'un?= =?UTF-8?q?=20KafkaProducer=20dans=20KafkaConfig.java=20=20=20=20=20=20-?= =?UTF-8?q?=20ajout=20d'un=20contr=C3=B4le=20sur=20la=20cr=C3=A9ation=20du?= =?UTF-8?q?=20r=C3=A9pertoire=20temporaire=20=20=20=20=20=20-=20correction?= =?UTF-8?q?=20d'une=20valeur=20erron=C3=A9e=20dans=20le=20rapport=20d'erre?= =?UTF-8?q?urs=20=20=20=20=20=20-=20correction=20de=20la=20m=C3=A9thode=20?= =?UTF-8?q?sendEndOfTraitmentReport=20pour=20corriger=20l'erreur=20lev?= =?UTF-8?q?=C3=A9e=20suite=20=C3=A0=20la=20mise=20en=20place=20d'Avro=20?= =?UTF-8?q?=20=20=20=20=20-=20ajout=20d'une=20remont=C3=A9e=20de=20lev?= =?UTF-8?q?=C3=A9e=20d'exception=20de=20WsService.java=20=C3=A0=20TopicCon?= =?UTF-8?q?sumer.java=20=20=20=20=20=20-=20adaptation=20des=20TU?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../bestppn/configuration/KafkaConfig.java | 7 ++ .../fr/abes/bestppn/kafka/TopicConsumer.java | 9 ++- .../fr/abes/bestppn/kafka/TopicProducer.java | 78 ++++++++----------- .../abes/bestppn/service/BestPpnService.java | 4 +- .../abes/bestppn/service/LogFileService.java | 8 +- .../fr/abes/bestppn/service/WsService.java | 9 ++- .../bestppn/service/BestPpnServiceTest.java | 4 +- 7 files changed, 60 insertions(+), 59 deletions(-) diff --git a/src/main/java/fr/abes/bestppn/configuration/KafkaConfig.java b/src/main/java/fr/abes/bestppn/configuration/KafkaConfig.java index 7b250a4..18646a8 100644 --- a/src/main/java/fr/abes/bestppn/configuration/KafkaConfig.java +++ b/src/main/java/fr/abes/bestppn/configuration/KafkaConfig.java @@ -4,6 +4,7 @@ import io.confluent.kafka.serializers.KafkaAvroSerializer; import io.confluent.kafka.serializers.KafkaAvroSerializerConfig; import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.common.serialization.StringDeserializer; import org.apache.kafka.common.serialization.StringSerializer; @@ -83,4 +84,10 @@ public KafkaTransactionManager kafkaTransactionManage @Bean public KafkaTemplate kafkaTemplate(final ProducerFactory producerFactory) { return new KafkaTemplate<>(producerFactory);} + + @Bean + public KafkaProducer kafkaProducerOk() { + StringSerializer serial = new StringSerializer(); + return new KafkaProducer<>(producerConfigs(), serial, serial); + } } diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java index 2041119..8f9d6b6 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java @@ -31,6 +31,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Optional; +import java.util.concurrent.ExecutionException; @Slf4j @Service @@ -104,13 +105,13 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) { if( !isOnError ) { ProviderPackage provider = handlerProvider(providerOpt, filename, providerName); - producer.sendKbart(kbartToSend, 
lignesKbart.headers()); - producer.sendPrintNotice(ppnToCreate, lignesKbart.headers()); + producer.sendKbart(kbartToSend, provider, filename); + producer.sendPrintNotice(ppnToCreate, provider, filename); producer.sendPpnExNihilo(ppnFromKbartToCreate, provider, filename); } else { isOnError = false; } - log.info("Nombre de best ppn trouvé : " + this.nbBestPpnFind + "/" + nbLine); + log.info("Nombre de best ppn trouvé : " + this.nbBestPpnFind + "/" + totalLine); this.nbBestPpnFind = 0; serviceMail.sendMailWithAttachment(filename, mailAttachment); producer.sendEndOfTraitmentReport(lignesKbart.headers()); // Appel le producer pour l'envoi du message de fin de traitement. @@ -163,6 +164,8 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) { } catch (MessagingException e) { log.error(e.getMessage()); throw new RuntimeException(e); + } catch (ExecutionException | InterruptedException e) { + throw new RuntimeException(e); } } diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java b/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java index bee9133..6cd53ac 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java @@ -1,7 +1,6 @@ package fr.abes.bestppn.kafka; import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; import fr.abes.LigneKbartConnect; import fr.abes.bestppn.dto.kafka.LigneKbartDto; import fr.abes.bestppn.dto.kafka.PpnKbartProviderDto; @@ -10,6 +9,7 @@ import fr.abes.bestppn.utils.UtilsMapper; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; +import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.clients.producer.RecordMetadata; import org.apache.kafka.common.header.Header; @@ -17,17 +17,14 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.kafka.core.KafkaTemplate; -import org.springframework.kafka.support.KafkaHeaders; import org.springframework.kafka.support.SendResult; -import org.springframework.messaging.Message; -import org.springframework.messaging.support.MessageBuilder; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import java.nio.charset.StandardCharsets; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; +import java.util.concurrent.ExecutionException; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -54,10 +51,8 @@ public class TopicProducer { @Autowired private UtilsMapper utilsMapper; -// @Autowired -// private KafkaTemplate kafkaTemplate; - - private final ObjectMapper mapper; + @Autowired + private KafkaProducer kafkaProducer; @Transactional(transactionManager = "kafkaTransactionManager", rollbackFor = {BestPpnException.class, JsonProcessingException.class}) public void sendKbart(List kbart, ProviderPackage provider, String filename) throws JsonProcessingException, BestPpnException { @@ -67,9 +62,6 @@ public void sendKbart(List kbart, ProviderPackage provider, Strin ligne.setProviderPackagePackage(provider.getProviderPackageId().getPackageName()); ligne.setProviderPackageDateP(provider.getProviderPackageId().getDateP()); ligne.setProviderPackageIdtProvider(provider.getProviderPackageId().getProviderIdtProvider()); - if( ligne.isBestPpnEmpty()){ - throw new BestPpnException("La ligne " + ligne +" n'a 
pas de BestPpn."); - } List
headerList = new ArrayList<>(); headerList.add(constructHeader("filename", filename.getBytes())); if (numLigneCourante == kbart.size()) @@ -79,7 +71,6 @@ public void sendKbart(List kbart, ProviderPackage provider, Strin log.debug("message envoyé vers {}", topicKbart); } - @Transactional(transactionManager = "kafkaTransactionManager") public void sendPrintNotice(List ppnKbartProviderDtoList, ProviderPackage provider, String filename) throws JsonProcessingException { for (PpnKbartProviderDto ppnToCreate : ppnKbartProviderDtoList) { @@ -94,6 +85,19 @@ public void sendPrintNotice(List ppnKbartProviderDtoList, P log.debug("message envoyé vers {}", topicNoticeImprimee); } + @Transactional(transactionManager = "kafkaTransactionManager") + public void sendPpnExNihilo(List ppnFromKbartToCreate, ProviderPackage provider, String filename) throws JsonProcessingException { + for (LigneKbartDto ligne : ppnFromKbartToCreate) { + ligne.setProviderPackagePackage(provider.getProviderPackageId().getPackageName()); + ligne.setProviderPackageDateP(provider.getProviderPackageId().getDateP()); + ligne.setProviderPackageIdtProvider(provider.getProviderPackageId().getProviderIdtProvider()); + List
headerList = new ArrayList<>(); + headerList.add(constructHeader("filename", filename.getBytes(StandardCharsets.US_ASCII))); + sendObject(ligne, topicKbartPpnToCreate, headerList); + } + log.debug("message envoyé vers {}", topicKbartPpnToCreate); + } + private Header constructHeader(String key, byte[] value) { return new Header() { @Override @@ -108,40 +112,6 @@ public byte[] value() { }; } - /** - * Envoie un message de fin de traitement sur le topic kafka endOfTraitment_kbart2kafka - * @param headers le header du message (contient le nom du package et la date) - */ - @Transactional(transactionManager = "kafkaTransactionManager") - public void sendEndOfTraitmentReport(Headers headers) { - setHeadersAndSend(headers, "OK", topicEndOfTraitment); - log.info("End of traitment report send."); - } - - private void setHeadersAndSend(Headers headers, String value, String topic) { - MessageBuilder messageBuilder = MessageBuilder - .withPayload(value) - .setHeader(KafkaHeaders.TOPIC, topic); - for (Header header : headers.toArray()) { - messageBuilder.setHeader(header.key(), header.value()); - } - Message message = messageBuilder.build(); - kafkaTemplate.send(message); - } - - @Transactional(transactionManager = "kafkaTransactionManager") - public void sendPpnExNihilo(List ppnFromKbartToCreate, ProviderPackage provider, String filename) throws JsonProcessingException { - for (LigneKbartDto ligne : ppnFromKbartToCreate) { - ligne.setProviderPackagePackage(provider.getProviderPackageId().getPackageName()); - ligne.setProviderPackageDateP(provider.getProviderPackageId().getDateP()); - ligne.setProviderPackageIdtProvider(provider.getProviderPackageId().getProviderIdtProvider()); - List
headerList = new ArrayList<>(); - headerList.add(constructHeader("filename", filename.getBytes(StandardCharsets.US_ASCII))); - sendObject(ligne, topicKbartPpnToCreate, headerList); - } - log.debug("message envoyé vers {}", topicKbartPpnToCreate); - } - private SendResult sendObject(LigneKbartDto ligneKbartDto, String topic, List
header) { LigneKbartConnect ligne = utilsMapper.map(ligneKbartDto, LigneKbartConnect.class); try { @@ -158,4 +128,18 @@ private SendResult sendObject(LigneKbartDto ligneKbartDto, String topic, List headerList = new ArrayList<>(); + for (Header header : headers.toArray()) { + headerList.add(constructHeader(header.key(), header.value())); + } + ProducerRecord record = new ProducerRecord<>(topicEndOfTraitment, null, "", "OK", headerList); + kafkaProducer.send(record); + log.info("End of traitment report send."); + } } diff --git a/src/main/java/fr/abes/bestppn/service/BestPpnService.java b/src/main/java/fr/abes/bestppn/service/BestPpnService.java index 0b5405b..169f903 100644 --- a/src/main/java/fr/abes/bestppn/service/BestPpnService.java +++ b/src/main/java/fr/abes/bestppn/service/BestPpnService.java @@ -85,12 +85,12 @@ public PpnWithDestinationDto getBestPpn(LigneKbartDto kbart, String provider, bo return getBestPpnByScore(kbart, ppnElecScoredList, ppnPrintResultList, injectKafka); } - private void feedPpnListFromOnline(LigneKbartDto kbart, String provider, Map ppnElecScoredList, Set ppnPrintResultList) throws IOException, IllegalPpnException, URISyntaxException, RestClientException, IllegalArgumentException { + private void feedPpnListFromOnline(LigneKbartDto kbart, String provider, Map ppnElecScoredList, Set ppnPrintResultList) throws IOException, IllegalPpnException, URISyntaxException, RestClientException, IllegalArgumentException, BestPpnException { log.debug("Entrée dans onlineId2Ppn"); setScoreToEveryPpnFromResultWS(service.callOnlineId2Ppn(kbart.getPublicationType(), kbart.getOnlineIdentifier(), provider), kbart.getTitleUrl(), this.scoreOnlineId2PpnElect, ppnElecScoredList, ppnPrintResultList); } - private void feedPpnListFromPrint(LigneKbartDto kbart, String provider, Map ppnElecScoredList, Set ppnPrintResultList) throws IOException, IllegalPpnException, URISyntaxException, RestClientException, IllegalArgumentException { + private void feedPpnListFromPrint(LigneKbartDto kbart, String provider, Map ppnElecScoredList, Set ppnPrintResultList) throws IOException, IllegalPpnException, URISyntaxException, RestClientException, IllegalArgumentException, BestPpnException { log.debug("Entrée dans printId2Ppn"); ResultWsSudocDto resultCallWs = service.callPrintId2Ppn(kbart.getPublicationType(), kbart.getPrintIdentifier(), provider); ResultWsSudocDto resultWithTypeElectronique = resultCallWs.getPpnWithTypeElectronique(); diff --git a/src/main/java/fr/abes/bestppn/service/LogFileService.java b/src/main/java/fr/abes/bestppn/service/LogFileService.java index 9cbbc30..3e97625 100644 --- a/src/main/java/fr/abes/bestppn/service/LogFileService.java +++ b/src/main/java/fr/abes/bestppn/service/LogFileService.java @@ -3,6 +3,7 @@ import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Service; +import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; @@ -51,10 +52,15 @@ public void createExecutionReport(String fileName, int totalLines, int linesOk, DateTimeFormatter format = DateTimeFormatter.ofPattern("yyyy-MM-dd-HH-mm-ss", Locale.FRANCE); String date = format.format(time); + // Vérification du chemin et création si inexistant String tempLog = "tempLog/"; - Files.createDirectory(Paths.get(tempLog)); + File chemin = new File("tempLog/"); + if (!chemin.isDirectory()) { + Files.createDirectory(Paths.get(tempLog)); + } Path target = Path.of("tempLog\\" + date + "_" + source); + // Déplacement du fichier Files.move(source, target, 
StandardCopyOption.REPLACE_EXISTING); log.info("Fichier de log transféré dans le dossier temporaire."); } diff --git a/src/main/java/fr/abes/bestppn/service/WsService.java b/src/main/java/fr/abes/bestppn/service/WsService.java index e09362c..d52b904 100644 --- a/src/main/java/fr/abes/bestppn/service/WsService.java +++ b/src/main/java/fr/abes/bestppn/service/WsService.java @@ -5,6 +5,7 @@ import fr.abes.bestppn.dto.wscall.ResultDat2PpnWebDto; import fr.abes.bestppn.dto.wscall.ResultWsSudocDto; import fr.abes.bestppn.dto.wscall.SearchDatWebDto; +import fr.abes.bestppn.exception.BestPpnException; import fr.abes.bestppn.utils.ExecutionTime; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Value; @@ -84,16 +85,16 @@ public String getCall(String url, Map params) throws RestClientE } @ExecutionTime - public ResultWsSudocDto callOnlineId2Ppn(String type, String id, @Nullable String provider) throws RestClientException, IllegalArgumentException { + public ResultWsSudocDto callOnlineId2Ppn(String type, String id, @Nullable String provider) throws RestClientException, IllegalArgumentException, BestPpnException { return getResultWsSudocDto(type, id, provider, urlOnlineId2Ppn); } @ExecutionTime - public ResultWsSudocDto callPrintId2Ppn(String type, String id, @Nullable String provider) throws RestClientException, IllegalArgumentException { + public ResultWsSudocDto callPrintId2Ppn(String type, String id, @Nullable String provider) throws RestClientException, IllegalArgumentException, BestPpnException { return getResultWsSudocDto(type, id, provider, urlPrintId2Ppn); } - private ResultWsSudocDto getResultWsSudocDto(String type, String id, @Nullable String provider, String url) throws RestClientException, IllegalArgumentException{ + private ResultWsSudocDto getResultWsSudocDto(String type, String id, @Nullable String provider, String url) throws RestClientException, IllegalArgumentException, BestPpnException{ ResultWsSudocDto result = new ResultWsSudocDto(); try { result = mapper.readValue((provider != null && !provider.equals("")) ? 
getRestCall(url, type, id, provider) : getRestCall(url, type, id), ResultWsSudocDto.class); @@ -102,7 +103,7 @@ private ResultWsSudocDto getResultWsSudocDto(String type, String id, @Nullable S throw ex; } catch (IllegalArgumentException ex) { if( ex.getMessage().equals("argument \"content\" is null")) { - log.error("Aucuns ppn correspondant à l'"+ id); + throw new BestPpnException("Aucuns ppn correspondant à l'"+ id); } else { throw ex; } diff --git a/src/test/java/fr/abes/bestppn/service/BestPpnServiceTest.java b/src/test/java/fr/abes/bestppn/service/BestPpnServiceTest.java index dc3bd68..1fc7e7e 100644 --- a/src/test/java/fr/abes/bestppn/service/BestPpnServiceTest.java +++ b/src/test/java/fr/abes/bestppn/service/BestPpnServiceTest.java @@ -298,7 +298,7 @@ void getBestPpnTest03() throws IllegalPpnException, IOException, BestPpnExceptio @Test @DisplayName("Test throw BestPpnException same score") - void getBestPpnTest04() throws IOException, IllegalPpnException, URISyntaxException { + void getBestPpnTest04() throws IOException, IllegalPpnException, URISyntaxException, BestPpnException { String provider = ""; // Create PpnWithTypeDto for elec PpnWithTypeDto ppnWithType1 = new PpnWithTypeDto(); @@ -347,7 +347,7 @@ void getBestPpnTest04() throws IOException, IllegalPpnException, URISyntaxExcept @Test @DisplayName("Test throw BestPpnException with 2 printFromPrint & 2 printFromDat") - void getBestPpnTest05() throws IllegalPpnException, IOException { + void getBestPpnTest05() throws IllegalPpnException, IOException, BestPpnException { String provider = "urlProvider"; // Create PpnWithTypeDto for Online PpnWithTypeDto ppnWithType1 = new PpnWithTypeDto(); From fe9b67ee6063ef3640a1bc6fa23fda4ab99f57ef Mon Sep 17 00:00:00 2001 From: EryneKL <97091460+EryneKL@users.noreply.github.com> Date: Mon, 9 Oct 2023 10:25:02 +0200 Subject: [PATCH 10/23] =?UTF-8?q?FEAT=20:=20CDE-195-creer-consumer-pour-ge?= =?UTF-8?q?nerer-les-2-fichiers-de-log=20=20=20=20=20=20-=20suppression=20?= =?UTF-8?q?d'une=20lev=C3=A9e=20d'erreur=20et=20ajout=20de=20log.info?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java | 7 ++++--- src/main/java/fr/abes/bestppn/service/WsService.java | 3 +++ 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java index 8f9d6b6..f20910b 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java @@ -105,9 +105,10 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) { if( !isOnError ) { ProviderPackage provider = handlerProvider(providerOpt, filename, providerName); - producer.sendKbart(kbartToSend, provider, filename); - producer.sendPrintNotice(ppnToCreate, provider, filename); - producer.sendPpnExNihilo(ppnFromKbartToCreate, provider, filename); + // TODO décommenter les trois lignes ci-dessous avant de push +// producer.sendKbart(kbartToSend, provider, filename); +// producer.sendPrintNotice(ppnToCreate, provider, filename); +// producer.sendPpnExNihilo(ppnFromKbartToCreate, provider, filename); } else { isOnError = false; } diff --git a/src/main/java/fr/abes/bestppn/service/WsService.java b/src/main/java/fr/abes/bestppn/service/WsService.java index d52b904..1929616 100644 --- a/src/main/java/fr/abes/bestppn/service/WsService.java +++ b/src/main/java/fr/abes/bestppn/service/WsService.java @@ -103,6 
+103,9 @@ private ResultWsSudocDto getResultWsSudocDto(String type, String id, @Nullable S throw ex; } catch (IllegalArgumentException ex) { if( ex.getMessage().equals("argument \"content\" is null")) { + // ATTENTION si le message d'erreur ci-dessous est modifié, il faut mettre à jour la condition de filtrage des messages d'erreurs dans logskbart-api (LogsListener.java) + // TODO ajouter ici un log.info et supprimer la levée de BestPpnException + supprimer dans TopicConsumer.java + maj dans logskbart le filtrage sur la condition "Aucuns ppn correspondant" + // TODO vérifier sur kbar2kafka que les log.error vont bien dans le topic errorkbart2kafka throw new BestPpnException("Aucuns ppn correspondant à l'"+ id); } else { throw ex; From 95c80a918bf8eb0c5cd2b5d9abe9a8ade4531b69 Mon Sep 17 00:00:00 2001 From: EryneKL <97091460+EryneKL@users.noreply.github.com> Date: Tue, 10 Oct 2023 08:45:21 +0200 Subject: [PATCH 11/23] =?UTF-8?q?FEAT=20:=20CDE-195-creer-consumer-pour-ge?= =?UTF-8?q?nerer-les-2-fichiers-de-log=20=20=20=20=20=20-=20suppression=20?= =?UTF-8?q?du=20header=20kafka=20non=20pris=20en=20compte=20dans=20log4j2-?= =?UTF-8?q?all.xml=20=20=20=20=20=20-=20d=C3=A9placement=20du=20param?= =?UTF-8?q?=C3=A8tre=20injectKafka=20=20=20=20=20=20-=20correction=20du=20?= =?UTF-8?q?bug=20de=20non=20prise=20en=20compte=20du=20num=C3=A9ro=20de=20?= =?UTF-8?q?ligne=20inject=C3=A9=20dans=20le=20ThreadContext=20=20=20=20=20?= =?UTF-8?q?=20-=20isolation=20de=20la=20remont=C3=A9e=20d'exception=20Best?= =?UTF-8?q?PpnException=20dans=20TopicConsumer.java?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/main/resources/log4j2-all.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/resources/log4j2-all.xml b/src/main/resources/log4j2-all.xml index 082bd0c..bab9116 100644 --- a/src/main/resources/log4j2-all.xml +++ b/src/main/resources/log4j2-all.xml @@ -17,7 +17,7 @@ - + ${kafkaServer} From 002557b09f95b3830f11f2c2f32522f37a23cd3d Mon Sep 17 00:00:00 2001 From: EryneKL <97091460+EryneKL@users.noreply.github.com> Date: Tue, 10 Oct 2023 08:47:45 +0200 Subject: [PATCH 12/23] =?UTF-8?q?FEAT=20:=20CDE-195-creer-consumer-pour-ge?= =?UTF-8?q?nerer-les-2-fichiers-de-log=20=20=20=20=20=20-=20suppression=20?= =?UTF-8?q?du=20header=20kafka=20non=20pris=20en=20compte=20dans=20log4j2-?= =?UTF-8?q?all.xml=20=20=20=20=20=20-=20d=C3=A9placement=20du=20param?= =?UTF-8?q?=C3=A8tre=20injectKafka=20=20=20=20=20=20-=20correction=20du=20?= =?UTF-8?q?bug=20de=20non=20prise=20en=20compte=20du=20num=C3=A9ro=20de=20?= =?UTF-8?q?ligne=20inject=C3=A9=20dans=20le=20ThreadContext=20=20=20=20=20?= =?UTF-8?q?=20-=20isolation=20de=20la=20remont=C3=A9e=20d'exception=20Best?= =?UTF-8?q?PpnException=20dans=20TopicConsumer.java?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../fr/abes/bestppn/kafka/TopicConsumer.java | 24 ++++++++++++------- .../fr/abes/bestppn/service/WsService.java | 11 ++++----- 2 files changed, 20 insertions(+), 15 deletions(-) diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java index f20910b..9258b56 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java @@ -66,6 +66,8 @@ public class TopicConsumer { private boolean isOnError = false; + boolean injectKafka = false; + private int nbBestPpnFind = 0; private int linesWithInputDataErrors = 0; @@ -77,12 +79,11 
@@ public class TopicConsumer { * @param lignesKbart message kafka récupéré par le Consumer Kafka */ @KafkaListener(topics = {"${topic.name.source.kbart}"}, groupId = "${topic.groupid.source.kbart}", containerFactory = "kafkaKbartListenerContainerFactory") - public void listenKbartFromKafka(ConsumerRecord lignesKbart) { + public void listenKbartFromKafka(ConsumerRecord lignesKbart) throws BestPpnException { try { String filename = ""; String currentLine = ""; String totalLine = ""; - boolean injectKafka = false; for (Header header : lignesKbart.headers().toArray()) { if (header.key().equals("FileName")) { filename = new String(header.value()); @@ -95,7 +96,7 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) { totalLine = new String(header.value()); } } - ThreadContext.put("package", filename + "[line : " + currentLine + "]"); // Ajoute le numéro de ligne courante au contexte log4j2 pour inscription dans le header kafka + ThreadContext.put("package", (filename + "[line : " + currentLine + "]")); // Ajoute le numéro de ligne courante au contexte log4j2 pour inscription dans le header kafka String nbLine = currentLine + "/" + totalLine; String providerName = Utils.extractProvider(filename); @@ -105,10 +106,9 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) { if( !isOnError ) { ProviderPackage provider = handlerProvider(providerOpt, filename, providerName); - // TODO décommenter les trois lignes ci-dessous avant de push -// producer.sendKbart(kbartToSend, provider, filename); -// producer.sendPrintNotice(ppnToCreate, provider, filename); -// producer.sendPpnExNihilo(ppnFromKbartToCreate, provider, filename); + producer.sendKbart(kbartToSend, provider, filename); + producer.sendPrintNotice(ppnToCreate, provider, filename); + producer.sendPpnExNihilo(ppnFromKbartToCreate, provider, filename); } else { isOnError = false; } @@ -157,11 +157,19 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) { log.error(e.getMessage()); addLineToMailAttachementWithErrorMessage(e.getMessage()); linesWithInputDataErrors++; - } catch (IllegalPpnException | BestPpnException e) { + } catch (IllegalPpnException e) { + isOnError = true; + log.error(e.getMessage()); + addLineToMailAttachementWithErrorMessage(e.getMessage()); + linesWithErrorsInBestPPNSearch++; + } catch (BestPpnException e) { isOnError = true; log.error(e.getMessage()); addLineToMailAttachementWithErrorMessage(e.getMessage()); linesWithErrorsInBestPPNSearch++; + if (!injectKafka) { + throw new BestPpnException (e.getMessage()); + } } catch (MessagingException e) { log.error(e.getMessage()); throw new RuntimeException(e); diff --git a/src/main/java/fr/abes/bestppn/service/WsService.java b/src/main/java/fr/abes/bestppn/service/WsService.java index 1929616..59f3a3d 100644 --- a/src/main/java/fr/abes/bestppn/service/WsService.java +++ b/src/main/java/fr/abes/bestppn/service/WsService.java @@ -85,16 +85,16 @@ public String getCall(String url, Map params) throws RestClientE } @ExecutionTime - public ResultWsSudocDto callOnlineId2Ppn(String type, String id, @Nullable String provider) throws RestClientException, IllegalArgumentException, BestPpnException { + public ResultWsSudocDto callOnlineId2Ppn(String type, String id, @Nullable String provider) throws RestClientException, IllegalArgumentException { return getResultWsSudocDto(type, id, provider, urlOnlineId2Ppn); } @ExecutionTime - public ResultWsSudocDto callPrintId2Ppn(String type, String id, @Nullable String provider) throws RestClientException, 
IllegalArgumentException, BestPpnException { + public ResultWsSudocDto callPrintId2Ppn(String type, String id, @Nullable String provider) throws RestClientException, IllegalArgumentException { return getResultWsSudocDto(type, id, provider, urlPrintId2Ppn); } - private ResultWsSudocDto getResultWsSudocDto(String type, String id, @Nullable String provider, String url) throws RestClientException, IllegalArgumentException, BestPpnException{ + private ResultWsSudocDto getResultWsSudocDto(String type, String id, @Nullable String provider, String url) throws RestClientException, IllegalArgumentException{ ResultWsSudocDto result = new ResultWsSudocDto(); try { result = mapper.readValue((provider != null && !provider.equals("")) ? getRestCall(url, type, id, provider) : getRestCall(url, type, id), ResultWsSudocDto.class); @@ -103,10 +103,7 @@ private ResultWsSudocDto getResultWsSudocDto(String type, String id, @Nullable S throw ex; } catch (IllegalArgumentException ex) { if( ex.getMessage().equals("argument \"content\" is null")) { - // ATTENTION si le message d'erreur ci-dessous est modifié, il faut mettre à jour la condition de filtrage des messages d'erreurs dans logskbart-api (LogsListener.java) - // TODO ajouter ici un log.info et supprimer la levée de BestPpnException + supprimer dans TopicConsumer.java + maj dans logskbart le filtrage sur la condition "Aucuns ppn correspondant" - // TODO vérifier sur kbar2kafka que les log.error vont bien dans le topic errorkbart2kafka - throw new BestPpnException("Aucuns ppn correspondant à l'"+ id); + log.info("Aucuns ppn correspondant à l'"+ id); } else { throw ex; } From 598981feb6a9b16e2be51e9137215bbbc6ebcf32 Mon Sep 17 00:00:00 2001 From: EryneKL <97091460+EryneKL@users.noreply.github.com> Date: Tue, 10 Oct 2023 16:12:39 +0200 Subject: [PATCH 13/23] =?UTF-8?q?FEAT=20:=20CDE-195-creer-consumer-pour-ge?= =?UTF-8?q?nerer-les-2-fichiers-de-log=20=20=20=20=20=20-=20refactor=20d'u?= =?UTF-8?q?n=20message=20d'erreur=20et=20suppression=20d'un=20log=20inutil?= =?UTF-8?q?e=20dans=20BestPpnService.java=20=20=20=20=20=20-=20correction?= =?UTF-8?q?=20d'un=20contr=C3=B4le=20erron=C3=A9=20dans=20LogsFileService.?= =?UTF-8?q?java=20=20=20=20=20=20-=20correction=20des=20erreurs=20de=20dou?= =?UTF-8?q?blon=20de=20lev=C3=A9e=20d'exception=20=20=20=20=20=20-=20simpl?= =?UTF-8?q?ification=20du=20code=20et=20appel=20de=20m=C3=A9thode=20dans?= =?UTF-8?q?=20le=20catch=20=20=20=20=20=20-=20refactor=20de=20la=20m=C3=A9?= =?UTF-8?q?thode=20sendEndOfTraitmentReport?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../fr/abes/bestppn/kafka/TopicConsumer.java | 34 +++++++++---------- .../fr/abes/bestppn/kafka/TopicProducer.java | 18 ++++------ .../abes/bestppn/service/BestPpnService.java | 10 +++--- .../abes/bestppn/service/LogFileService.java | 3 +- 4 files changed, 29 insertions(+), 36 deletions(-) diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java index 9258b56..0c37a3a 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java @@ -29,6 +29,7 @@ import java.io.IOException; import java.net.URISyntaxException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Optional; import java.util.concurrent.ExecutionException; @@ -74,26 +75,33 @@ public class TopicConsumer { private int linesWithErrorsInBestPPNSearch = 0; + private List
headerList = new ArrayList<>(); + + private String filename = ""; + + private String totalLine = ""; + /** * Listener Kafka qui écoute un topic et récupère les messages dès qu'ils y arrivent. * @param lignesKbart message kafka récupéré par le Consumer Kafka */ @KafkaListener(topics = {"${topic.name.source.kbart}"}, groupId = "${topic.groupid.source.kbart}", containerFactory = "kafkaKbartListenerContainerFactory") - public void listenKbartFromKafka(ConsumerRecord lignesKbart) throws BestPpnException { + public void listenKbartFromKafka(ConsumerRecord lignesKbart) throws ExecutionException, InterruptedException, IOException { try { - String filename = ""; String currentLine = ""; - String totalLine = ""; for (Header header : lignesKbart.headers().toArray()) { if (header.key().equals("FileName")) { filename = new String(header.value()); + headerList.add(header); if (filename.contains("_FORCE")) { injectKafka = true; } } else if (header.key().equals("CurrentLine")) { currentLine = new String(header.value()); + headerList.add(header); } else if (header.key().equals("TotalLine")) { totalLine = new String(header.value()); + headerList.add(header); } } ThreadContext.put("package", (filename + "[line : " + currentLine + "]")); // Ajoute le numéro de ligne courante au contexte log4j2 pour inscription dans le header kafka @@ -115,7 +123,7 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) thr log.info("Nombre de best ppn trouvé : " + this.nbBestPpnFind + "/" + totalLine); this.nbBestPpnFind = 0; serviceMail.sendMailWithAttachment(filename, mailAttachment); - producer.sendEndOfTraitmentReport(lignesKbart.headers()); // Appel le producer pour l'envoi du message de fin de traitement. + producer.sendEndOfTraitmentReport(headerList); // Appel le producer pour l'envoi du message de fin de traitement. 
logFileService.createExecutionReport(filename, Integer.parseInt(totalLine), Integer.parseInt(totalLine) - this.linesWithInputDataErrors - this.linesWithErrorsInBestPPNSearch, this.linesWithInputDataErrors, this.linesWithErrorsInBestPPNSearch); kbartToSend.clear(); ppnToCreate.clear(); @@ -157,24 +165,16 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) thr log.error(e.getMessage()); addLineToMailAttachementWithErrorMessage(e.getMessage()); linesWithInputDataErrors++; - } catch (IllegalPpnException e) { - isOnError = true; - log.error(e.getMessage()); - addLineToMailAttachementWithErrorMessage(e.getMessage()); - linesWithErrorsInBestPPNSearch++; - } catch (BestPpnException e) { + } catch (IllegalPpnException | BestPpnException e) { isOnError = true; log.error(e.getMessage()); addLineToMailAttachementWithErrorMessage(e.getMessage()); linesWithErrorsInBestPPNSearch++; - if (!injectKafka) { - throw new BestPpnException (e.getMessage()); - } - } catch (MessagingException e) { + } catch (MessagingException | ExecutionException | InterruptedException | RuntimeException e) { log.error(e.getMessage()); - throw new RuntimeException(e); - } catch (ExecutionException | InterruptedException e) { - throw new RuntimeException(e); + producer.sendEndOfTraitmentReport(headerList); + logFileService.createExecutionReport(filename, Integer.parseInt(totalLine), Integer.parseInt(totalLine) - this.linesWithInputDataErrors - this.linesWithErrorsInBestPPNSearch, this.linesWithInputDataErrors, this.linesWithErrorsInBestPPNSearch); + // TODO Ajouter une nouvelle donnée à la création du rapport de log de bestPpn-api : erreur bloquante sur l'application bestPpn, processus interrompu } } diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java b/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java index 6cd53ac..12d9dc8 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java @@ -13,7 +13,6 @@ import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.clients.producer.RecordMetadata; import org.apache.kafka.common.header.Header; -import org.apache.kafka.common.header.Headers; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.kafka.core.KafkaTemplate; @@ -55,7 +54,7 @@ public class TopicProducer { private KafkaProducer kafkaProducer; @Transactional(transactionManager = "kafkaTransactionManager", rollbackFor = {BestPpnException.class, JsonProcessingException.class}) - public void sendKbart(List kbart, ProviderPackage provider, String filename) throws JsonProcessingException, BestPpnException { + public void sendKbart(List kbart, ProviderPackage provider, String filename) throws JsonProcessingException, BestPpnException, ExecutionException, InterruptedException { int numLigneCourante = 0; for (LigneKbartDto ligne : kbart) { numLigneCourante++; @@ -72,7 +71,7 @@ public void sendKbart(List kbart, ProviderPackage provider, Strin } @Transactional(transactionManager = "kafkaTransactionManager") - public void sendPrintNotice(List ppnKbartProviderDtoList, ProviderPackage provider, String filename) throws JsonProcessingException { + public void sendPrintNotice(List ppnKbartProviderDtoList, ProviderPackage provider, String filename) throws JsonProcessingException, ExecutionException, InterruptedException { for (PpnKbartProviderDto ppnToCreate : ppnKbartProviderDtoList) { 
ppnToCreate.getKbart().setProviderPackagePackage(provider.getProviderPackageId().getPackageName()); ppnToCreate.getKbart().setProviderPackageDateP(provider.getProviderPackageId().getDateP()); @@ -86,7 +85,7 @@ public void sendPrintNotice(List ppnKbartProviderDtoList, P } @Transactional(transactionManager = "kafkaTransactionManager") - public void sendPpnExNihilo(List ppnFromKbartToCreate, ProviderPackage provider, String filename) throws JsonProcessingException { + public void sendPpnExNihilo(List ppnFromKbartToCreate, ProviderPackage provider, String filename) throws JsonProcessingException, ExecutionException, InterruptedException { for (LigneKbartDto ligne : ppnFromKbartToCreate) { ligne.setProviderPackagePackage(provider.getProviderPackageId().getPackageName()); ligne.setProviderPackageDateP(provider.getProviderPackageId().getDateP()); @@ -123,23 +122,18 @@ private SendResult sendObject(LigneKbartDto ligneKbartDto, String topic, List headerList = new ArrayList<>(); - for (Header header : headers.toArray()) { - headerList.add(constructHeader(header.key(), header.value())); - } + public void sendEndOfTraitmentReport(List
headerList) throws ExecutionException, InterruptedException { ProducerRecord record = new ProducerRecord<>(topicEndOfTraitment, null, "", "OK", headerList); kafkaProducer.send(record); - log.info("End of traitment report send."); + log.info("End of traitment report sent."); } } diff --git a/src/main/java/fr/abes/bestppn/service/BestPpnService.java b/src/main/java/fr/abes/bestppn/service/BestPpnService.java index 169f903..78215c8 100644 --- a/src/main/java/fr/abes/bestppn/service/BestPpnService.java +++ b/src/main/java/fr/abes/bestppn/service/BestPpnService.java @@ -85,12 +85,12 @@ public PpnWithDestinationDto getBestPpn(LigneKbartDto kbart, String provider, bo return getBestPpnByScore(kbart, ppnElecScoredList, ppnPrintResultList, injectKafka); } - private void feedPpnListFromOnline(LigneKbartDto kbart, String provider, Map ppnElecScoredList, Set ppnPrintResultList) throws IOException, IllegalPpnException, URISyntaxException, RestClientException, IllegalArgumentException, BestPpnException { + private void feedPpnListFromOnline(LigneKbartDto kbart, String provider, Map ppnElecScoredList, Set ppnPrintResultList) throws IOException, IllegalPpnException, URISyntaxException, RestClientException, IllegalArgumentException { log.debug("Entrée dans onlineId2Ppn"); setScoreToEveryPpnFromResultWS(service.callOnlineId2Ppn(kbart.getPublicationType(), kbart.getOnlineIdentifier(), provider), kbart.getTitleUrl(), this.scoreOnlineId2PpnElect, ppnElecScoredList, ppnPrintResultList); } - private void feedPpnListFromPrint(LigneKbartDto kbart, String provider, Map ppnElecScoredList, Set ppnPrintResultList) throws IOException, IllegalPpnException, URISyntaxException, RestClientException, IllegalArgumentException, BestPpnException { + private void feedPpnListFromPrint(LigneKbartDto kbart, String provider, Map ppnElecScoredList, Set ppnPrintResultList) throws IOException, IllegalPpnException, URISyntaxException, RestClientException, IllegalArgumentException { log.debug("Entrée dans printId2Ppn"); ResultWsSudocDto resultCallWs = service.callPrintId2Ppn(kbart.getPublicationType(), kbart.getPrintIdentifier(), provider); ResultWsSudocDto resultWithTypeElectronique = resultCallWs.getPpnWithTypeElectronique(); @@ -181,12 +181,13 @@ yield switch (ppnPrintResultList.size()) { } default -> { - kbart.setErrorType("Plusieurs ppn imprimés (" + String.join(", ", ppnPrintResultList) + ") ont été trouvés."); + String errorString = "Plusieurs ppn imprimés (" + String.join(", ", ppnPrintResultList) + ") ont été trouvés."; + kbart.setErrorType(errorString); // vérification du forçage if (injectKafka) { yield new PpnWithDestinationDto("",DESTINATION_TOPIC.BEST_PPN_BACON); } else { - throw new BestPpnException("Plusieurs ppn imprimés (" + String.join(", ", ppnPrintResultList) + ") ont été trouvés."); + throw new BestPpnException(errorString); } } }; @@ -201,7 +202,6 @@ yield switch (ppnPrintResultList.size()) { if (injectKafka) { yield new PpnWithDestinationDto("", DESTINATION_TOPIC.BEST_PPN_BACON); } else { - log.error(errorString); throw new BestPpnException(errorString); } } diff --git a/src/main/java/fr/abes/bestppn/service/LogFileService.java b/src/main/java/fr/abes/bestppn/service/LogFileService.java index 3e97625..94b6d07 100644 --- a/src/main/java/fr/abes/bestppn/service/LogFileService.java +++ b/src/main/java/fr/abes/bestppn/service/LogFileService.java @@ -46,8 +46,7 @@ public void createExecutionReport(String fileName, int totalLines, int linesOk, fh.close(); // Copie le fichier existant vers le répertoire temporaire en 
ajoutant sa date de création - assert source != null; - if (Files.exists(source)) { + if (source != null && Files.exists(source)) { LocalDateTime time = LocalDateTime.now(); DateTimeFormatter format = DateTimeFormatter.ofPattern("yyyy-MM-dd-HH-mm-ss", Locale.FRANCE); String date = format.format(time); From 52858dd4b128e5bab9f3295a4cdb170bb9010ad7 Mon Sep 17 00:00:00 2001 From: EryneKL <97091460+EryneKL@users.noreply.github.com> Date: Wed, 11 Oct 2023 20:42:29 +0200 Subject: [PATCH 14/23] =?UTF-8?q?FEAT=20:=20CDE-195-creer-consumer-pour-ge?= =?UTF-8?q?nerer-les-2-fichiers-de-log=20=20=20=20=20=20-=20ajout=20d'une?= =?UTF-8?q?=20donn=C3=A9e=20dans=20le=20rapport=20de=20log?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java | 5 ++--- .../java/fr/abes/bestppn/service/LogFileService.java | 9 ++++++--- src/main/resources/application.properties | 2 +- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java index 0c37a3a..4b9ee32 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java @@ -124,7 +124,7 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) thr this.nbBestPpnFind = 0; serviceMail.sendMailWithAttachment(filename, mailAttachment); producer.sendEndOfTraitmentReport(headerList); // Appel le producer pour l'envoi du message de fin de traitement. - logFileService.createExecutionReport(filename, Integer.parseInt(totalLine), Integer.parseInt(totalLine) - this.linesWithInputDataErrors - this.linesWithErrorsInBestPPNSearch, this.linesWithInputDataErrors, this.linesWithErrorsInBestPPNSearch); + logFileService.createExecutionReport(filename, Integer.parseInt(totalLine), Integer.parseInt(totalLine) - this.linesWithInputDataErrors - this.linesWithErrorsInBestPPNSearch, this.linesWithInputDataErrors, this.linesWithErrorsInBestPPNSearch, injectKafka); kbartToSend.clear(); ppnToCreate.clear(); ppnFromKbartToCreate.clear(); @@ -173,8 +173,7 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) thr } catch (MessagingException | ExecutionException | InterruptedException | RuntimeException e) { log.error(e.getMessage()); producer.sendEndOfTraitmentReport(headerList); - logFileService.createExecutionReport(filename, Integer.parseInt(totalLine), Integer.parseInt(totalLine) - this.linesWithInputDataErrors - this.linesWithErrorsInBestPPNSearch, this.linesWithInputDataErrors, this.linesWithErrorsInBestPPNSearch); - // TODO Ajouter une nouvelle donnée à la création du rapport de log de bestPpn-api : erreur bloquante sur l'application bestPpn, processus interrompu + logFileService.createExecutionReport(filename, Integer.parseInt(totalLine), Integer.parseInt(totalLine) - this.linesWithInputDataErrors - this.linesWithErrorsInBestPPNSearch, this.linesWithInputDataErrors, this.linesWithErrorsInBestPPNSearch, injectKafka); } } diff --git a/src/main/java/fr/abes/bestppn/service/LogFileService.java b/src/main/java/fr/abes/bestppn/service/LogFileService.java index 94b6d07..49e28c2 100644 --- a/src/main/java/fr/abes/bestppn/service/LogFileService.java +++ b/src/main/java/fr/abes/bestppn/service/LogFileService.java @@ -29,7 +29,7 @@ public class LogFileService { * @param linesWithErrorsInBestPPNSearch le nombre total de lignes contenant des erreurs lors de la recherche du bestPpn * @throws IOException exception 
levée */ - public void createExecutionReport(String fileName, int totalLines, int linesOk, int linesWithInputDataErrors, int linesWithErrorsInBestPPNSearch) throws IOException { + public void createExecutionReport(String fileName, int totalLines, int linesOk, int linesWithInputDataErrors, int linesWithErrorsInBestPPNSearch, boolean injectKafka) throws IOException { try { // Création du fichier de log Logger logger = Logger.getLogger("ExecutionReport"); @@ -40,7 +40,11 @@ public void createExecutionReport(String fileName, int totalLines, int linesOk, SimpleFormatter formatter = new SimpleFormatter(); fh.setFormatter(formatter); logger.setUseParentHandlers(false); // désactive l'affichage du log dans le terminal - logger.info("TOTAL LINES : " + totalLines + " / LINES OK : " + linesOk + " / LINES WITH INPUT DATA ERRORS : " + linesWithInputDataErrors + " / LINES WITH ERRORS IN BESTPPN SEARCH : " + linesWithErrorsInBestPPNSearch); + logger.info("TOTAL LINES : " + totalLines + System.lineSeparator() + + "LINES OK : " + linesOk + System.lineSeparator() + + "LINES WITH INPUT DATA ERRORS : " + linesWithInputDataErrors + System.lineSeparator() + + "LINES WITH ERRORS IN BESTPPN SEARCH : " + linesWithErrorsInBestPPNSearch + System.lineSeparator() + + "FORCE_OPTION : " + injectKafka + System.lineSeparator()); // Fermeture du fichier de log fh.close(); @@ -67,5 +71,4 @@ public void createExecutionReport(String fileName, int totalLines, int linesOk, e.printStackTrace(); } } - } diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 02880f2..9df4eb6 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -25,7 +25,7 @@ auto.create.topics.enable=true spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer spring.kafka.producer.transaction-id-prefix=tx- -spring.kafka.consumer.properties.isolation.level= read_committed +spring.kafka.consumer.properties.isolation.level=read_committed # Topic Kafka topic.name.target.kbart=bacon.kbart.withppn.toload From 70906164871f658d941dbbf32b2174b0c51c2da0 Mon Sep 17 00:00:00 2001 From: EryneKL <97091460+EryneKL@users.noreply.github.com> Date: Wed, 11 Oct 2023 21:16:06 +0200 Subject: [PATCH 15/23] FEAT : CDE-195-creer-consumer-pour-generer-les-2-fichiers-de-log - merge branche develop --- .../java/fr/abes/bestppn/kafka/TopicConsumer.java | 6 ++---- .../java/fr/abes/bestppn/kafka/TopicProducer.java | 12 ++++-------- 2 files changed, 6 insertions(+), 12 deletions(-) diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java index 72ea87e..ff952e2 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java @@ -4,7 +4,6 @@ import fr.abes.LigneKbartImprime; import fr.abes.bestppn.dto.PackageKbartDto; import fr.abes.bestppn.dto.kafka.LigneKbartDto; -import fr.abes.bestppn.dto.kafka.PpnKbartProviderDto; import fr.abes.bestppn.dto.kafka.PpnWithDestinationDto; import fr.abes.bestppn.entity.bacon.Provider; import fr.abes.bestppn.entity.bacon.ProviderPackage; @@ -30,7 +29,6 @@ import java.io.IOException; import java.net.URISyntaxException; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; import java.util.Optional; import java.util.concurrent.ExecutionException; @@ -66,6 +64,8 @@ public class 
TopicConsumer { private final ProviderRepository providerRepository; + private final List<Header>
headerList = new ArrayList<>(); + private boolean isOnError = false; boolean injectKafka = false; @@ -76,8 +76,6 @@ public class TopicConsumer { private int linesWithErrorsInBestPPNSearch = 0; - private List<Header>
headerList = new ArrayList<>(); - private String filename = ""; private String totalLine = ""; diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java b/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java index 7d950ef..63eac01 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java @@ -45,22 +45,18 @@ public class TopicProducer { @Value("${topic.name.target.ppnFromKbart}") private String topicKbartPpnToCreate; - private KafkaTemplate kafkaTemplate; - private KafkaTemplate kafkaTemplateConnect; private KafkaTemplate kafkaTemplateImprime; - private UtilsMapper utilsMapper; - - @Autowired private KafkaProducer kafkaProducer; + private UtilsMapper utilsMapper; + @Autowired - public TopicProducer(KafkaTemplate kafkaTemplateConnect, KafkaTemplate kafkaTemplateImprime, KafkaTemplate kafkaTemplate, KafkaProducer kafkaProducer, UtilsMapper utilsMapper) { + public TopicProducer(KafkaTemplate kafkaTemplateConnect, KafkaTemplate kafkaTemplateImprime, KafkaProducer kafkaProducer, UtilsMapper utilsMapper) { this.kafkaTemplateConnect = kafkaTemplateConnect; this.kafkaTemplateImprime = kafkaTemplateImprime; - this.kafkaTemplate = kafkaTemplate; this.kafkaProducer = kafkaProducer; this.utilsMapper = utilsMapper; } @@ -69,7 +65,7 @@ public TopicProducer(KafkaTemplate kafkaTemplateConne * Méthode d'envoi d'une ligne kbart vers topic kafka pour chargement * * @param kbart : ligne kbart à envoyer - * @param provider + * @param provider : provider * @param filename : nom du fichier du traitement en cours */ @Transactional(transactionManager = "kafkaTransactionManager", rollbackFor = {BestPpnException.class, JsonProcessingException.class}) From 35795db93176b7e84b6ca7ae6d181af17d958d63 Mon Sep 17 00:00:00 2001 From: EryneKL <97091460+EryneKL@users.noreply.github.com> Date: Fri, 13 Oct 2023 08:39:16 +0200 Subject: [PATCH 16/23] FEAT : CDE-195-creer-consumer-pour-generer-les-2-fichiers-de-log - ajout d'une ligne de log pour les erreurs sur le calcul du bestppn - ajout d'un try catch sur un producer --- .../java/fr/abes/bestppn/kafka/TopicConsumer.java | 2 +- .../java/fr/abes/bestppn/kafka/TopicProducer.java | 15 ++++++++++----- .../fr/abes/bestppn/service/BestPpnService.java | 2 ++ 3 files changed, 13 insertions(+), 6 deletions(-) diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java index ff952e2..0a54ed1 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java @@ -85,7 +85,7 @@ public class TopicConsumer { * @param lignesKbart message kafka récupéré par le Consumer Kafka */ @KafkaListener(topics = {"${topic.name.source.kbart}"}, groupId = "${topic.groupid.source.kbart}", containerFactory = "kafkaKbartListenerContainerFactory") - public void listenKbartFromKafka(ConsumerRecord lignesKbart) throws ExecutionException, InterruptedException, IOException { + public void listenKbartFromKafka(ConsumerRecord lignesKbart) throws Exception { try { String currentLine = ""; for (Header header : lignesKbart.headers().toArray()) { diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java b/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java index 63eac01..bd47d1d 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java @@ -49,7 +49,7 @@ public class TopicProducer { private KafkaTemplate kafkaTemplateImprime; - 
private KafkaProducer kafkaProducer; + private KafkaProducer kafkaProducerOk; private UtilsMapper utilsMapper; @@ -57,7 +57,7 @@ public class TopicProducer { public TopicProducer(KafkaTemplate kafkaTemplateConnect, KafkaTemplate kafkaTemplateImprime, KafkaProducer kafkaProducer, UtilsMapper utilsMapper) { this.kafkaTemplateConnect = kafkaTemplateConnect; this.kafkaTemplateImprime = kafkaTemplateImprime; - this.kafkaProducer = kafkaProducer; + this.kafkaProducerOk = kafkaProducer; this.utilsMapper = utilsMapper; } @@ -185,8 +185,13 @@ public byte[] value() { */ @Transactional(transactionManager = "kafkaTransactionManager") public void sendEndOfTraitmentReport(List
headerList) throws ExecutionException, InterruptedException { - ProducerRecord record = new ProducerRecord<>(topicEndOfTraitment, null, "", "OK", headerList); - kafkaProducer.send(record); - log.info("End of traitment report sent."); + try { + ProducerRecord record = new ProducerRecord<>(topicEndOfTraitment, null, "", "OK", headerList); + kafkaProducerOk.send(record); + log.info("End of traitment report sent."); + } catch (Exception e) { + String message = "Error sending message to topic " + topicEndOfTraitment; + throw new RuntimeException(message, e); + } } } diff --git a/src/main/java/fr/abes/bestppn/service/BestPpnService.java b/src/main/java/fr/abes/bestppn/service/BestPpnService.java index 65fef6e..d3080b2 100644 --- a/src/main/java/fr/abes/bestppn/service/BestPpnService.java +++ b/src/main/java/fr/abes/bestppn/service/BestPpnService.java @@ -185,6 +185,7 @@ yield switch (ppnPrintResultList.size()) { kbart.setErrorType(errorString); // vérification du forçage if (injectKafka) { + log.error(errorString); yield new PpnWithDestinationDto("",DESTINATION_TOPIC.BEST_PPN_BACON); } else { throw new BestPpnException(errorString); @@ -200,6 +201,7 @@ yield switch (ppnPrintResultList.size()) { kbart.setErrorType(errorString); // vérification du forçage if (injectKafka) { + log.error(errorString); yield new PpnWithDestinationDto("", DESTINATION_TOPIC.BEST_PPN_BACON); } else { throw new BestPpnException(errorString); From a710dba3e637e496b0052c555e9f93dba8ed828a Mon Sep 17 00:00:00 2001 From: SamuelQuetin Date: Mon, 23 Oct 2023 14:30:31 +0200 Subject: [PATCH 17/23] CDE157 Check si package existe --- .../bestppn/entity/bacon/ProviderPackage.java | 10 +++++++++ .../fr/abes/bestppn/kafka/TopicConsumer.java | 22 +++++++++++++------ .../bacon/ProviderPackageRepository.java | 4 ++++ 3 files changed, 29 insertions(+), 7 deletions(-) diff --git a/src/main/java/fr/abes/bestppn/entity/bacon/ProviderPackage.java b/src/main/java/fr/abes/bestppn/entity/bacon/ProviderPackage.java index ec22326..cda7490 100644 --- a/src/main/java/fr/abes/bestppn/entity/bacon/ProviderPackage.java +++ b/src/main/java/fr/abes/bestppn/entity/bacon/ProviderPackage.java @@ -7,6 +7,7 @@ import lombok.Setter; import java.io.Serializable; +import java.util.Date; @Entity @Table(name = "PROVIDER_PACKAGE") @@ -17,9 +18,18 @@ public class ProviderPackage implements Serializable { @EmbeddedId private ProviderPackageId providerPackageId; + @Column(name = "PACKAGE", insertable=false, updatable=false) + private String packageName; + + @Column(name = "DATE_P", insertable=false, updatable=false) + private Date dateP; + @Column(name = "LABEL_ABES") private char labelAbes; + @Column(name = "PROVIDER_IDT_PROVIDER", insertable=false, updatable=false) + private Integer providerIdtProvider; + @ManyToOne @JoinColumn(referencedColumnName = "IDT_PROVIDER", insertable = false, updatable = false) private Provider provider; diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java index 999a8a5..5eab1eb 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java @@ -4,7 +4,6 @@ import fr.abes.LigneKbartImprime; import fr.abes.bestppn.dto.PackageKbartDto; import fr.abes.bestppn.dto.kafka.LigneKbartDto; -import fr.abes.bestppn.dto.kafka.PpnKbartProviderDto; import fr.abes.bestppn.dto.kafka.PpnWithDestinationDto; import fr.abes.bestppn.entity.bacon.Provider; import fr.abes.bestppn.entity.bacon.ProviderPackage; @@ -94,8 +93,9 @@ 
public void listenKbartFromKafka(ConsumerRecord lignesKbart) { if (lignesKbart.value().equals("OK")) { if (!isOnError) { ProviderPackage provider = handlerProvider(providerOpt, filename, providerName); - // TODO vérifier s'il est pertinent de retirer le "_FORCE" du paramètre FileName du header avant envoi au producer - // fileName = fileName.contains("_FORCE") ? fileName.replace("_FORCE", "") : fileName; + + + producer.sendKbart(kbartToSend, provider, filename); producer.sendPrintNotice(ppnToCreate, filename); producer.sendPpnExNihilo(ppnFromKbartToCreate, provider, filename); @@ -185,10 +185,18 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) { private ProviderPackage handlerProvider(Optional providerOpt, String filename, String providerName) throws IllegalPackageException, IllegalDateException { if (providerOpt.isPresent()) { Provider provider = providerOpt.get(); - ProviderPackageId providerPackageId = new ProviderPackageId(Utils.extractPackageName(filename), Utils.extractDate(filename), provider.getIdtProvider()); - Optional providerPackage = providerPackageRepository.findByProviderPackageId(providerPackageId); - //pas d'info de package, on le crée - return providerPackage.orElseGet(() -> providerPackageRepository.save(new ProviderPackage(providerPackageId, 'N'))); + + Optional providerPackageOpt = providerPackageRepository.findAllByPackageNameAndProviderIdtProviderAndDateP(Utils.extractPackageName(filename),provider.getIdtProvider(),Utils.extractDate(filename)); + if( providerPackageOpt.isPresent()){ + log.info("clear row package"); + + return providerPackageOpt.get(); + } else { + //pas d'info de package, on le crée + ProviderPackageId providerPackageId = new ProviderPackageId(Utils.extractPackageName(filename), Utils.extractDate(filename), provider.getIdtProvider()); + Optional providerPackage = providerPackageRepository.findByProviderPackageId(providerPackageId); + return providerPackage.orElseGet(() -> providerPackageRepository.save(new ProviderPackage(providerPackageId, 'N'))); + } } else { //pas de provider, ni de package, on les crée tous les deux Provider newProvider = new Provider(providerName); diff --git a/src/main/java/fr/abes/bestppn/repository/bacon/ProviderPackageRepository.java b/src/main/java/fr/abes/bestppn/repository/bacon/ProviderPackageRepository.java index 6cf607a..4a16e07 100644 --- a/src/main/java/fr/abes/bestppn/repository/bacon/ProviderPackageRepository.java +++ b/src/main/java/fr/abes/bestppn/repository/bacon/ProviderPackageRepository.java @@ -6,10 +6,14 @@ import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.stereotype.Repository; +import java.util.Date; import java.util.Optional; @Repository @BaconDbConfiguration public interface ProviderPackageRepository extends JpaRepository { Optional findByProviderPackageId(ProviderPackageId providerPackageId); + + Optional findAllByPackageNameAndProviderIdtProviderAndDateP(String packageName, Integer providerIdtProvider, Date date_P); + } From 949ca217a2b6aa31a86581b54382caebb7c17cde Mon Sep 17 00:00:00 2001 From: SamuelQuetin Date: Wed, 25 Oct 2023 15:49:13 +0200 Subject: [PATCH 18/23] Ajout ExecutionReport Ajout Gestion Different Producer avec et sans transaction. 
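Not part of the patch: the ExecutionReport class added by this commit only exposes counters plus a derived getNbLinesOk(), so its arithmetic is easy to pin down with a quick unit test. A minimal JUnit 5 sketch (assuming JUnit 5, which the existing tests appear to use, the Lombok-generated setNbtotalLines setter, and the accessors visible in the diff; the test class name is illustrative):

import static org.junit.jupiter.api.Assertions.assertEquals;

import fr.abes.bestppn.entity.ExecutionReport;
import org.junit.jupiter.api.Test;

class ExecutionReportTest {

    @Test
    void nbLinesOkIsTotalMinusErrorLines() {
        ExecutionReport report = new ExecutionReport();
        report.setNbtotalLines(10);                   // Lombok @Data generates this setter
        report.addNbLinesWithInputDataErrors();       // one line rejected on input data
        report.addNbLinesWithErrorsInBestPPNSearch(); // one line failed during the bestppn search
        report.addNbBestPpnFind();
        report.addNbBestPpnFind();

        assertEquals(8, report.getNbLinesOk());       // 10 total - 1 input error - 1 bestppn error
        assertEquals(2, report.getNbBestPpnFind());

        report.clear();                               // reset between two processed packages
        assertEquals(0, report.getNbLinesOk());
        assertEquals(0, report.getNbBestPpnFind());
    }
}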
--- .../bestppn/configuration/KafkaConfig.java | 49 ++++++++++++++----- .../abes/bestppn/entity/ExecutionReport.java | 37 ++++++++++++++ .../fr/abes/bestppn/kafka/TopicConsumer.java | 28 +++++------ .../fr/abes/bestppn/kafka/TopicProducer.java | 16 +++--- 4 files changed, 93 insertions(+), 37 deletions(-) create mode 100644 src/main/java/fr/abes/bestppn/entity/ExecutionReport.java diff --git a/src/main/java/fr/abes/bestppn/configuration/KafkaConfig.java b/src/main/java/fr/abes/bestppn/configuration/KafkaConfig.java index ab9b269..138bb7b 100644 --- a/src/main/java/fr/abes/bestppn/configuration/KafkaConfig.java +++ b/src/main/java/fr/abes/bestppn/configuration/KafkaConfig.java @@ -5,7 +5,6 @@ import io.confluent.kafka.serializers.KafkaAvroSerializer; import io.confluent.kafka.serializers.KafkaAvroSerializerConfig; import org.apache.kafka.clients.consumer.ConsumerConfig; -import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.common.serialization.StringDeserializer; import org.apache.kafka.common.serialization.StringSerializer; @@ -60,7 +59,7 @@ public ConsumerFactory consumerKbartFactory() { } @Bean - public Map producerConfigs() { + public Map producerConfigsWithTransaction() { Map props = new HashMap<>(); props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress); props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); @@ -72,26 +71,52 @@ public Map producerConfigs() { } @Bean - public ProducerFactory producerFactory() { - DefaultKafkaProducerFactory factory = new DefaultKafkaProducerFactory<>(producerConfigs()); - factory.setTransactionIdPrefix(transactionIdPrefix); + public Map producerConfigs() { + Map props = new HashMap<>(); + props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress); + props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); + props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class); + props.put(KafkaAvroSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, registryUrl); + props.put(KafkaAvroSerializerConfig.AUTO_REGISTER_SCHEMAS, autoRegisterSchema); + return props; + } + + @Bean + public ProducerFactory producerFactoryLigneKbartConnectWithTransaction() { + DefaultKafkaProducerFactory factory = new DefaultKafkaProducerFactory<>(producerConfigsWithTransaction()); + factory.setTransactionIdPrefix(transactionIdPrefix+"connect-"); + return factory; + } + @Bean + public ProducerFactory producerFactoryLigneKbartImprimeWithTransaction() { + DefaultKafkaProducerFactory factory = new DefaultKafkaProducerFactory<>(producerConfigsWithTransaction()); + factory.setTransactionIdPrefix(transactionIdPrefix+"print-"); + return factory; + } + + @Bean + public ProducerFactory producerFactory() { + DefaultKafkaProducerFactory factory = new DefaultKafkaProducerFactory<>(producerConfigs()); return factory; } @Bean - public KafkaTransactionManager kafkaTransactionManager(){ - return new KafkaTransactionManager<>(producerFactory()); + public KafkaTransactionManager kafkaTransactionManagerKbartConnect(){ + return new KafkaTransactionManager<>(producerFactoryLigneKbartConnectWithTransaction()); + } + @Bean + public KafkaTransactionManager kafkaTransactionManagerKbartImprime(){ + return new KafkaTransactionManager<>(producerFactoryLigneKbartImprimeWithTransaction()); } @Bean - public KafkaTemplate kafkaTemplateConnect(final ProducerFactory producerFactory) { return new KafkaTemplate<>(producerFactory);} + public KafkaTemplate 
kafkaTemplateConnect(final ProducerFactory producerFactoryLigneKbartConnectWithTransaction) { return new KafkaTemplate<>(producerFactoryLigneKbartConnectWithTransaction);} @Bean - public KafkaTemplate kafkaTemplateImprime(final ProducerFactory producerFactory) { return new KafkaTemplate<>(producerFactory);} + public KafkaTemplate kafkaTemplateImprime(final ProducerFactory producerFactoryLigneKbartImprimeWithTransaction) { return new KafkaTemplate<>(producerFactoryLigneKbartImprimeWithTransaction);} @Bean - public KafkaProducer kafkaProducerOk() { - StringSerializer serial = new StringSerializer(); - return new KafkaProducer<>(producerConfigs(), serial, serial); + public KafkaTemplate kafkatemplateEndoftraitement(final ProducerFactory producerFactory) { + return new KafkaTemplate<>(producerFactory); } } diff --git a/src/main/java/fr/abes/bestppn/entity/ExecutionReport.java b/src/main/java/fr/abes/bestppn/entity/ExecutionReport.java new file mode 100644 index 0000000..5b30410 --- /dev/null +++ b/src/main/java/fr/abes/bestppn/entity/ExecutionReport.java @@ -0,0 +1,37 @@ +package fr.abes.bestppn.entity; + +import lombok.Data; + +@Data +public class ExecutionReport { + private int nbtotalLines = 0; + + private int nbBestPpnFind = 0; + + private int nbLinesWithInputDataErrors = 0; + + private int nbLinesWithErrorsInBestPPNSearch = 0; + + public int getNbLinesOk(){ + return nbtotalLines - nbLinesWithErrorsInBestPPNSearch - nbLinesWithInputDataErrors; + } + + public void addNbBestPpnFind(){ + nbBestPpnFind++; + } + + public void addNbLinesWithInputDataErrors(){ + nbLinesWithInputDataErrors++; + } + + public void addNbLinesWithErrorsInBestPPNSearch(){ + nbLinesWithErrorsInBestPPNSearch++; + } + + public void clear(){ + nbtotalLines = 0; + nbBestPpnFind = 0; + nbLinesWithInputDataErrors = 0; + nbLinesWithErrorsInBestPPNSearch = 0; + } +} diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java index 0a54ed1..a8a12fe 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java @@ -5,6 +5,7 @@ import fr.abes.bestppn.dto.PackageKbartDto; import fr.abes.bestppn.dto.kafka.LigneKbartDto; import fr.abes.bestppn.dto.kafka.PpnWithDestinationDto; +import fr.abes.bestppn.entity.ExecutionReport; import fr.abes.bestppn.entity.bacon.Provider; import fr.abes.bestppn.entity.bacon.ProviderPackage; import fr.abes.bestppn.entity.bacon.ProviderPackageId; @@ -70,11 +71,7 @@ public class TopicConsumer { boolean injectKafka = false; - private int nbBestPpnFind = 0; - - private int linesWithInputDataErrors = 0; - - private int linesWithErrorsInBestPPNSearch = 0; + private ExecutionReport executionReport = new ExecutionReport(); private String filename = ""; @@ -100,6 +97,7 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) thr headerList.add(header); } else if (header.key().equals("TotalLine")) { totalLine = new String(header.value()); + executionReport.setNbtotalLines(Integer.parseInt(totalLine)); headerList.add(header); } } @@ -118,17 +116,15 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) thr } else { isOnError = false; } - log.info("Nombre de best ppn trouvé : " + this.nbBestPpnFind + "/" + totalLine); - this.nbBestPpnFind = 0; + log.info("Nombre de best ppn trouvé : " + executionReport.getNbBestPpnFind() + "/" + totalLine); serviceMail.sendMailWithAttachment(filename, mailAttachment); producer.sendEndOfTraitmentReport(headerList); // Appel le producer pour 
l'envoi du message de fin de traitement. - logFileService.createExecutionReport(filename, Integer.parseInt(totalLine), Integer.parseInt(totalLine) - this.linesWithInputDataErrors - this.linesWithErrorsInBestPPNSearch, this.linesWithInputDataErrors, this.linesWithErrorsInBestPPNSearch, injectKafka); + logFileService.createExecutionReport(filename, Integer.parseInt(totalLine), executionReport.getNbLinesOk(), executionReport.getNbLinesWithInputDataErrors(), executionReport.getNbLinesWithErrorsInBestPPNSearch(), injectKafka); kbartToSend.clear(); ppnToCreate.clear(); ppnFromKbartToCreate.clear(); mailAttachment.clearKbartDto(); - this.linesWithInputDataErrors = 0; - this.linesWithErrorsInBestPPNSearch = 0; + executionReport.clear(); } else { LigneKbartDto ligneFromKafka = mapper.readValue(lignesKbart.value(), LigneKbartDto.class); if (ligneFromKafka.isBestPpnEmpty()) { @@ -138,7 +134,7 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) thr switch (ppnWithDestinationDto.getDestination()) { case BEST_PPN_BACON -> { ligneFromKafka.setBestPpn(ppnWithDestinationDto.getPpn()); - this.nbBestPpnFind++; + executionReport.addNbBestPpnFind(); kbartToSend.add(ligneFromKafka); } case PRINT_PPN_SUDOC -> { @@ -188,22 +184,22 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) thr isOnError = true; log.error("Erreur dans les données en entrée, provider incorrect"); addLineToMailAttachementWithErrorMessage(e.getMessage()); - linesWithInputDataErrors++; + executionReport.addNbLinesWithInputDataErrors(); } catch (URISyntaxException | RestClientException | IllegalArgumentException | IOException | IllegalPackageException | IllegalDateException e) { isOnError = true; log.error(e.getMessage()); addLineToMailAttachementWithErrorMessage(e.getMessage()); - linesWithInputDataErrors++; + executionReport.addNbLinesWithInputDataErrors(); } catch (IllegalPpnException | BestPpnException e) { isOnError = true; log.error(e.getMessage()); addLineToMailAttachementWithErrorMessage(e.getMessage()); - linesWithErrorsInBestPPNSearch++; + executionReport.addNbLinesWithErrorsInBestPPNSearch(); } catch (MessagingException | ExecutionException | InterruptedException | RuntimeException e) { log.error(e.getMessage()); - producer.sendEndOfTraitmentReport(headerList); - logFileService.createExecutionReport(filename, Integer.parseInt(totalLine), Integer.parseInt(totalLine) - this.linesWithInputDataErrors - this.linesWithErrorsInBestPPNSearch, this.linesWithInputDataErrors, this.linesWithErrorsInBestPPNSearch, injectKafka); +// producer.sendEndOfTraitmentReport(headerList); +// logFileService.createExecutionReport(filename, Integer.parseInt(totalLine), Integer.parseInt(totalLine) - this.nbLinesWithInputDataErrors - this.nbLinesWithErrorsInBestPPNSearch, this.nbLinesWithInputDataErrors, this.nbLinesWithErrorsInBestPPNSearch, injectKafka); } } diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java b/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java index bd47d1d..336ce54 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java @@ -9,7 +9,6 @@ import fr.abes.bestppn.utils.UtilsMapper; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.clients.producer.RecordMetadata; import org.apache.kafka.common.header.Header; @@ -49,15 +48,15 @@ public class TopicProducer { private 
KafkaTemplate kafkaTemplateImprime; - private KafkaProducer kafkaProducerOk; + private KafkaTemplate kafkatemplateEndoftraitement; private UtilsMapper utilsMapper; @Autowired - public TopicProducer(KafkaTemplate kafkaTemplateConnect, KafkaTemplate kafkaTemplateImprime, KafkaProducer kafkaProducer, UtilsMapper utilsMapper) { + public TopicProducer(KafkaTemplate kafkaTemplateConnect, KafkaTemplate kafkaTemplateImprime, KafkaTemplate kafkatemplateEndoftraitement, UtilsMapper utilsMapper) { this.kafkaTemplateConnect = kafkaTemplateConnect; this.kafkaTemplateImprime = kafkaTemplateImprime; - this.kafkaProducerOk = kafkaProducer; + this.kafkatemplateEndoftraitement = kafkatemplateEndoftraitement; this.utilsMapper = utilsMapper; } @@ -68,7 +67,7 @@ public TopicProducer(KafkaTemplate kafkaTemplateConne * @param provider : provider * @param filename : nom du fichier du traitement en cours */ - @Transactional(transactionManager = "kafkaTransactionManager", rollbackFor = {BestPpnException.class, JsonProcessingException.class}) + @Transactional(transactionManager = "kafkaTransactionManagerKbartConnect", rollbackFor = {BestPpnException.class, JsonProcessingException.class}) public void sendKbart(List kbart, ProviderPackage provider, String filename) throws JsonProcessingException, BestPpnException, ExecutionException, InterruptedException { int numLigneCourante = 0; for (LigneKbartDto ligne : kbart) { @@ -93,7 +92,7 @@ public void sendKbart(List kbart, ProviderPackage provider, Strin * @param ligneKbartImprimes : liste de kbart * @param filename : nom du fichier à traiter */ - @Transactional(transactionManager = "kafkaTransactionManager") + @Transactional(transactionManager = "kafkaTransactionManagerKbartImprime") public void sendPrintNotice(List ligneKbartImprimes, String filename) { for (LigneKbartImprime ppnToCreate : ligneKbartImprimes) { List
headerList = new ArrayList<>(); @@ -110,7 +109,7 @@ public void sendPrintNotice(List ligneKbartImprimes, String f * @param ppnFromKbartToCreate : liste de lignes kbart * @param filename : nom du fichier à traiter */ - @Transactional(transactionManager = "kafkaTransactionManager") + @Transactional(transactionManager = "kafkaTransactionManagerKbartConnect") public void sendPpnExNihilo(List ppnFromKbartToCreate, ProviderPackage provider, String filename) throws JsonProcessingException { for (LigneKbartDto ligne : ppnFromKbartToCreate) { ligne.setProviderPackagePackage(provider.getProviderPackageId().getPackageName()); @@ -183,11 +182,10 @@ public byte[] value() { * Envoie un message de fin de traitement sur le topic kafka endOfTraitment_kbart2kafka * @param headerList list de Header (contient le nom du package et la date) */ - @Transactional(transactionManager = "kafkaTransactionManager") public void sendEndOfTraitmentReport(List
headerList) throws ExecutionException, InterruptedException { try { ProducerRecord record = new ProducerRecord<>(topicEndOfTraitment, null, "", "OK", headerList); - kafkaProducerOk.send(record); + kafkatemplateEndoftraitement.send(record); log.info("End of traitment report sent."); } catch (Exception e) { String message = "Error sending message to topic " + topicEndOfTraitment; From b74ad605cb2ab893e0696e9dc9a58f7b37ce6941 Mon Sep 17 00:00:00 2001 From: pierre-maraval Date: Thu, 26 Oct 2023 11:16:41 +0200 Subject: [PATCH 19/23] =?UTF-8?q?CDE-249=20:=20FIX=20:=20correction=20enti?= =?UTF-8?q?t=C3=A9s=20suite=20=C3=A0=20modification=20bdd=20Bacon?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/main/java/fr/abes/LigneKbartConnect.java | 100 +++++++++++++++--- .../abes/bestppn/dto/kafka/LigneKbartDto.java | 2 + .../bestppn/entity/bacon/ProviderPackage.java | 19 +++- .../entity/bacon/ProviderPackageId.java | 44 -------- .../fr/abes/bestppn/kafka/TopicConsumer.java | 12 +-- .../fr/abes/bestppn/kafka/TopicProducer.java | 14 +-- .../bacon/ProviderPackageRepository.java | 7 +- .../abes/bestppn/utils/LigneKbartMapper.java | 1 + .../avro/ligne_kbart_convergence.avsc | 4 + 9 files changed, 126 insertions(+), 77 deletions(-) delete mode 100644 src/main/java/fr/abes/bestppn/entity/bacon/ProviderPackageId.java diff --git a/src/main/java/fr/abes/LigneKbartConnect.java b/src/main/java/fr/abes/LigneKbartConnect.java index 3f8037b..29ec1dd 100644 --- a/src/main/java/fr/abes/LigneKbartConnect.java +++ b/src/main/java/fr/abes/LigneKbartConnect.java @@ -14,10 +14,10 @@ @org.apache.avro.specific.AvroGenerated public class LigneKbartConnect extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { - private static final long serialVersionUID = -4747621436631233106L; + private static final long serialVersionUID = -6107934252124705352L; - public static final org.apache.avro.Schema SCHEMA$ = new 
org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"LigneKbartConnect\",\"namespace\":\"fr.abes\",\"fields\":[{\"name\":\"PUBLICATION_TITLE\",\"type\":[\"null\",\"string\"]},{\"name\":\"PRINT_IDENTIFIER\",\"type\":[\"null\",\"string\"]},{\"name\":\"ONLINE_IDENTIFIER\",\"type\":[\"null\",\"string\"]},{\"name\":\"DATE_FIRST_ISSUE_ONLINE\",\"type\":[\"null\",\"string\"]},{\"name\":\"NUM_FIRST_VOL_ONLINE\",\"type\":[\"null\",\"string\"]},{\"name\":\"NUM_FIRST_ISSUE_ONLINE\",\"type\":[\"null\",\"string\"]},{\"name\":\"DATE_LAST_ISSUE_ONLINE\",\"type\":[\"null\",\"string\"]},{\"name\":\"NUM_LAST_VOL_ONLINE\",\"type\":[\"null\",\"string\"]},{\"name\":\"NUM_LAST_ISSUE_ONLINE\",\"type\":[\"null\",\"string\"]},{\"name\":\"TITLE_URL\",\"type\":[\"null\",\"string\"]},{\"name\":\"FIRST_AUTHOR\",\"type\":[\"null\",\"string\"]},{\"name\":\"TITLE_ID\",\"type\":[\"null\",\"string\"]},{\"name\":\"EMBARGO_INFO\",\"type\":[\"null\",\"string\"]},{\"name\":\"COVERAGE_DEPTH\",\"type\":[\"null\",\"string\"]},{\"name\":\"NOTES\",\"type\":[\"null\",\"string\"]},{\"name\":\"PUBLISHER_NAME\",\"type\":[\"null\",\"string\"]},{\"name\":\"PUBLICATION_TYPE\",\"type\":[\"null\",\"string\"]},{\"name\":\"DATE_MONOGRAPH_PUBLISHED_PRINT\",\"type\":[\"null\",\"string\"]},{\"name\":\"DATE_MONOGRAPH_PUBLISHED_ONLIN\",\"type\":[\"null\",\"string\"]},{\"name\":\"MONOGRAPH_VOLUME\",\"type\":[\"null\",\"string\"]},{\"name\":\"MONOGRAPH_EDITION\",\"type\":[\"null\",\"string\"]},{\"name\":\"FIRST_EDITOR\",\"type\":[\"null\",\"string\"]},{\"name\":\"PARENT_PUBLICATION_TITLE_ID\",\"type\":[\"null\",\"string\"]},{\"name\":\"PRECEDING_PUBLICATION_TITLE_ID\",\"type\":[\"null\",\"string\"]},{\"name\":\"ACCESS_TYPE\",\"type\":[\"null\",\"string\"]},{\"name\":\"PROVIDER_PACKAGE_PACKAGE\",\"type\":\"string\"},{\"name\":\"PROVIDER_PACKAGE_DATE_P\",\"type\":{\"type\":\"int\",\"logicalType\":\"date\"}},{\"name\":\"PROVIDER_PACKAGE_IDT_PROVIDER\",\"type\":\"int\"},{\"name\":\"BEST_PPN\",\"type\":[\"null\",\"string\"]}]}"); + public static final org.apache.avro.Schema SCHEMA$ = new 
org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"LigneKbartConnect\",\"namespace\":\"fr.abes\",\"fields\":[{\"name\":\"PUBLICATION_TITLE\",\"type\":[\"null\",\"string\"]},{\"name\":\"PRINT_IDENTIFIER\",\"type\":[\"null\",\"string\"]},{\"name\":\"ONLINE_IDENTIFIER\",\"type\":[\"null\",\"string\"]},{\"name\":\"DATE_FIRST_ISSUE_ONLINE\",\"type\":[\"null\",\"string\"]},{\"name\":\"NUM_FIRST_VOL_ONLINE\",\"type\":[\"null\",\"string\"]},{\"name\":\"NUM_FIRST_ISSUE_ONLINE\",\"type\":[\"null\",\"string\"]},{\"name\":\"DATE_LAST_ISSUE_ONLINE\",\"type\":[\"null\",\"string\"]},{\"name\":\"NUM_LAST_VOL_ONLINE\",\"type\":[\"null\",\"string\"]},{\"name\":\"NUM_LAST_ISSUE_ONLINE\",\"type\":[\"null\",\"string\"]},{\"name\":\"TITLE_URL\",\"type\":[\"null\",\"string\"]},{\"name\":\"FIRST_AUTHOR\",\"type\":[\"null\",\"string\"]},{\"name\":\"TITLE_ID\",\"type\":[\"null\",\"string\"]},{\"name\":\"EMBARGO_INFO\",\"type\":[\"null\",\"string\"]},{\"name\":\"COVERAGE_DEPTH\",\"type\":[\"null\",\"string\"]},{\"name\":\"NOTES\",\"type\":[\"null\",\"string\"]},{\"name\":\"PUBLISHER_NAME\",\"type\":[\"null\",\"string\"]},{\"name\":\"PUBLICATION_TYPE\",\"type\":[\"null\",\"string\"]},{\"name\":\"DATE_MONOGRAPH_PUBLISHED_PRINT\",\"type\":[\"null\",\"string\"]},{\"name\":\"DATE_MONOGRAPH_PUBLISHED_ONLIN\",\"type\":[\"null\",\"string\"]},{\"name\":\"MONOGRAPH_VOLUME\",\"type\":[\"null\",\"string\"]},{\"name\":\"MONOGRAPH_EDITION\",\"type\":[\"null\",\"string\"]},{\"name\":\"FIRST_EDITOR\",\"type\":[\"null\",\"string\"]},{\"name\":\"PARENT_PUBLICATION_TITLE_ID\",\"type\":[\"null\",\"string\"]},{\"name\":\"PRECEDING_PUBLICATION_TITLE_ID\",\"type\":[\"null\",\"string\"]},{\"name\":\"ACCESS_TYPE\",\"type\":[\"null\",\"string\"]},{\"name\":\"PROVIDER_PACKAGE_PACKAGE\",\"type\":\"string\"},{\"name\":\"PROVIDER_PACKAGE_DATE_P\",\"type\":{\"type\":\"int\",\"logicalType\":\"date\"}},{\"name\":\"PROVIDER_PACKAGE_IDT_PROVIDER\",\"type\":\"int\"},{\"name\":\"ID_PROVIDER_PACKAGE\",\"type\":\"int\"},{\"name\":\"BEST_PPN\",\"type\":[\"null\",\"string\"]}]}"); public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } private static final SpecificData MODEL$ = new SpecificData(); @@ -104,6 +104,7 @@ public static LigneKbartConnect fromByteBuffer( private java.lang.CharSequence PROVIDER_PACKAGE_PACKAGE; private java.time.LocalDate PROVIDER_PACKAGE_DATE_P; private int PROVIDER_PACKAGE_IDT_PROVIDER; + private int ID_PROVIDER_PACKAGE; private java.lang.CharSequence BEST_PPN; /** @@ -143,9 +144,10 @@ public LigneKbartConnect() {} * @param PROVIDER_PACKAGE_PACKAGE The new value for PROVIDER_PACKAGE_PACKAGE * @param PROVIDER_PACKAGE_DATE_P The new value for PROVIDER_PACKAGE_DATE_P * @param PROVIDER_PACKAGE_IDT_PROVIDER The new value for PROVIDER_PACKAGE_IDT_PROVIDER + * @param ID_PROVIDER_PACKAGE The new value for ID_PROVIDER_PACKAGE * @param BEST_PPN The new value for BEST_PPN */ - public LigneKbartConnect(java.lang.CharSequence PUBLICATION_TITLE, java.lang.CharSequence PRINT_IDENTIFIER, java.lang.CharSequence ONLINE_IDENTIFIER, java.lang.CharSequence DATE_FIRST_ISSUE_ONLINE, java.lang.CharSequence NUM_FIRST_VOL_ONLINE, java.lang.CharSequence NUM_FIRST_ISSUE_ONLINE, java.lang.CharSequence DATE_LAST_ISSUE_ONLINE, java.lang.CharSequence NUM_LAST_VOL_ONLINE, java.lang.CharSequence NUM_LAST_ISSUE_ONLINE, java.lang.CharSequence TITLE_URL, java.lang.CharSequence FIRST_AUTHOR, java.lang.CharSequence TITLE_ID, java.lang.CharSequence EMBARGO_INFO, java.lang.CharSequence COVERAGE_DEPTH, java.lang.CharSequence NOTES, 
java.lang.CharSequence PUBLISHER_NAME, java.lang.CharSequence PUBLICATION_TYPE, java.lang.CharSequence DATE_MONOGRAPH_PUBLISHED_PRINT, java.lang.CharSequence DATE_MONOGRAPH_PUBLISHED_ONLIN, java.lang.CharSequence MONOGRAPH_VOLUME, java.lang.CharSequence MONOGRAPH_EDITION, java.lang.CharSequence FIRST_EDITOR, java.lang.CharSequence PARENT_PUBLICATION_TITLE_ID, java.lang.CharSequence PRECEDING_PUBLICATION_TITLE_ID, java.lang.CharSequence ACCESS_TYPE, java.lang.CharSequence PROVIDER_PACKAGE_PACKAGE, java.time.LocalDate PROVIDER_PACKAGE_DATE_P, java.lang.Integer PROVIDER_PACKAGE_IDT_PROVIDER, java.lang.CharSequence BEST_PPN) { + public LigneKbartConnect(java.lang.CharSequence PUBLICATION_TITLE, java.lang.CharSequence PRINT_IDENTIFIER, java.lang.CharSequence ONLINE_IDENTIFIER, java.lang.CharSequence DATE_FIRST_ISSUE_ONLINE, java.lang.CharSequence NUM_FIRST_VOL_ONLINE, java.lang.CharSequence NUM_FIRST_ISSUE_ONLINE, java.lang.CharSequence DATE_LAST_ISSUE_ONLINE, java.lang.CharSequence NUM_LAST_VOL_ONLINE, java.lang.CharSequence NUM_LAST_ISSUE_ONLINE, java.lang.CharSequence TITLE_URL, java.lang.CharSequence FIRST_AUTHOR, java.lang.CharSequence TITLE_ID, java.lang.CharSequence EMBARGO_INFO, java.lang.CharSequence COVERAGE_DEPTH, java.lang.CharSequence NOTES, java.lang.CharSequence PUBLISHER_NAME, java.lang.CharSequence PUBLICATION_TYPE, java.lang.CharSequence DATE_MONOGRAPH_PUBLISHED_PRINT, java.lang.CharSequence DATE_MONOGRAPH_PUBLISHED_ONLIN, java.lang.CharSequence MONOGRAPH_VOLUME, java.lang.CharSequence MONOGRAPH_EDITION, java.lang.CharSequence FIRST_EDITOR, java.lang.CharSequence PARENT_PUBLICATION_TITLE_ID, java.lang.CharSequence PRECEDING_PUBLICATION_TITLE_ID, java.lang.CharSequence ACCESS_TYPE, java.lang.CharSequence PROVIDER_PACKAGE_PACKAGE, java.time.LocalDate PROVIDER_PACKAGE_DATE_P, java.lang.Integer PROVIDER_PACKAGE_IDT_PROVIDER, java.lang.Integer ID_PROVIDER_PACKAGE, java.lang.CharSequence BEST_PPN) { this.PUBLICATION_TITLE = PUBLICATION_TITLE; this.PRINT_IDENTIFIER = PRINT_IDENTIFIER; this.ONLINE_IDENTIFIER = ONLINE_IDENTIFIER; @@ -174,6 +176,7 @@ public LigneKbartConnect(java.lang.CharSequence PUBLICATION_TITLE, java.lang.Cha this.PROVIDER_PACKAGE_PACKAGE = PROVIDER_PACKAGE_PACKAGE; this.PROVIDER_PACKAGE_DATE_P = PROVIDER_PACKAGE_DATE_P; this.PROVIDER_PACKAGE_IDT_PROVIDER = PROVIDER_PACKAGE_IDT_PROVIDER; + this.ID_PROVIDER_PACKAGE = ID_PROVIDER_PACKAGE; this.BEST_PPN = BEST_PPN; } @@ -215,7 +218,8 @@ public java.lang.Object get(int field$) { case 25: return PROVIDER_PACKAGE_PACKAGE; case 26: return PROVIDER_PACKAGE_DATE_P; case 27: return PROVIDER_PACKAGE_IDT_PROVIDER; - case 28: return BEST_PPN; + case 28: return ID_PROVIDER_PACKAGE; + case 29: return BEST_PPN; default: throw new IndexOutOfBoundsException("Invalid index: " + field$); } } @@ -251,6 +255,7 @@ public java.lang.Object get(int field$) { new org.apache.avro.data.TimeConversions.DateConversion(), null, null, + null, null }; @@ -292,7 +297,8 @@ public void put(int field$, java.lang.Object value$) { case 25: PROVIDER_PACKAGE_PACKAGE = (java.lang.CharSequence)value$; break; case 26: PROVIDER_PACKAGE_DATE_P = (java.time.LocalDate)value$; break; case 27: PROVIDER_PACKAGE_IDT_PROVIDER = (java.lang.Integer)value$; break; - case 28: BEST_PPN = (java.lang.CharSequence)value$; break; + case 28: ID_PROVIDER_PACKAGE = (java.lang.Integer)value$; break; + case 29: BEST_PPN = (java.lang.CharSequence)value$; break; default: throw new IndexOutOfBoundsException("Invalid index: " + field$); } } @@ -773,6 +779,23 @@ public void 
setPROVIDERPACKAGEIDTPROVIDER(int value) { this.PROVIDER_PACKAGE_IDT_PROVIDER = value; } + /** + * Gets the value of the 'ID_PROVIDER_PACKAGE' field. + * @return The value of the 'ID_PROVIDER_PACKAGE' field. + */ + public int getIDPROVIDERPACKAGE() { + return ID_PROVIDER_PACKAGE; + } + + + /** + * Sets the value of the 'ID_PROVIDER_PACKAGE' field. + * @param value the value to set. + */ + public void setIDPROVIDERPACKAGE(int value) { + this.ID_PROVIDER_PACKAGE = value; + } + /** * Gets the value of the 'BEST_PPN' field. * @return The value of the 'BEST_PPN' field. @@ -859,6 +882,7 @@ public static class Builder extends org.apache.avro.specific.SpecificRecordBuild private java.lang.CharSequence PROVIDER_PACKAGE_PACKAGE; private java.time.LocalDate PROVIDER_PACKAGE_DATE_P; private int PROVIDER_PACKAGE_IDT_PROVIDER; + private int ID_PROVIDER_PACKAGE; private java.lang.CharSequence BEST_PPN; /** Creates a new Builder */ @@ -984,10 +1008,14 @@ private Builder(fr.abes.LigneKbartConnect.Builder other) { this.PROVIDER_PACKAGE_IDT_PROVIDER = data().deepCopy(fields()[27].schema(), other.PROVIDER_PACKAGE_IDT_PROVIDER); fieldSetFlags()[27] = other.fieldSetFlags()[27]; } - if (isValidValue(fields()[28], other.BEST_PPN)) { - this.BEST_PPN = data().deepCopy(fields()[28].schema(), other.BEST_PPN); + if (isValidValue(fields()[28], other.ID_PROVIDER_PACKAGE)) { + this.ID_PROVIDER_PACKAGE = data().deepCopy(fields()[28].schema(), other.ID_PROVIDER_PACKAGE); fieldSetFlags()[28] = other.fieldSetFlags()[28]; } + if (isValidValue(fields()[29], other.BEST_PPN)) { + this.BEST_PPN = data().deepCopy(fields()[29].schema(), other.BEST_PPN); + fieldSetFlags()[29] = other.fieldSetFlags()[29]; + } } /** @@ -1108,10 +1136,14 @@ private Builder(fr.abes.LigneKbartConnect other) { this.PROVIDER_PACKAGE_IDT_PROVIDER = data().deepCopy(fields()[27].schema(), other.PROVIDER_PACKAGE_IDT_PROVIDER); fieldSetFlags()[27] = true; } - if (isValidValue(fields()[28], other.BEST_PPN)) { - this.BEST_PPN = data().deepCopy(fields()[28].schema(), other.BEST_PPN); + if (isValidValue(fields()[28], other.ID_PROVIDER_PACKAGE)) { + this.ID_PROVIDER_PACKAGE = data().deepCopy(fields()[28].schema(), other.ID_PROVIDER_PACKAGE); fieldSetFlags()[28] = true; } + if (isValidValue(fields()[29], other.BEST_PPN)) { + this.BEST_PPN = data().deepCopy(fields()[29].schema(), other.BEST_PPN); + fieldSetFlags()[29] = true; + } } /** @@ -2232,6 +2264,45 @@ public fr.abes.LigneKbartConnect.Builder clearPROVIDERPACKAGEIDTPROVIDER() { return this; } + /** + * Gets the value of the 'ID_PROVIDER_PACKAGE' field. + * @return The value. + */ + public int getIDPROVIDERPACKAGE() { + return ID_PROVIDER_PACKAGE; + } + + + /** + * Sets the value of the 'ID_PROVIDER_PACKAGE' field. + * @param value The value of 'ID_PROVIDER_PACKAGE'. + * @return This builder. + */ + public fr.abes.LigneKbartConnect.Builder setIDPROVIDERPACKAGE(int value) { + validate(fields()[28], value); + this.ID_PROVIDER_PACKAGE = value; + fieldSetFlags()[28] = true; + return this; + } + + /** + * Checks whether the 'ID_PROVIDER_PACKAGE' field has been set. + * @return True if the 'ID_PROVIDER_PACKAGE' field has been set, false otherwise. + */ + public boolean hasIDPROVIDERPACKAGE() { + return fieldSetFlags()[28]; + } + + + /** + * Clears the value of the 'ID_PROVIDER_PACKAGE' field. + * @return This builder. + */ + public fr.abes.LigneKbartConnect.Builder clearIDPROVIDERPACKAGE() { + fieldSetFlags()[28] = false; + return this; + } + /** * Gets the value of the 'BEST_PPN' field. * @return The value. 
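As an illustration of the schema change above, a minimal sketch of how the regenerated builder could be used once ID_PROVIDER_PACKAGE is present. Setter names follow the Avro generated-code naming pattern visible in this hunk, and every value below is a placeholder rather than anything taken from the patch:

LigneKbartConnect ligne = LigneKbartConnect.newBuilder()
        .setPROVIDERPACKAGEPACKAGE("EXAMPLE_PACKAGE")        // placeholder package name
        .setPROVIDERPACKAGEDATEP(java.time.LocalDate.now())  // logicalType "date" maps to LocalDate
        .setPROVIDERPACKAGEIDTPROVIDER(1)                    // placeholder provider id
        .setIDPROVIDERPACKAGE(42)                            // new field, stored at index 28
        .setBESTPPN("123456789")                             // BEST_PPN now sits at index 29
        .build();

The nullable fields of the schema can be left unset; the non-nullable ones shown here must be set before build().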
@@ -2247,9 +2318,9 @@ public java.lang.CharSequence getBESTPPN() { * @return This builder. */ public fr.abes.LigneKbartConnect.Builder setBESTPPN(java.lang.CharSequence value) { - validate(fields()[28], value); + validate(fields()[29], value); this.BEST_PPN = value; - fieldSetFlags()[28] = true; + fieldSetFlags()[29] = true; return this; } @@ -2258,7 +2329,7 @@ public fr.abes.LigneKbartConnect.Builder setBESTPPN(java.lang.CharSequence value * @return True if the 'BEST_PPN' field has been set, false otherwise. */ public boolean hasBESTPPN() { - return fieldSetFlags()[28]; + return fieldSetFlags()[29]; } @@ -2268,7 +2339,7 @@ public boolean hasBESTPPN() { */ public fr.abes.LigneKbartConnect.Builder clearBESTPPN() { BEST_PPN = null; - fieldSetFlags()[28] = false; + fieldSetFlags()[29] = false; return this; } @@ -2305,7 +2376,8 @@ public LigneKbartConnect build() { record.PROVIDER_PACKAGE_PACKAGE = fieldSetFlags()[25] ? this.PROVIDER_PACKAGE_PACKAGE : (java.lang.CharSequence) defaultValue(fields()[25]); record.PROVIDER_PACKAGE_DATE_P = fieldSetFlags()[26] ? this.PROVIDER_PACKAGE_DATE_P : (java.time.LocalDate) defaultValue(fields()[26]); record.PROVIDER_PACKAGE_IDT_PROVIDER = fieldSetFlags()[27] ? this.PROVIDER_PACKAGE_IDT_PROVIDER : (java.lang.Integer) defaultValue(fields()[27]); - record.BEST_PPN = fieldSetFlags()[28] ? this.BEST_PPN : (java.lang.CharSequence) defaultValue(fields()[28]); + record.ID_PROVIDER_PACKAGE = fieldSetFlags()[28] ? this.ID_PROVIDER_PACKAGE : (java.lang.Integer) defaultValue(fields()[28]); + record.BEST_PPN = fieldSetFlags()[29] ? this.BEST_PPN : (java.lang.CharSequence) defaultValue(fields()[29]); return record; } catch (org.apache.avro.AvroMissingFieldException e) { throw e; diff --git a/src/main/java/fr/abes/bestppn/dto/kafka/LigneKbartDto.java b/src/main/java/fr/abes/bestppn/dto/kafka/LigneKbartDto.java index 4528421..fa07864 100644 --- a/src/main/java/fr/abes/bestppn/dto/kafka/LigneKbartDto.java +++ b/src/main/java/fr/abes/bestppn/dto/kafka/LigneKbartDto.java @@ -123,6 +123,8 @@ public class LigneKbartDto { private Date providerPackageDateP; @JsonProperty("provider_package_idt_provider") private Integer providerPackageIdtProvider; + @JsonProperty("id_provider_package") + private Integer idProviderPackage; @JsonIgnore diff --git a/src/main/java/fr/abes/bestppn/entity/bacon/ProviderPackage.java b/src/main/java/fr/abes/bestppn/entity/bacon/ProviderPackage.java index ec22326..18b3efe 100644 --- a/src/main/java/fr/abes/bestppn/entity/bacon/ProviderPackage.java +++ b/src/main/java/fr/abes/bestppn/entity/bacon/ProviderPackage.java @@ -7,6 +7,7 @@ import lombok.Setter; import java.io.Serializable; +import java.util.Date; @Entity @Table(name = "PROVIDER_PACKAGE") @@ -14,8 +15,16 @@ @AllArgsConstructor @NoArgsConstructor public class ProviderPackage implements Serializable { - @EmbeddedId - private ProviderPackageId providerPackageId; + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + @Column(name = "ID_PROVIDER_PACKAGE") + private Integer idProviderPackage; + @Column(name = "PACKAGE") + private String packageName; + @Column(name = "DATE_P") + private Date dateP; + @Column(name = "PROVIDER_IDT_PROVIDER") + private Integer providerIdtProvider; @Column(name = "LABEL_ABES") private char labelAbes; @@ -24,8 +33,10 @@ public class ProviderPackage implements Serializable { @JoinColumn(referencedColumnName = "IDT_PROVIDER", insertable = false, updatable = false) private Provider provider; - public ProviderPackage(ProviderPackageId providerPackageId, char labelAbes) { - 
this.providerPackageId = providerPackageId; + public ProviderPackage(String packageName, Date dateP, Integer providerIdtProvider, char labelAbes) { + this.packageName = packageName; + this.dateP = dateP; + this.providerIdtProvider = providerIdtProvider; this.labelAbes = labelAbes; } } diff --git a/src/main/java/fr/abes/bestppn/entity/bacon/ProviderPackageId.java b/src/main/java/fr/abes/bestppn/entity/bacon/ProviderPackageId.java deleted file mode 100644 index 744f835..0000000 --- a/src/main/java/fr/abes/bestppn/entity/bacon/ProviderPackageId.java +++ /dev/null @@ -1,44 +0,0 @@ -package fr.abes.bestppn.entity.bacon; - -import jakarta.persistence.Column; -import jakarta.persistence.Embeddable; -import lombok.Getter; -import lombok.NoArgsConstructor; - -import java.io.Serializable; -import java.util.Date; -import java.util.Objects; - - -@Embeddable -@Getter -@NoArgsConstructor -public class ProviderPackageId implements Serializable { - @Column(name = "PACKAGE") - private String packageName; - @Column(name = "DATE_P") - private Date dateP; - @Column(name = "PROVIDER_IDT_PROVIDER") - private Integer providerIdtProvider; - - public ProviderPackageId(String packageName, Date datePackage, Integer idtProvider) { - this.packageName = packageName; - this.dateP = datePackage; - this.providerIdtProvider = idtProvider; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (!(o instanceof ProviderPackageId)) return false; - ProviderPackageId that = (ProviderPackageId) o; - return Objects.equals(getProviderIdtProvider(), that.getProviderIdtProvider()) && - Objects.equals(getDateP(), that.getDateP()) && - Objects.equals(getPackageName(), that.getPackageName()); - } - - @Override - public int hashCode() { - return Objects.hash(getProviderIdtProvider(), getDateP(), getPackageName()); - } -} diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java index 999a8a5..a0dc120 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java @@ -4,11 +4,9 @@ import fr.abes.LigneKbartImprime; import fr.abes.bestppn.dto.PackageKbartDto; import fr.abes.bestppn.dto.kafka.LigneKbartDto; -import fr.abes.bestppn.dto.kafka.PpnKbartProviderDto; import fr.abes.bestppn.dto.kafka.PpnWithDestinationDto; import fr.abes.bestppn.entity.bacon.Provider; import fr.abes.bestppn.entity.bacon.ProviderPackage; -import fr.abes.bestppn.entity.bacon.ProviderPackageId; import fr.abes.bestppn.exception.*; import fr.abes.bestppn.repository.bacon.ProviderPackageRepository; import fr.abes.bestppn.repository.bacon.ProviderRepository; @@ -29,6 +27,7 @@ import java.io.IOException; import java.net.URISyntaxException; import java.util.ArrayList; +import java.util.Date; import java.util.List; import java.util.Optional; @@ -183,17 +182,18 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) { } private ProviderPackage handlerProvider(Optional providerOpt, String filename, String providerName) throws IllegalPackageException, IllegalDateException { + String packageName = Utils.extractPackageName(filename); + Date packageDate = Utils.extractDate(filename); if (providerOpt.isPresent()) { Provider provider = providerOpt.get(); - ProviderPackageId providerPackageId = new ProviderPackageId(Utils.extractPackageName(filename), Utils.extractDate(filename), provider.getIdtProvider()); - Optional providerPackage = providerPackageRepository.findByProviderPackageId(providerPackageId); + 
Optional providerPackage = providerPackageRepository.findByPackageNameAndDatePAndProviderIdtProvider(packageName, packageDate, provider.getIdtProvider()); //pas d'info de package, on le crée - return providerPackage.orElseGet(() -> providerPackageRepository.save(new ProviderPackage(providerPackageId, 'N'))); + return providerPackage.orElseGet(() -> providerPackageRepository.save(new ProviderPackage(packageName, packageDate, provider.getIdtProvider(), 'N'))); } else { //pas de provider, ni de package, on les crée tous les deux Provider newProvider = new Provider(providerName); Provider savedProvider = providerRepository.save(newProvider); - ProviderPackage providerPackage = new ProviderPackage(new ProviderPackageId(Utils.extractPackageName(filename), Utils.extractDate(filename), savedProvider.getIdtProvider()), 'N'); + ProviderPackage providerPackage = new ProviderPackage(packageName, packageDate, savedProvider.getIdtProvider(), 'N'); return providerPackageRepository.save(providerPackage); } } diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java b/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java index ee1d5b4..ede9a64 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicProducer.java @@ -66,9 +66,10 @@ public void sendKbart(List kbart, ProviderPackage provider, Strin int numLigneCourante = 0; for (LigneKbartDto ligne : kbart) { numLigneCourante++; - ligne.setProviderPackagePackage(provider.getProviderPackageId().getPackageName()); - ligne.setProviderPackageDateP(provider.getProviderPackageId().getDateP()); - ligne.setProviderPackageIdtProvider(provider.getProviderPackageId().getProviderIdtProvider()); + ligne.setIdProviderPackage(provider.getIdProviderPackage()); + ligne.setProviderPackagePackage(provider.getPackageName()); + ligne.setProviderPackageDateP(provider.getDateP()); + ligne.setProviderPackageIdtProvider(provider.getProviderIdtProvider()); List
headerList = new ArrayList<>(); headerList.add(constructHeader("filename", filename.getBytes())); if (numLigneCourante == kbart.size()) @@ -106,9 +107,10 @@ public void sendPrintNotice(List ligneKbartImprimes, String f @Transactional(transactionManager = "kafkaTransactionManager") public void sendPpnExNihilo(List ppnFromKbartToCreate, ProviderPackage provider, String filename) { for (LigneKbartDto ligne : ppnFromKbartToCreate) { - ligne.setProviderPackagePackage(provider.getProviderPackageId().getPackageName()); - ligne.setProviderPackageDateP(provider.getProviderPackageId().getDateP()); - ligne.setProviderPackageIdtProvider(provider.getProviderPackageId().getProviderIdtProvider()); + ligne.setIdProviderPackage(provider.getIdProviderPackage()); + ligne.setProviderPackagePackage(provider.getPackageName()); + ligne.setProviderPackageDateP(provider.getDateP()); + ligne.setProviderPackageIdtProvider(provider.getProviderIdtProvider()); List
headerList = new ArrayList<>(); headerList.add(constructHeader("filename", filename.getBytes(StandardCharsets.US_ASCII))); sendObject(ligne, topicKbartPpnToCreate, headerList); diff --git a/src/main/java/fr/abes/bestppn/repository/bacon/ProviderPackageRepository.java b/src/main/java/fr/abes/bestppn/repository/bacon/ProviderPackageRepository.java index 6cf607a..aa493e0 100644 --- a/src/main/java/fr/abes/bestppn/repository/bacon/ProviderPackageRepository.java +++ b/src/main/java/fr/abes/bestppn/repository/bacon/ProviderPackageRepository.java @@ -2,14 +2,15 @@ import fr.abes.bestppn.configuration.BaconDbConfiguration; import fr.abes.bestppn.entity.bacon.ProviderPackage; -import fr.abes.bestppn.entity.bacon.ProviderPackageId; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.stereotype.Repository; +import java.util.Date; import java.util.Optional; @Repository @BaconDbConfiguration -public interface ProviderPackageRepository extends JpaRepository { - Optional findByProviderPackageId(ProviderPackageId providerPackageId); +public interface ProviderPackageRepository extends JpaRepository { + Optional findByPackageNameAndDatePAndProviderIdtProvider(String packageName, Date dateP, Integer providerIdtProvider); + } diff --git a/src/main/java/fr/abes/bestppn/utils/LigneKbartMapper.java b/src/main/java/fr/abes/bestppn/utils/LigneKbartMapper.java index 81287a5..a2107c5 100644 --- a/src/main/java/fr/abes/bestppn/utils/LigneKbartMapper.java +++ b/src/main/java/fr/abes/bestppn/utils/LigneKbartMapper.java @@ -50,6 +50,7 @@ public LigneKbartConnect convert(MappingContext Date: Thu, 26 Oct 2023 15:30:56 +0200 Subject: [PATCH 20/23] message erreur --- .../java/fr/abes/bestppn/service/EmailService.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/main/java/fr/abes/bestppn/service/EmailService.java b/src/main/java/fr/abes/bestppn/service/EmailService.java index 601d370..f93e0c5 100644 --- a/src/main/java/fr/abes/bestppn/service/EmailService.java +++ b/src/main/java/fr/abes/bestppn/service/EmailService.java @@ -47,7 +47,7 @@ public void sendMailWithAttachment(String packageName, PackageKbartDto dataLines createAttachment(dataLines, csvPath); // Création du mail - String requestJson = mailToJSON(this.recipient, "["+env.toUpperCase()+"] Rapport de traitement BestPPN " + packageName + ".csv", ""); + String requestJson = mailToJSON(this.recipient, "["+env.toUpperCase()+"] Rapport de traitement BestPPN " + packageName + ".csv"); // Récupération du fichier File file = csvPath.toFile(); @@ -110,7 +110,7 @@ protected void sendMail(String requestJson, File f) { f.getName() ); } catch (FileNotFoundException e) { - log.error("Le fichier n'a pas été trouvé. " + e.toString()); + log.error("Le fichier n'a pas été trouvé. " + e.getMessage()); } // Envoi du mail @@ -120,11 +120,11 @@ protected void sendMail(String requestJson, File f) { try (CloseableHttpClient httpClient = HttpClients.createDefault()) { httpClient.execute(uploadFile); } catch (IOException e) { - log.error("Erreur lors de l'envoi du mail. " + e.toString()); + log.error("Erreur lors de l'envoi du mail. 
" + e.getMessage()); } } - protected String mailToJSON(String to, String subject, String text) { + protected String mailToJSON(String to, String subject) { String json = ""; ObjectMapper mapper = new ObjectMapper(); MailDto mail = new MailDto(); @@ -133,7 +133,7 @@ protected String mailToJSON(String to, String subject, String text) { mail.setCc(new String[]{}); mail.setCci(new String[]{}); mail.setSubject(subject); - mail.setText(text); + mail.setText(""); try { json = mapper.writeValueAsString(mail); } catch (JsonProcessingException e) { From 1c592e2bac3e2b6bd0cc48f62970b696f5eb650e Mon Sep 17 00:00:00 2001 From: SamuelQuetin Date: Thu, 26 Oct 2023 16:10:29 +0200 Subject: [PATCH 21/23] fix after merge --- src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java index 527c61b..0a6f6b7 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java @@ -188,16 +188,14 @@ private ProviderPackage handlerProvider(Optional providerOpt, String f if (providerOpt.isPresent()) { Provider provider = providerOpt.get(); - Optional providerPackageOpt = providerPackageRepository.findAllByPackageNameAndProviderIdtProviderAndDateP(Utils.extractPackageName(filename),provider.getIdtProvider(),Utils.extractDate(filename)); + Optional providerPackageOpt = providerPackageRepository.findByPackageNameAndDatePAndProviderIdtProvider(Utils.extractPackageName(filename),Utils.extractDate(filename),provider.getIdtProvider()); if( providerPackageOpt.isPresent()){ log.info("clear row package"); return providerPackageOpt.get(); } else { //pas d'info de package, on le crée - ProviderPackageId providerPackageId = new ProviderPackageId(Utils.extractPackageName(filename), Utils.extractDate(filename), provider.getIdtProvider()); - Optional providerPackage = providerPackageRepository.findByProviderPackageId(providerPackageId); - return providerPackage.orElseGet(() -> providerPackageRepository.save(new ProviderPackage(providerPackageId, 'N'))); + return providerPackageRepository.save(new ProviderPackage(packageName, packageDate, provider.getIdtProvider(), 'N')); } } else { //pas de provider, ni de package, on les crée tous les deux From 24ed81a89e9810d253280b4b300b129f28de7df7 Mon Sep 17 00:00:00 2001 From: SamuelQuetin Date: Fri, 27 Oct 2023 10:04:52 +0200 Subject: [PATCH 22/23] ajout LigneKbart repo et ajout supp ligne au cas ou packageProvider existe pour creer nouvelle ligne etc --- .../abes/bestppn/entity/bacon/LigneKbart.java | 23 +++++++++++++++++++ .../bestppn/entity/bacon/ProviderPackage.java | 15 ++++-------- .../fr/abes/bestppn/kafka/TopicConsumer.java | 9 +++++--- .../bacon/LigneKbartRepository.java | 15 ++++++++++++ 4 files changed, 49 insertions(+), 13 deletions(-) create mode 100644 src/main/java/fr/abes/bestppn/entity/bacon/LigneKbart.java create mode 100644 src/main/java/fr/abes/bestppn/repository/bacon/LigneKbartRepository.java diff --git a/src/main/java/fr/abes/bestppn/entity/bacon/LigneKbart.java b/src/main/java/fr/abes/bestppn/entity/bacon/LigneKbart.java new file mode 100644 index 0000000..e1b407c --- /dev/null +++ b/src/main/java/fr/abes/bestppn/entity/bacon/LigneKbart.java @@ -0,0 +1,23 @@ +package fr.abes.bestppn.entity.bacon; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.Id; +import 
jakarta.persistence.Table; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +@Entity +@Table(name = "LIGNE_KBART") +@Getter +@Setter +@NoArgsConstructor +public class LigneKbart { + @Column(name = "IDT_LIGNE_KBART") + @Id + private Integer id; + + @Column(name = "ID_PROVIDER_PACKAGE") + private Integer idProviderPackage; +} diff --git a/src/main/java/fr/abes/bestppn/entity/bacon/ProviderPackage.java b/src/main/java/fr/abes/bestppn/entity/bacon/ProviderPackage.java index 3119761..d08a17c 100644 --- a/src/main/java/fr/abes/bestppn/entity/bacon/ProviderPackage.java +++ b/src/main/java/fr/abes/bestppn/entity/bacon/ProviderPackage.java @@ -25,19 +25,9 @@ public class ProviderPackage implements Serializable { private Date dateP; @Column(name = "PROVIDER_IDT_PROVIDER") private Integer providerIdtProvider; - - @Column(name = "PACKAGE", insertable=false, updatable=false) - private String packageName; - - @Column(name = "DATE_P", insertable=false, updatable=false) - private Date dateP; - @Column(name = "LABEL_ABES") private char labelAbes; - @Column(name = "PROVIDER_IDT_PROVIDER", insertable=false, updatable=false) - private Integer providerIdtProvider; - @ManyToOne @JoinColumn(referencedColumnName = "IDT_PROVIDER", insertable = false, updatable = false) private Provider provider; @@ -48,4 +38,9 @@ public ProviderPackage(String packageName, Date dateP, Integer providerIdtProvid this.providerIdtProvider = providerIdtProvider; this.labelAbes = labelAbes; } + + @Override + public String toString() { + return "{ id:"+idProviderPackage + ", packageName:"+packageName+", providerIdt:"+providerIdtProvider+" dateP:"+dateP+" }"; + } } diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java index 0a6f6b7..bf2d16a 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java @@ -8,6 +8,7 @@ import fr.abes.bestppn.entity.bacon.Provider; import fr.abes.bestppn.entity.bacon.ProviderPackage; import fr.abes.bestppn.exception.*; +import fr.abes.bestppn.repository.bacon.LigneKbartRepository; import fr.abes.bestppn.repository.bacon.ProviderPackageRepository; import fr.abes.bestppn.repository.bacon.ProviderRepository; import fr.abes.bestppn.service.BestPpnService; @@ -58,6 +59,8 @@ public class TopicConsumer { private final ProviderRepository providerRepository; + private final LigneKbartRepository ligneKbartRepository; + private boolean isOnError = false; private int nbBestPpnFind = 0; @@ -188,10 +191,10 @@ private ProviderPackage handlerProvider(Optional providerOpt, String f if (providerOpt.isPresent()) { Provider provider = providerOpt.get(); - Optional providerPackageOpt = providerPackageRepository.findByPackageNameAndDatePAndProviderIdtProvider(Utils.extractPackageName(filename),Utils.extractDate(filename),provider.getIdtProvider()); + Optional providerPackageOpt = providerPackageRepository.findByPackageNameAndDatePAndProviderIdtProvider(packageName,packageDate,provider.getIdtProvider()); if( providerPackageOpt.isPresent()){ - log.info("clear row package"); - + log.info("clear row package : " + providerPackageOpt.get()); + ligneKbartRepository.deleteAllByIdProviderPackage(providerPackageOpt.get().getIdProviderPackage()); return providerPackageOpt.get(); } else { //pas d'info de package, on le crée diff --git a/src/main/java/fr/abes/bestppn/repository/bacon/LigneKbartRepository.java 
b/src/main/java/fr/abes/bestppn/repository/bacon/LigneKbartRepository.java new file mode 100644 index 0000000..9c44e59 --- /dev/null +++ b/src/main/java/fr/abes/bestppn/repository/bacon/LigneKbartRepository.java @@ -0,0 +1,15 @@ +package fr.abes.bestppn.repository.bacon; + +import fr.abes.bestppn.configuration.BaconDbConfiguration; +import fr.abes.bestppn.entity.bacon.LigneKbart; +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.stereotype.Repository; +import org.springframework.transaction.annotation.Transactional; + +@Repository +@BaconDbConfiguration +public interface LigneKbartRepository extends JpaRepository { + + @Transactional + void deleteAllByIdProviderPackage(Integer idProviderPackage); +} From d13e55e6c2a4263cd4f55cdbc77960a4dea91e11 Mon Sep 17 00:00:00 2001 From: SamuelQuetin Date: Fri, 27 Oct 2023 15:24:27 +0200 Subject: [PATCH 23/23] Fix fichier log --- .../fr/abes/bestppn/kafka/TopicConsumer.java | 4 ++-- .../fr/abes/bestppn/service/LogFileService.java | 16 +++++----------- 2 files changed, 7 insertions(+), 13 deletions(-) diff --git a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java index bb364ec..d8540fd 100644 --- a/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java +++ b/src/main/java/fr/abes/bestppn/kafka/TopicConsumer.java @@ -203,8 +203,8 @@ public void listenKbartFromKafka(ConsumerRecord lignesKbart) thr executionReport.addNbLinesWithErrorsInBestPPNSearch(); } catch (MessagingException | ExecutionException | InterruptedException | RuntimeException e) { log.error(e.getMessage()); -// producer.sendEndOfTraitmentReport(headerList); -// logFileService.createExecutionReport(filename, Integer.parseInt(totalLine), Integer.parseInt(totalLine) - this.nbLinesWithInputDataErrors - this.nbLinesWithErrorsInBestPPNSearch, this.nbLinesWithInputDataErrors, this.nbLinesWithErrorsInBestPPNSearch, injectKafka); + producer.sendEndOfTraitmentReport(headerList); + logFileService.createExecutionReport(filename, Integer.parseInt(totalLine), executionReport.getNbLinesOk(), executionReport.getNbLinesWithInputDataErrors(), executionReport.getNbLinesWithErrorsInBestPPNSearch(), injectKafka); } } diff --git a/src/main/java/fr/abes/bestppn/service/LogFileService.java b/src/main/java/fr/abes/bestppn/service/LogFileService.java index 49e28c2..fac2e9e 100644 --- a/src/main/java/fr/abes/bestppn/service/LogFileService.java +++ b/src/main/java/fr/abes/bestppn/service/LogFileService.java @@ -9,9 +9,6 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; -import java.time.LocalDateTime; -import java.time.format.DateTimeFormatter; -import java.util.Locale; import java.util.logging.FileHandler; import java.util.logging.Logger; import java.util.logging.SimpleFormatter; @@ -34,7 +31,7 @@ public void createExecutionReport(String fileName, int totalLines, int linesOk, // Création du fichier de log Logger logger = Logger.getLogger("ExecutionReport"); FileHandler fh; - Path source = Path.of(fileName.replaceAll(".tsv", ".log")); + Path source = Path.of(fileName.replace(".tsv", ".log")); fh = new FileHandler(String.valueOf(source), 1000, 1); logger.addHandler(fh); SimpleFormatter formatter = new SimpleFormatter(); @@ -51,17 +48,14 @@ public void createExecutionReport(String fileName, int totalLines, int linesOk, // Copie le fichier existant vers le répertoire temporaire en ajoutant sa date de création if (source != null && Files.exists(source)) { - LocalDateTime 
time = LocalDateTime.now(); - DateTimeFormatter format = DateTimeFormatter.ofPattern("yyyy-MM-dd-HH-mm-ss", Locale.FRANCE); - String date = format.format(time); // Vérification du chemin et création si inexistant - String tempLog = "tempLog/"; - File chemin = new File("tempLog/"); + String tempLogWithSeparator = "tempLog" + File.separator; + File chemin = new File(tempLogWithSeparator); if (!chemin.isDirectory()) { - Files.createDirectory(Paths.get(tempLog)); + Files.createDirectory(Paths.get(tempLogWithSeparator)); } - Path target = Path.of("tempLog\\" + date + "_" + source); + Path target = Path.of(tempLogWithSeparator + source); // Déplacement du fichier Files.move(source, target, StandardCopyOption.REPLACE_EXISTING);
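For reference, the move-to-tempLog logic above can be sketched in isolation as follows. The class and method names are illustrative only, and Path.resolve is used here as a portable alternative to concatenating the separator by hand; it is a design suggestion, not what the patch itself does:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

class MoveLogSketch {
    // Moves an execution-report log file into a tempLog/ directory, creating it if needed.
    static void moveToTempLog(Path source) throws IOException {
        Path tempDir = Path.of("tempLog");
        if (!Files.isDirectory(tempDir)) {
            Files.createDirectory(tempDir);
        }
        // resolve() builds tempLog/<filename> without hard-coding a path separator
        Files.move(source, tempDir.resolve(source.getFileName()), StandardCopyOption.REPLACE_EXISTING);
    }
}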