From 4d9c4e8448564891722b83f31cb4e522ee3d5417 Mon Sep 17 00:00:00 2001 From: Martin Ledvinka Date: Thu, 7 Nov 2024 15:26:11 +0100 Subject: [PATCH 01/49] [kbss-cvut/termit-ui#553] Add language to File. --- .../cvut/kbss/termit/model/resource/File.java | 31 +++++++++---------- .../context/DescriptorFactoryTest.java | 4 +-- 2 files changed, 17 insertions(+), 18 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/model/resource/File.java b/src/main/java/cz/cvut/kbss/termit/model/resource/File.java index 26b45f940..c16d62a2a 100644 --- a/src/main/java/cz/cvut/kbss/termit/model/resource/File.java +++ b/src/main/java/cz/cvut/kbss/termit/model/resource/File.java @@ -21,16 +21,16 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import cz.cvut.kbss.jopa.model.annotations.FetchType; import cz.cvut.kbss.jopa.model.annotations.Inferred; +import cz.cvut.kbss.jopa.model.annotations.OWLAnnotationProperty; import cz.cvut.kbss.jopa.model.annotations.OWLClass; import cz.cvut.kbss.jopa.model.annotations.OWLObjectProperty; import cz.cvut.kbss.jopa.model.annotations.Types; +import cz.cvut.kbss.jopa.vocabulary.DC; import cz.cvut.kbss.jsonld.annotation.JsonLdAttributeOrder; -import cz.cvut.kbss.termit.exception.TermItException; import cz.cvut.kbss.termit.model.util.SupportsStorage; import cz.cvut.kbss.termit.service.IdentifierResolver; import cz.cvut.kbss.termit.util.Vocabulary; -import java.lang.reflect.Field; import java.util.Objects; import java.util.Set; @@ -43,6 +43,9 @@ public class File extends Resource implements SupportsStorage { @OWLObjectProperty(iri = Vocabulary.s_p_je_casti_dokumentu, fetch = FetchType.EAGER) private Document document; + @OWLAnnotationProperty(iri = DC.Terms.LANGUAGE, simpleLiteral = true) + private String language; + @Types private Set types; @@ -54,6 +57,14 @@ public void setDocument(Document document) { this.document = document; } + public String getLanguage() { + return language; + } + + public void setLanguage(String language) { + this.language = language; + } + public Set getTypes() { return types; } @@ -73,15 +84,11 @@ public boolean equals(Object o) { return Objects.equals(getUri(), file.getUri()); } - @Override - public int hashCode() { - return Objects.hash(getUri()); - } - @Override public String toString() { return "File{" + - super.toString() + (document != null ? "document=<" + document.getUri() + ">" : "") + '}'; + super.toString() + (language != null ? "@" + language : "") + + (document != null ? "document=<" + document.getUri() + ">" : "") + '}'; } /** @@ -109,12 +116,4 @@ public String getDirectoryName() { return IdentifierResolver.normalizeToAscii(labelPart) + '_' + getUri().hashCode(); } } - - public static Field getDocumentField() { - try { - return File.class.getDeclaredField("document"); - } catch (NoSuchFieldException e) { - throw new TermItException("Fatal error! 
Unable to retrieve \"document\" field.", e); - } - } } diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/context/DescriptorFactoryTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/context/DescriptorFactoryTest.java index c22fc8a49..621c8d823 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/context/DescriptorFactoryTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/context/DescriptorFactoryTest.java @@ -98,7 +98,7 @@ void termDescriptorCreatesDescriptorWithExactMatchesContextSetToDefaultToAllowEx } @Test - void fileDescriptorContainsAlsoDescriptorForDocument() { + void fileDescriptorContainsAlsoDescriptorForDocument() throws Exception { final File file = Generator.generateFileWithId("test.html"); final Document doc = Generator.generateDocumentWithId(); doc.addFile(file); @@ -106,7 +106,7 @@ void fileDescriptorContainsAlsoDescriptorForDocument() { doc.setVocabulary(Generator.generateUri()); final Descriptor result = sut.fileDescriptor(doc.getVocabulary()); final FieldSpecification docFieldSpec = mock(FieldSpecification.class); - when(docFieldSpec.getJavaField()).thenReturn(File.getDocumentField()); + when(docFieldSpec.getJavaField()).thenReturn(File.class.getDeclaredField("document")); final Descriptor docDescriptor = result.getAttributeDescriptor(docFieldSpec); assertNotNull(docDescriptor); } From 0a6fdba9c864c09cebb614645294942459c5c86a Mon Sep 17 00:00:00 2001 From: Martin Ledvinka Date: Thu, 7 Nov 2024 16:09:33 +0100 Subject: [PATCH 02/49] [kbss-cvut/termit-ui#553] Set File language when it is not provided on creation. --- .../service/business/ResourceService.java | 10 ++++- .../service/business/ResourceServiceTest.java | 39 +++++++++++++++++++ 2 files changed, 48 insertions(+), 1 deletion(-) diff --git a/src/main/java/cz/cvut/kbss/termit/service/business/ResourceService.java b/src/main/java/cz/cvut/kbss/termit/service/business/ResourceService.java index f8d8f87a3..08aee833d 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/business/ResourceService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/business/ResourceService.java @@ -37,6 +37,7 @@ import cz.cvut.kbss.termit.service.document.html.UnconfirmedTermOccurrenceRemover; import cz.cvut.kbss.termit.service.repository.ChangeRecordService; import cz.cvut.kbss.termit.service.repository.ResourceRepositoryService; +import cz.cvut.kbss.termit.util.Configuration; import cz.cvut.kbss.termit.util.TypeAwareResource; import jakarta.annotation.Nonnull; import org.slf4j.Logger; @@ -80,22 +81,26 @@ public class ResourceService private final ChangeRecordService changeRecordService; + private final Configuration config; + private ApplicationEventPublisher eventPublisher; @Autowired public ResourceService(ResourceRepositoryService repositoryService, DocumentManager documentManager, TextAnalysisService textAnalysisService, VocabularyService vocabularyService, - ChangeRecordService changeRecordService) { + ChangeRecordService changeRecordService, Configuration config) { this.repositoryService = repositoryService; this.documentManager = documentManager; this.textAnalysisService = textAnalysisService; this.vocabularyService = vocabularyService; this.changeRecordService = changeRecordService; + this.config = config; } /** * Ensures that document gets removed during Vocabulary removal */ + @Transactional @EventListener public void onVocabularyRemoval(VocabularyWillBeRemovedEvent event) { vocabularyService.find(event.getVocabularyIri()).ifPresent(vocabulary -> { @@ -239,6 +244,9 @@ public void 
addFileToDocument(Resource document, File file) { throw new UnsupportedAssetOperationException("Cannot add file to the specified resource " + document); } doc.addFile(file); + if (file.getLanguage() == null) { + file.setLanguage(config.getPersistence().getLanguage()); + } if (doc.getVocabulary() != null) { final Vocabulary vocabulary = vocabularyService.getReference(doc.getVocabulary()); repositoryService.persist(file, vocabulary); diff --git a/src/test/java/cz/cvut/kbss/termit/service/business/ResourceServiceTest.java b/src/test/java/cz/cvut/kbss/termit/service/business/ResourceServiceTest.java index 6119b0f90..2777f42fe 100644 --- a/src/test/java/cz/cvut/kbss/termit/service/business/ResourceServiceTest.java +++ b/src/test/java/cz/cvut/kbss/termit/service/business/ResourceServiceTest.java @@ -35,6 +35,8 @@ import cz.cvut.kbss.termit.service.document.TextAnalysisService; import cz.cvut.kbss.termit.service.repository.ChangeRecordService; import cz.cvut.kbss.termit.service.repository.ResourceRepositoryService; +import cz.cvut.kbss.termit.util.Configuration; +import cz.cvut.kbss.termit.util.Constants; import cz.cvut.kbss.termit.util.TypeAwareByteArrayResource; import cz.cvut.kbss.termit.util.TypeAwareResource; import cz.cvut.kbss.termit.util.Utils; @@ -47,6 +49,7 @@ import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; +import org.mockito.Spy; import org.mockito.junit.jupiter.MockitoExtension; import org.springframework.context.ApplicationEventPublisher; import org.springframework.http.MediaType; @@ -96,6 +99,9 @@ class ResourceServiceTest { @Mock private ApplicationEventPublisher eventPublisher; + @Spy + private Configuration config = new Configuration(); + @InjectMocks private ResourceService sut; @@ -515,4 +521,37 @@ void getContentWithoutUnconfirmedOccurrencesRemovesUnconfirmedOccurrencesFromFil final org.jsoup.nodes.Document doc = Jsoup.parse(result.getInputStream(), StandardCharsets.UTF_8.name(), ""); assertTrue(doc.select("span[score]").isEmpty()); } + + @Test + void addFileToDocumentSetsFileLanguageToDefaultConfiguredWhenNotProvided() { + config.getPersistence().setLanguage(Constants.DEFAULT_LANGUAGE); + final Vocabulary vocabulary = Generator.generateVocabularyWithId(); + final Document document = Generator.generateDocumentWithId(); + document.setVocabulary(vocabulary.getUri()); + final File file = Generator.generateFileWithId("test.hml"); + when(resourceRepositoryService.exists(document.getUri())).thenReturn(true); + when(resourceRepositoryService.findRequired(document.getUri())).thenReturn(document); + when(vocabularyService.getReference(vocabulary.getUri())).thenReturn(vocabulary); + + sut.addFileToDocument(document, file); + verify(resourceRepositoryService).persist(file, vocabulary); + assertEquals(config.getPersistence().getLanguage(), file.getLanguage()); + } + + @Test + void addFileToDocumentDoesNotModifyLanguageWhenItIsAlreadySet() { + config.getPersistence().setLanguage(Constants.DEFAULT_LANGUAGE); + final Vocabulary vocabulary = Generator.generateVocabularyWithId(); + final Document document = Generator.generateDocumentWithId(); + document.setVocabulary(vocabulary.getUri()); + final File file = Generator.generateFileWithId("test.hml"); + file.setLanguage("cs"); + when(resourceRepositoryService.exists(document.getUri())).thenReturn(true); + when(resourceRepositoryService.findRequired(document.getUri())).thenReturn(document); + when(vocabularyService.getReference(vocabulary.getUri())).thenReturn(vocabulary); + + sut.addFileToDocument(document, 
file); + verify(resourceRepositoryService).persist(file, vocabulary); + assertEquals("cs", file.getLanguage()); + } } From 8c8a15f63bac28f2ed07b9f05a9c2fbaaee1ae14 Mon Sep 17 00:00:00 2001 From: Martin Ledvinka Date: Thu, 7 Nov 2024 16:25:54 +0100 Subject: [PATCH 03/49] [kbss-cvut/termit-ui#553] Use File language (if available) when invoking text analysis. --- .../service/document/TextAnalysisService.java | 2 +- .../document/TextAnalysisServiceTest.java | 96 ++++++++++++------- 2 files changed, 63 insertions(+), 35 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/service/document/TextAnalysisService.java b/src/main/java/cz/cvut/kbss/termit/service/document/TextAnalysisService.java index adc9dfdae..12bddd7e4 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/document/TextAnalysisService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/document/TextAnalysisService.java @@ -107,7 +107,7 @@ private TextAnalysisInput createAnalysisInput(File file) { publicUrl.isEmpty() || publicUrl.get().isEmpty() ? config.getRepository().getUrl() : publicUrl.get() ); input.setVocabularyRepository(repositoryUrl); - input.setLanguage(config.getPersistence().getLanguage()); + input.setLanguage(file.getLanguage() != null ? file.getLanguage() : config.getPersistence().getLanguage()); input.setVocabularyRepositoryUserName(config.getRepository().getUsername()); input.setVocabularyRepositoryPassword(config.getRepository().getPassword()); return input; diff --git a/src/test/java/cz/cvut/kbss/termit/service/document/TextAnalysisServiceTest.java b/src/test/java/cz/cvut/kbss/termit/service/document/TextAnalysisServiceTest.java index aa431671e..6bdc27284 100644 --- a/src/test/java/cz/cvut/kbss/termit/service/document/TextAnalysisServiceTest.java +++ b/src/test/java/cz/cvut/kbss/termit/service/document/TextAnalysisServiceTest.java @@ -84,6 +84,7 @@ import static org.mockito.Mockito.when; import static org.springframework.test.web.client.match.MockRestRequestMatchers.content; import static org.springframework.test.web.client.match.MockRestRequestMatchers.header; +import static org.springframework.test.web.client.match.MockRestRequestMatchers.jsonPath; import static org.springframework.test.web.client.match.MockRestRequestMatchers.method; import static org.springframework.test.web.client.match.MockRestRequestMatchers.requestTo; import static org.springframework.test.web.client.response.MockRestResponseCreators.withServerError; @@ -143,14 +144,14 @@ void setUp() throws Exception { doCallRealMethod().when(documentManagerSpy).loadFileContent(any()); doNothing().when(documentManagerSpy).createBackup(any()); this.sut = new TextAnalysisService(restTemplate, config, documentManagerSpy, annotationGeneratorMock, - textAnalysisRecordDao, eventPublisher); + textAnalysisRecordDao, eventPublisher); } @Test void analyzeFileInvokesTextAnalysisServiceWithDocumentContent() { mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) - .andExpect(method(HttpMethod.POST)).andExpect(content().string(containsString(CONTENT))) - .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); + .andExpect(method(HttpMethod.POST)).andExpect(content().string(containsString(CONTENT))) + .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); sut.analyzeFile(file, Collections.singleton(vocabulary.getUri())); mockServer.verify(); } @@ -159,7 +160,8 @@ private void generateFile() throws IOException { final java.io.File dir = Files.createTempDirectory("termit").toFile(); dir.deleteOnExit(); 
config.getFile().setStorage(dir.getAbsolutePath()); - final java.io.File docDir = new java.io.File(dir.getAbsolutePath() + java.io.File.separator + file.getDirectoryName()); + final java.io.File docDir = new java.io.File( + dir.getAbsolutePath() + java.io.File.separator + file.getDirectoryName()); Files.createDirectory(docDir.toPath()); docDir.deleteOnExit(); final java.io.File content = new java.io.File( @@ -172,9 +174,9 @@ private void generateFile() throws IOException { void analyzeFilePassesRepositoryAndVocabularyContextToService() throws Exception { final TextAnalysisInput input = textAnalysisInput(); mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) - .andExpect(method(HttpMethod.POST)) - .andExpect(content().string(objectMapper.writeValueAsString(input))) - .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); + .andExpect(method(HttpMethod.POST)) + .andExpect(content().string(objectMapper.writeValueAsString(input))) + .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); sut.analyzeFile(file, Collections.singleton(vocabulary.getUri())); mockServer.verify(); } @@ -184,8 +186,8 @@ private TextAnalysisInput textAnalysisInput() { input.setContent(CONTENT); input.addVocabularyContext(vocabulary.getUri()); URI repositoryUrl = URI.create( - config.getRepository().getPublicUrl() - .orElse(config.getRepository().getUrl()) + config.getRepository().getPublicUrl() + .orElse(config.getRepository().getUrl()) ); input.setVocabularyRepository(repositoryUrl); input.setLanguage(config.getPersistence().getLanguage()); @@ -198,11 +200,11 @@ private TextAnalysisInput textAnalysisInput() { void analyzeFilePassesContentTypeAndAcceptHeadersToService() throws Exception { final TextAnalysisInput input = textAnalysisInput(); mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) - .andExpect(method(HttpMethod.POST)) - .andExpect(content().string(objectMapper.writeValueAsString(input))) - .andExpect(header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE)) - .andExpect(header(HttpHeaders.ACCEPT, MediaType.APPLICATION_XML_VALUE)) - .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); + .andExpect(method(HttpMethod.POST)) + .andExpect(content().string(objectMapper.writeValueAsString(input))) + .andExpect(header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE)) + .andExpect(header(HttpHeaders.ACCEPT, MediaType.APPLICATION_XML_VALUE)) + .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); sut.analyzeFile(file, Collections.singleton(vocabulary.getUri())); mockServer.verify(); } @@ -228,11 +230,11 @@ void analyzeFilePassesRepositoryUsernameAndPasswordToServiceWhenProvided() throw void analyzeFileThrowsWebServiceIntegrationExceptionOnError() throws Exception { final TextAnalysisInput input = textAnalysisInput(); mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) - .andExpect(method(HttpMethod.POST)) - .andExpect(content().string(objectMapper.writeValueAsString(input))) - .andRespond(withServerError()); + .andExpect(method(HttpMethod.POST)) + .andExpect(content().string(objectMapper.writeValueAsString(input))) + .andRespond(withServerError()); assertThrows(WebServiceIntegrationException.class, - () -> sut.analyzeFile(file, Collections.singleton(vocabulary.getUri()))); + () -> sut.analyzeFile(file, Collections.singleton(vocabulary.getUri()))); mockServer.verify(); } @@ -256,7 +258,8 @@ void analyzeFileInvokesAnnotationGeneratorWithResultFromTextAnalysisService() th void analyzeFileThrowsNotFoundExceptionWhenFileCannotBeFound() { 
file.setLabel("unknown.html"); final NotFoundException result = assertThrows(NotFoundException.class, - () -> sut.analyzeFile(file, Collections.singleton(vocabulary.getUri()))); + () -> sut.analyzeFile(file, Collections.singleton( + vocabulary.getUri()))); assertThat(result.getMessage(), containsString("not found on file system")); } @@ -264,11 +267,12 @@ void analyzeFileThrowsNotFoundExceptionWhenFileCannotBeFound() { void analyzeFileThrowsWebServiceIntegrationExceptionWhenRemoteServiceReturnsEmptyBody() throws Exception { final TextAnalysisInput input = textAnalysisInput(); mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) - .andExpect(method(HttpMethod.POST)) - .andExpect(content().string(objectMapper.writeValueAsString(input))) - .andRespond(withSuccess()); + .andExpect(method(HttpMethod.POST)) + .andExpect(content().string(objectMapper.writeValueAsString(input))) + .andRespond(withSuccess()); final WebServiceIntegrationException result = assertThrows(WebServiceIntegrationException.class, - () -> sut.analyzeFile(file, Collections.singleton(vocabulary.getUri()))); + () -> sut.analyzeFile(file, Collections.singleton( + vocabulary.getUri()))); assertThat(result.getMessage(), containsString("empty response")); mockServer.verify(); } @@ -290,13 +294,13 @@ void analyzeFileCreatesFileBackupBeforeInvokingAnnotationGenerator() throws Exce @Test void analyzeFilePassesRepositoryAndSpecifiedVocabularyContextsToService() throws Exception { final Set vocabs = IntStream.range(0, 5).mapToObj(i -> Generator.generateUri()) - .collect(Collectors.toSet()); + .collect(Collectors.toSet()); final TextAnalysisInput expected = textAnalysisInput(); expected.setVocabularyContexts(vocabs); mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) - .andExpect(method(HttpMethod.POST)) - .andExpect(content().string(objectMapper.writeValueAsString(expected))) - .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); + .andExpect(method(HttpMethod.POST)) + .andExpect(content().string(objectMapper.writeValueAsString(expected))) + .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); sut.analyzeFile(file, vocabs); mockServer.verify(); } @@ -305,9 +309,9 @@ void analyzeFilePassesRepositoryAndSpecifiedVocabularyContextsToService() throws void analyzeFileBacksUpFileContentBeforeSavingNewAnalyzedContent() throws Exception { final TextAnalysisInput input = textAnalysisInput(); mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) - .andExpect(method(HttpMethod.POST)) - .andExpect(content().string(objectMapper.writeValueAsString(input))) - .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); + .andExpect(method(HttpMethod.POST)) + .andExpect(content().string(objectMapper.writeValueAsString(input))) + .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); sut.analyzeFile(file, Collections.singleton(vocabulary.getUri())); mockServer.verify(); final InOrder inOrder = Mockito.inOrder(documentManagerSpy, annotationGeneratorMock); @@ -318,8 +322,8 @@ void analyzeFileBacksUpFileContentBeforeSavingNewAnalyzedContent() throws Except @Test void analyzeFileCreatesTextAnalysisRecord() { mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) - .andExpect(method(HttpMethod.POST)).andExpect(content().string(containsString(CONTENT))) - .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); + .andExpect(method(HttpMethod.POST)).andExpect(content().string(containsString(CONTENT))) + .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); sut.analyzeFile(file, 
Collections.singleton(vocabulary.getUri())); final ArgumentCaptor captor = ArgumentCaptor.forClass(TextAnalysisRecord.class); verify(textAnalysisRecordDao).persist(captor.capture()); @@ -424,7 +428,8 @@ void analyzeFilePublishesAnalysisFinishedEvent() { .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); sut.analyzeFile(file, Collections.singleton(vocabulary.getUri())); - ArgumentCaptor eventCaptor = ArgumentCaptor.forClass(FileTextAnalysisFinishedEvent.class); + ArgumentCaptor eventCaptor = ArgumentCaptor.forClass( + FileTextAnalysisFinishedEvent.class); verify(eventPublisher).publishEvent(eventCaptor.capture()); assertNotNull(eventCaptor.getValue()); assertEquals(file.getUri(), eventCaptor.getValue().getFileUri()); @@ -444,10 +449,33 @@ void analyzeTermDefinitionPublishesAnalysisFinishedEvent() throws JsonProcessing sut.analyzeTermDefinition(term, vocabulary.getUri()); - ArgumentCaptor eventCaptor = ArgumentCaptor.forClass(TermDefinitionTextAnalysisFinishedEvent.class); + ArgumentCaptor eventCaptor = ArgumentCaptor.forClass( + TermDefinitionTextAnalysisFinishedEvent.class); verify(eventPublisher).publishEvent(eventCaptor.capture()); assertNotNull(eventCaptor.getValue()); assertEquals(term.getUri(), eventCaptor.getValue().getTermUri()); assertEquals(vocabulary.getUri(), eventCaptor.getValue().getVocabularyIri()); } + + @Test + void analyzeFileSetsFileLanguageInTextAnalysisInvocationInput() { + file.setLanguage("cs"); + mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) + .andExpect(method(HttpMethod.POST)) + .andExpect(jsonPath("$.language").value("cs")) + .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); + sut.analyzeFile(file, Collections.singleton(vocabulary.getUri())); + mockServer.verify(); + } + + @Test + void analyzeFileUsesConfiguredPersistenceLanguageInTextAnalysisInvocationInputWhenFileLanguageIsNotSet() { + file.setLanguage(null); + mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) + .andExpect(method(HttpMethod.POST)) + .andExpect(jsonPath("$.language").value(Environment.LANGUAGE)) + .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); + sut.analyzeFile(file, Collections.singleton(vocabulary.getUri())); + mockServer.verify(); + } } From 4db5145ab18225bf2e1327f9222258980b451766 Mon Sep 17 00:00:00 2001 From: Martin Ledvinka Date: Thu, 7 Nov 2024 16:55:06 +0100 Subject: [PATCH 04/49] [kbss-cvut/termit-ui#553] Allow retrieving list of languages used in vocabulary. --- .../termit/persistence/dao/VocabularyDao.java | 162 +++++++++++------- .../termit/rest/VocabularyController.java | 16 ++ .../service/business/VocabularyService.java | 13 +- .../VocabularyRepositoryService.java | 11 ++ .../persistence/dao/VocabularyDaoTest.java | 20 +++ .../termit/rest/VocabularyControllerTest.java | 12 ++ src/test/resources/application.yml | 1 + 7 files changed, 170 insertions(+), 65 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java index d0cd42ea8..fec898b75 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java @@ -218,10 +218,13 @@ public Vocabulary update(Vocabulary entity) { /** * Forcefully removes the specified vocabulary. *
- * This deletes the whole graph of the vocabulary, all terms in the vocabulary's glossary and then removes the vocabulary itself. Extreme caution - * should be exercised when using this method. All relevant data, including documents and files, will be dropped. + * This deletes the whole graph of the vocabulary, all terms in the vocabulary's glossary and then removes the + * vocabulary itself. Extreme caution should be exercised when using this method. All relevant data, including + * documents and files, will be dropped. *
- * Publishes {@link VocabularyWillBeRemovedEvent} before the actual removal to allow other services to clean up related resources (e.g., delete the document). + * Publishes {@link VocabularyWillBeRemovedEvent} before the actual removal to allow other services to clean up + * related resources (e.g., delete the document). + * * @param entity The vocabulary to delete */ @ModifiesData @@ -236,9 +239,9 @@ public void remove(Vocabulary entity) { *
      * Forcefully removes the specified vocabulary.
      * <p>
- * This deletes all terms in the vocabulary's glossary and then removes the vocabulary itself. - * Extreme caution should be exercised when using this method, - * as it does not check for any references or usage and just drops all the relevant data. + * This deletes all terms in the vocabulary's glossary and then removes the vocabulary itself. Extreme caution + * should be exercised when using this method, as it does not check for any references or usage and just drops all + * the relevant data. *
* The document is not removed. */ @@ -248,19 +251,19 @@ public void removeVocabularyKeepDocument(Vocabulary entity) { /** *
-     * Does not publish the {@link VocabularyWillBeRemovedEvent}.<br>
-     * You should use {@link #remove(Vocabulary)} instead.
+     * Does not publish the {@link VocabularyWillBeRemovedEvent}.<br> You should use {@link #remove(Vocabulary)}
+     * instead.
      * <p>
      * Forcefully removes the specified vocabulary.
      * <p>
* This deletes all terms in the vocabulary's glossary and then removes the vocabulary itself. Extreme caution * should be exercised when using this method, as it does not check for any references or usage and just drops all * the relevant data. - * @param entity The vocabulary to delete - * @param dropGraph if false, - * executes {@code src/main/resources/query/remove/removeGlossaryTerms.ru} removing terms, - * their relations, model, glossary and vocabulary itself, keeps the document. - * When true, the whole vocabulary graph is dropped. + * + * @param entity The vocabulary to delete + * @param dropGraph if false, executes {@code src/main/resources/query/remove/removeGlossaryTerms.ru} removing + * terms, their relations, model, glossary and vocabulary itself, keeps the document. When true, + * the whole vocabulary graph is dropped. */ private void removeVocabulary(Vocabulary entity, boolean dropGraph) { Objects.requireNonNull(entity); @@ -268,7 +271,7 @@ private void removeVocabulary(Vocabulary entity, boolean dropGraph) { try { final URI vocabularyContext = contextMapper.getVocabularyContext(entity.getUri()); - if(dropGraph) { + if (dropGraph) { // drops whole named graph em.createNativeQuery("DROP GRAPH ?context") .setParameter("context", vocabularyContext) @@ -317,8 +320,8 @@ public Optional findGlossary(URI uri) { } /** - * Checks whether terms from the {@code subjectVocabulary} reference (as parent terms) any terms from the {@code - * targetVocabulary}. + * Checks whether terms from the {@code subjectVocabulary} reference (as parent terms) any terms from the + * {@code targetVocabulary}. * * @param subjectVocabulary Subject vocabulary identifier * @param targetVocabulary Target vocabulary identifier @@ -395,7 +398,7 @@ public List getChangesOfContent(Vocabulary vocabulary) { * Gets content change records of the specified vocabulary. * * @param vocabulary Vocabulary whose content changes to get - * @param pageReq Specification of the size and number of the page to return + * @param pageReq Specification of the size and number of the page to return * @return List of change records, ordered by date in descending order */ public List getDetailedHistoryOfContent(Vocabulary vocabulary, Pageable pageReq) { @@ -403,25 +406,27 @@ public List getDetailedHistoryOfContent(Vocabulary vocabul return createDetailedContentChangesQuery(vocabulary, pageReq).getResultList(); } - private TypedQuery createDetailedContentChangesQuery(Vocabulary vocabulary, Pageable pageReq) { + private TypedQuery createDetailedContentChangesQuery(Vocabulary vocabulary, + Pageable pageReq) { return em.createNativeQuery(""" - SELECT ?record WHERE { - ?term ?inVocabulary ?vocabulary ; - a ?termType . - ?record a ?changeRecord ; - ?relatesTo ?term ; - ?hasTime ?timestamp . - OPTIONAL { ?record ?hasChangedAttribute ?attribute . } - } ORDER BY DESC(?timestamp) ?attribute - """, AbstractChangeRecord.class) + SELECT ?record WHERE { + ?term ?inVocabulary ?vocabulary ; + a ?termType . + ?record a ?changeRecord ; + ?relatesTo ?term ; + ?hasTime ?timestamp . + OPTIONAL { ?record ?hasChangedAttribute ?attribute . 
} + } ORDER BY DESC(?timestamp) ?attribute + """, AbstractChangeRecord.class) .setParameter("inVocabulary", - URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_je_pojmem_ze_slovniku)) + URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_je_pojmem_ze_slovniku)) .setParameter("vocabulary", vocabulary) - .setParameter("termType", URI.create(SKOS.CONCEPT)) + .setParameter("termType", URI.create(SKOS.CONCEPT)) .setParameter("changeRecord", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_c_zmena)) .setParameter("relatesTo", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_zmenenou_entitu)) .setParameter("hasTime", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_datum_a_cas_modifikace)) - .setParameter("hasChangedAttribute", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_zmeneny_atribut)) + .setParameter("hasChangedAttribute", + URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_zmeneny_atribut)) .setFirstResult((int) pageReq.getOffset()) .setMaxResults(pageReq.getPageSize()); } @@ -576,16 +581,17 @@ public List getVocabularyRelations(Vocabulary vocabulary, Collect try { return em.createNativeQuery(""" - SELECT DISTINCT ?object ?relation ?subject { - ?object a ?vocabularyType ; - ?relation ?subject . - FILTER(?object != ?subject) . - FILTER(?relation NOT IN (?excluded)) . - } ORDER BY ?object ?relation - """, "RDFStatement") + SELECT DISTINCT ?object ?relation ?subject { + ?object a ?vocabularyType ; + ?relation ?subject . + FILTER(?object != ?subject) . + FILTER(?relation NOT IN (?excluded)) . + } ORDER BY ?object ?relation + """, "RDFStatement") .setParameter("subject", vocabularyUri) - .setParameter("excluded", excludedRelations) - .setParameter("vocabularyType", URI.create(EntityToOwlClassMapper.getOwlClassForEntity(Vocabulary.class))) + .setParameter("excluded", excludedRelations) + .setParameter("vocabularyType", + URI.create(EntityToOwlClassMapper.getOwlClassForEntity(Vocabulary.class))) .getResultList(); } catch (RuntimeException e) { throw new PersistenceException(e); @@ -603,31 +609,31 @@ public List getTermRelations(Vocabulary vocabulary) { try { return em.createNativeQuery(""" - SELECT DISTINCT ?object ?relation ?subject WHERE { - ?term a ?termType; - ?inVocabulary ?vocabulary . - - { - ?term ?relation ?secondTerm . - ?secondTerm a ?termType; - ?inVocabulary ?secondVocabulary . - - BIND(?term as ?object) - BIND(?secondTerm as ?subject) - } UNION { - ?secondTerm ?relation ?term . - ?secondTerm a ?termType; - ?inVocabulary ?secondVocabulary . - - BIND(?secondTerm as ?object) - BIND(?term as ?subject) - } - - FILTER(?relation IN (?deniedRelations)) - FILTER(?object != ?subject) - FILTER(?secondVocabulary != ?vocabulary) - } ORDER by ?object ?relation ?subject - """, "RDFStatement" + SELECT DISTINCT ?object ?relation ?subject WHERE { + ?term a ?termType; + ?inVocabulary ?vocabulary . + + { + ?term ?relation ?secondTerm . + ?secondTerm a ?termType; + ?inVocabulary ?secondVocabulary . + + BIND(?term as ?object) + BIND(?secondTerm as ?subject) + } UNION { + ?secondTerm ?relation ?term . + ?secondTerm a ?termType; + ?inVocabulary ?secondVocabulary . 
+ + BIND(?secondTerm as ?object) + BIND(?term as ?subject) + } + + FILTER(?relation IN (?deniedRelations)) + FILTER(?object != ?subject) + FILTER(?secondVocabulary != ?vocabulary) + } ORDER by ?object ?relation ?subject + """, "RDFStatement" ).setMaxResults(DEFAULT_PAGE_SIZE) .setParameter("termType", termType) .setParameter("inVocabulary", inVocabulary) @@ -638,4 +644,32 @@ public List getTermRelations(Vocabulary vocabulary) { throw new PersistenceException(e); } } + + /** + * Returns the list of all distinct languages (language tags) used by terms in the specified vocabulary. + * + * @param vocabularyUri Vocabulary identifier + * @return List of distinct languages + */ + public List getLanguages(URI vocabularyUri) { + Objects.requireNonNull(vocabularyUri); + try { + return em.createNativeQuery(""" + SELECT DISTINCT ?lang WHERE { + ?x a ?type ; + ?inVocabulary ?vocabulary ; + ?labelProp ?label . + BIND (LANG(?label) as ?lang) + } + """, String.class) + .setParameter("type", URI.create(SKOS.CONCEPT)) + .setParameter("inVocabulary", + URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_je_pojmem_ze_slovniku)) + .setParameter("vocabulary", vocabularyUri) + .setParameter("labelProp", URI.create(SKOS.PREF_LABEL)) + .getResultList(); + } catch (RuntimeException e) { + throw new PersistenceException(e); + } + } } diff --git a/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java b/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java index e8cd5afb4..2f4d4d1a9 100644 --- a/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java +++ b/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java @@ -311,6 +311,22 @@ public List getDetailedHistoryOfContent( return vocabularyService.getDetailedHistoryOfContent(vocabulary, pageReq); } + @Operation(security = {@SecurityRequirement(name = "bearer-key")}, + description = "Gets a list of languages used in the vocabulary.") + @ApiResponses({ + @ApiResponse(responseCode = "200", description = "List of languages.") + }) + @GetMapping(value = "/{localName}/languages", produces = {MediaType.APPLICATION_JSON_VALUE, JsonLd.MEDIA_TYPE}) + public List getLanguages( + @Parameter(description = ApiDoc.ID_LOCAL_NAME_DESCRIPTION, + example = ApiDoc.ID_LOCAL_NAME_EXAMPLE) @PathVariable String localName, + @Parameter(description = ApiDoc.ID_NAMESPACE_DESCRIPTION, + example = ApiDoc.ID_NAMESPACE_EXAMPLE) @RequestParam(name = QueryParams.NAMESPACE, + required = false) Optional namespace) { + final URI vocabularyUri = resolveVocabularyUri(localName, namespace); + return vocabularyService.getLanguages(vocabularyUri); + } + @Operation(security = {@SecurityRequirement(name = "bearer-key")}, description = "Updates metadata of vocabulary with the specified identifier.") @ApiResponses({ diff --git a/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java b/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java index fe6d9b20a..6f265656c 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java @@ -316,7 +316,7 @@ public List getChangesOfContent(Vocabulary vocabulary) { * Gets content change records of the specified vocabulary. 
* * @param vocabulary Vocabulary whose content changes to get - * @param pageReq Specification of the size and number of the page to return + * @param pageReq Specification of the size and number of the page to return * @return List of change records, ordered by date in descending order */ public List getDetailedHistoryOfContent(Vocabulary vocabulary, Pageable pageReq) { @@ -522,6 +522,17 @@ public AccessLevel getAccessLevel(Vocabulary vocabulary) { return authorizationService.getAccessLevel(vocabulary); } + /** + * Gets the list of languages used in the specified vocabulary. + * + * @param vocabularyUri Vocabulary identifier + * @return List of languages + */ + @PreAuthorize("@vocabularyAuthorizationService.canRead(#vocabularyUri)") + public List getLanguages(URI vocabularyUri) { + return repositoryService.getLanguages(vocabularyUri); + } + @Override public void setApplicationEventPublisher(@Nonnull ApplicationEventPublisher eventPublisher) { this.eventPublisher = eventPublisher; diff --git a/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java b/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java index 6be0b86d4..6cffad957 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java @@ -372,4 +372,15 @@ public Vocabulary findVersionValidAt(Vocabulary vocabulary, Instant at) { public PrefixDeclaration resolvePrefix(URI vocabularyUri) { return vocabularyDao.resolvePrefix(vocabularyUri); } + + /** + * Returns the list of all distinct languages (language tags) used by terms in the specified vocabulary. + * + * @param vocabularyUri Vocabulary identifier + * @return List of distinct languages + */ + @Transactional(readOnly = true) + public List getLanguages(URI vocabularyUri) { + return vocabularyDao.getLanguages(vocabularyUri); + } } diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java index 23b72777c..c7d063d1f 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java @@ -83,6 +83,7 @@ import static cz.cvut.kbss.termit.environment.util.ContainsSameEntities.containsSameEntities; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; @@ -927,4 +928,23 @@ void getAnyExternalRelationsReturnsTermsWithBothRelations(URI termRelation) { } }); } + + @Test + void getLanguagesReturnsDistinctLanguagesUsedByVocabularyTerms() { + final Vocabulary vocabulary = Generator.generateVocabularyWithId(); + final Term term = Generator.generateTermWithId(vocabulary.getUri()); + final Term term2 = Generator.generateTermWithId(vocabulary.getUri()); + term2.getLabel().set("cs", "Název v češtině"); + transactional(() -> { + em.persist(vocabulary, descriptorFor(vocabulary)); + em.persist(term, descriptorFactory.termDescriptor(term)); + em.persist(term2, descriptorFactory.termDescriptor(term2)); + Generator.addTermInVocabularyRelationship(term, vocabulary.getUri(), em); + Generator.addTermInVocabularyRelationship(term2, vocabulary.getUri(), em); + }); 
+ + final List languages = sut.getLanguages(vocabulary.getUri()); + assertEquals(2, languages.size()); + assertThat(languages, hasItems(Environment.LANGUAGE, "cs")); + } } diff --git a/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java b/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java index 0d1c7444d..32d3aa47c 100644 --- a/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java +++ b/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java @@ -642,4 +642,16 @@ void getExcelTemplateFileReturnsExcelTemplateFileRetrievedFromServiceAsAttachmen assertThat(mvcResult.getResponse().getHeader(HttpHeaders.CONTENT_DISPOSITION), containsString("filename=\"termit-import.xlsx\"")); } + + @Test + void getLanguagesRetrievesAndReturnsListOfLanguagesUsedInVocabulary() throws Exception { + when(idResolverMock.resolveIdentifier(NAMESPACE, FRAGMENT)).thenReturn(VOCABULARY_URI); + final List languages = List.of(Environment.LANGUAGE, "cs", "de"); + when(serviceMock.getLanguages(VOCABULARY_URI)).thenReturn(languages); + + final MvcResult mvcResult = mockMvc.perform(get(PATH + "/" + FRAGMENT + "/languages").queryParam(QueryParams.NAMESPACE, NAMESPACE)).andReturn(); + final List result = readValue(mvcResult, new TypeReference>() {}); + assertEquals(languages, result); + verify(serviceMock).getLanguages(VOCABULARY_URI); + } } diff --git a/src/test/resources/application.yml b/src/test/resources/application.yml index 56f473bd6..9365e2b7f 100644 --- a/src/test/resources/application.yml +++ b/src/test/resources/application.yml @@ -1,3 +1,4 @@ +application.version: DEV spring: servlet: multipart: From 78765d031b81aeb414aa2fb9fda4f3f150f3a131 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Sat, 19 Oct 2024 15:42:02 +0200 Subject: [PATCH 05/49] [Enhancement kbss-cvut/termit-ui#530] Create DeleteChangeRecord class and BeforeAssetDeleteEvent Created classes required for recording an asset removal, implemented dispatching asset delete event and logic for ChangeTracker handling the event. --- .../termit/event/BeforeAssetDeleteEvent.java | 19 +++++ .../changetracking/DeleteChangeRecord.java | 84 +++++++++++++++++++ .../changetracking/PersistChangeRecord.java | 3 + .../changetracking/UpdateChangeRecord.java | 3 + .../termit/persistence/dao/BaseAssetDao.java | 8 ++ .../termit/persistence/dao/VocabularyDao.java | 2 + .../service/changetracking/ChangeTracker.java | 28 +++++++ .../resources/ontologies/popis-dat-model.ttl | 4 + 8 files changed, 151 insertions(+) create mode 100644 src/main/java/cz/cvut/kbss/termit/event/BeforeAssetDeleteEvent.java create mode 100644 src/main/java/cz/cvut/kbss/termit/model/changetracking/DeleteChangeRecord.java diff --git a/src/main/java/cz/cvut/kbss/termit/event/BeforeAssetDeleteEvent.java b/src/main/java/cz/cvut/kbss/termit/event/BeforeAssetDeleteEvent.java new file mode 100644 index 000000000..e8e6e363d --- /dev/null +++ b/src/main/java/cz/cvut/kbss/termit/event/BeforeAssetDeleteEvent.java @@ -0,0 +1,19 @@ +package cz.cvut.kbss.termit.event; + +import cz.cvut.kbss.termit.model.Asset; +import org.springframework.context.ApplicationEvent; + +/** + * Event published before an asset is deleted. 
+ */ +public class BeforeAssetDeleteEvent> extends ApplicationEvent { + final T asset; + public BeforeAssetDeleteEvent(Object source, T asset) { + super(source); + this.asset = asset; + } + + public T getAsset() { + return asset; + } +} diff --git a/src/main/java/cz/cvut/kbss/termit/model/changetracking/DeleteChangeRecord.java b/src/main/java/cz/cvut/kbss/termit/model/changetracking/DeleteChangeRecord.java new file mode 100644 index 000000000..437df6f22 --- /dev/null +++ b/src/main/java/cz/cvut/kbss/termit/model/changetracking/DeleteChangeRecord.java @@ -0,0 +1,84 @@ +package cz.cvut.kbss.termit.model.changetracking; + +import cz.cvut.kbss.jopa.model.MultilingualString; +import cz.cvut.kbss.jopa.model.annotations.OWLAnnotationProperty; +import cz.cvut.kbss.jopa.model.annotations.OWLObjectProperty; +import cz.cvut.kbss.jopa.model.annotations.ParticipationConstraints; +import cz.cvut.kbss.jopa.vocabulary.DC; +import cz.cvut.kbss.termit.model.Asset; +import cz.cvut.kbss.termit.util.Vocabulary; +import jakarta.annotation.Nonnull; + +import java.io.Serializable; +import java.net.URI; +import java.util.Objects; + +/** + * Represents a record of asset deletion. + * @param The label type, {@link String} or {@link MultilingualString} + */ +//@OWLClass(iri = Vocabulary.s_c_smazani_entity) TODO: ontology for DeleteChangeRecord +public class DeleteChangeRecord extends AbstractChangeRecord { + @ParticipationConstraints(nonEmpty = true) + @OWLAnnotationProperty(iri = DC.Terms.TITLE) + private T label; + + @OWLObjectProperty(iri = Vocabulary.s_p_je_pojmem_ze_slovniku) + private URI vocabulary; + + public DeleteChangeRecord(Asset changedEntity, URI vocabulary) { + super(changedEntity); + this.label = changedEntity.getLabel(); + this.vocabulary = vocabulary; + } + + public T getLabel() { + return label; + } + + public void setLabel(T label) { + this.label = label; + } + + public URI getVocabulary() { + return vocabulary; + } + + public void setVocabulary(URI vocabulary) { + this.vocabulary = vocabulary; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof DeleteChangeRecord that)) { + return false; + } + if (!super.equals(o)) { + return false; + } + return Objects.equals(label, that.label) && Objects.equals(vocabulary, that.vocabulary); + } + + @Override + public String toString() { + return "DeleteChangeRecord{" + + super.toString() + + ", label=" + label + + (vocabulary != null ? 
", vocabulary=" + vocabulary : "") + + '}'; + } + + @Override + public int compareTo(@Nonnull AbstractChangeRecord o) { + if (o instanceof UpdateChangeRecord) { + return 1; + } + if (o instanceof PersistChangeRecord) { + return 1; + } + return super.compareTo(o); + } +} diff --git a/src/main/java/cz/cvut/kbss/termit/model/changetracking/PersistChangeRecord.java b/src/main/java/cz/cvut/kbss/termit/model/changetracking/PersistChangeRecord.java index 6fccde3d6..ed1c675af 100644 --- a/src/main/java/cz/cvut/kbss/termit/model/changetracking/PersistChangeRecord.java +++ b/src/main/java/cz/cvut/kbss/termit/model/changetracking/PersistChangeRecord.java @@ -42,6 +42,9 @@ public int compareTo(@Nonnull AbstractChangeRecord o) { if (o instanceof UpdateChangeRecord) { return -1; } + if (o instanceof DeleteChangeRecord) { + return -1; + } return super.compareTo(o); } } diff --git a/src/main/java/cz/cvut/kbss/termit/model/changetracking/UpdateChangeRecord.java b/src/main/java/cz/cvut/kbss/termit/model/changetracking/UpdateChangeRecord.java index e1220f9f4..93074f63e 100644 --- a/src/main/java/cz/cvut/kbss/termit/model/changetracking/UpdateChangeRecord.java +++ b/src/main/java/cz/cvut/kbss/termit/model/changetracking/UpdateChangeRecord.java @@ -105,6 +105,9 @@ public int compareTo(@Nonnull AbstractChangeRecord o) { if (o instanceof PersistChangeRecord) { return 1; } + if (o instanceof DeleteChangeRecord) { + return -1; + } return super.compareTo(o); } } diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/BaseAssetDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/BaseAssetDao.java index bb6e26400..297af3676 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/BaseAssetDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/BaseAssetDao.java @@ -21,6 +21,7 @@ import cz.cvut.kbss.termit.dto.RecentlyCommentedAsset; import cz.cvut.kbss.termit.event.AssetPersistEvent; import cz.cvut.kbss.termit.event.AssetUpdateEvent; +import cz.cvut.kbss.termit.event.BeforeAssetDeleteEvent; import cz.cvut.kbss.termit.exception.PersistenceException; import cz.cvut.kbss.termit.model.Asset; import cz.cvut.kbss.termit.model.User; @@ -40,6 +41,7 @@ * Base DAO implementation for assets managed by the application. * * @param Type of the asset + * @param Type of the asset's label */ public abstract class BaseAssetDao> extends BaseDao { @@ -65,6 +67,12 @@ public T update(T entity) { return super.update(entity); } + @Override + public void remove(T entity) { + eventPublisher.publishEvent(new BeforeAssetDeleteEvent(this, entity)); + super.remove(entity); + } + /** * Finds unique last commented assets. 
* diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java index d0cd42ea8..d0fbdc893 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java @@ -30,6 +30,7 @@ import cz.cvut.kbss.termit.dto.Snapshot; import cz.cvut.kbss.termit.event.AssetPersistEvent; import cz.cvut.kbss.termit.event.AssetUpdateEvent; +import cz.cvut.kbss.termit.event.BeforeAssetDeleteEvent; import cz.cvut.kbss.termit.event.RefreshLastModifiedEvent; import cz.cvut.kbss.termit.event.VocabularyWillBeRemovedEvent; import cz.cvut.kbss.termit.exception.PersistenceException; @@ -228,6 +229,7 @@ public Vocabulary update(Vocabulary entity) { @Override public void remove(Vocabulary entity) { eventPublisher.publishEvent(new VocabularyWillBeRemovedEvent(this, entity.getUri())); + eventPublisher.publishEvent(new BeforeAssetDeleteEvent(this, entity)); this.removeVocabulary(entity, true); } diff --git a/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeTracker.java b/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeTracker.java index b9497ab94..8a5b70fac 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeTracker.java +++ b/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeTracker.java @@ -19,9 +19,12 @@ import cz.cvut.kbss.termit.event.AssetPersistEvent; import cz.cvut.kbss.termit.event.AssetUpdateEvent; +import cz.cvut.kbss.termit.event.BeforeAssetDeleteEvent; +import cz.cvut.kbss.termit.model.AbstractTerm; import cz.cvut.kbss.termit.model.Asset; import cz.cvut.kbss.termit.model.User; import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; +import cz.cvut.kbss.termit.model.changetracking.DeleteChangeRecord; import cz.cvut.kbss.termit.model.changetracking.PersistChangeRecord; import cz.cvut.kbss.termit.model.changetracking.UpdateChangeRecord; import cz.cvut.kbss.termit.model.resource.File; @@ -37,6 +40,8 @@ import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; +import java.io.Serializable; +import java.net.URI; import java.time.Instant; import java.util.Collection; import java.util.stream.Collectors; @@ -114,4 +119,27 @@ public void onAssetPersistEvent(@Nonnull AssetPersistEvent event) { changeRecord.setTimestamp(Utils.timestamp()); changeRecordDao.persist(changeRecord, added); } + + /** + * Records an asset deletion from the repository. 
+ * + * @param event Event representing the asset deletion + */ + @Transactional + @EventListener + public > void onBeforeAssetDeleteEvent(@Nonnull BeforeAssetDeleteEvent event) { + final T asset = event.getAsset(); + LOG.trace("Recording deletion of asset {}.", asset); + + URI vocabulary = null; + if (asset instanceof AbstractTerm term) { + vocabulary = term.getVocabulary(); + } + + final AbstractChangeRecord changeRecord = new DeleteChangeRecord(asset, vocabulary); + changeRecord.setAuthor(securityUtils.getCurrentUser().toUser()); + changeRecord.setTimestamp(Utils.timestamp()); + + changeRecordDao.persist(changeRecord, asset); + } } diff --git a/src/test/resources/ontologies/popis-dat-model.ttl b/src/test/resources/ontologies/popis-dat-model.ttl index 180b9a5a1..04152b693 100644 --- a/src/test/resources/ontologies/popis-dat-model.ttl +++ b/src/test/resources/ontologies/popis-dat-model.ttl @@ -536,3 +536,7 @@ a-popis-dat-pojem:má-původní-hodnotu a ; rdfs:domain a-popis-dat-pojem:úprava-entity ; rdfs:subPropertyOf . + +a-popis-dat-pojem:smazání-entity + a , owl:Class ; + rdfs:subClassOf a-popis-dat-pojem:změna . From 82f6bb352c90ee0ab986476097154b7fb78d25e9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Sun, 27 Oct 2024 12:04:11 +0100 Subject: [PATCH 06/49] [Enhancement kbss-cvut/termit-ui#530] Remove generics for DeleteChangeRecord Using type capture instead. --- .../termit/event/BeforeAssetDeleteEvent.java | 8 ++-- .../changetracking/DeleteChangeRecord.java | 41 +++++++++++++------ .../termit/persistence/dao/BaseAssetDao.java | 3 +- .../termit/persistence/dao/VocabularyDao.java | 16 ++++---- .../service/changetracking/ChangeTracker.java | 7 ++-- 5 files changed, 46 insertions(+), 29 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/event/BeforeAssetDeleteEvent.java b/src/main/java/cz/cvut/kbss/termit/event/BeforeAssetDeleteEvent.java index e8e6e363d..ddbdee1e0 100644 --- a/src/main/java/cz/cvut/kbss/termit/event/BeforeAssetDeleteEvent.java +++ b/src/main/java/cz/cvut/kbss/termit/event/BeforeAssetDeleteEvent.java @@ -6,14 +6,14 @@ /** * Event published before an asset is deleted. 
*/ -public class BeforeAssetDeleteEvent> extends ApplicationEvent { - final T asset; - public BeforeAssetDeleteEvent(Object source, T asset) { +public class BeforeAssetDeleteEvent extends ApplicationEvent { + final Asset asset; + public BeforeAssetDeleteEvent(Object source, Asset asset) { super(source); this.asset = asset; } - public T getAsset() { + public Asset getAsset() { return asset; } } diff --git a/src/main/java/cz/cvut/kbss/termit/model/changetracking/DeleteChangeRecord.java b/src/main/java/cz/cvut/kbss/termit/model/changetracking/DeleteChangeRecord.java index 437df6f22..27657e808 100644 --- a/src/main/java/cz/cvut/kbss/termit/model/changetracking/DeleteChangeRecord.java +++ b/src/main/java/cz/cvut/kbss/termit/model/changetracking/DeleteChangeRecord.java @@ -2,41 +2,58 @@ import cz.cvut.kbss.jopa.model.MultilingualString; import cz.cvut.kbss.jopa.model.annotations.OWLAnnotationProperty; +import cz.cvut.kbss.jopa.model.annotations.OWLClass; import cz.cvut.kbss.jopa.model.annotations.OWLObjectProperty; import cz.cvut.kbss.jopa.model.annotations.ParticipationConstraints; -import cz.cvut.kbss.jopa.vocabulary.DC; +import cz.cvut.kbss.jopa.vocabulary.RDFS; import cz.cvut.kbss.termit.model.Asset; import cz.cvut.kbss.termit.util.Vocabulary; import jakarta.annotation.Nonnull; -import java.io.Serializable; import java.net.URI; import java.util.Objects; /** * Represents a record of asset deletion. - * @param The label type, {@link String} or {@link MultilingualString} */ -//@OWLClass(iri = Vocabulary.s_c_smazani_entity) TODO: ontology for DeleteChangeRecord -public class DeleteChangeRecord extends AbstractChangeRecord { +@OWLClass(iri = Vocabulary.s_c_smazani_entity) +public class DeleteChangeRecord extends AbstractChangeRecord { @ParticipationConstraints(nonEmpty = true) - @OWLAnnotationProperty(iri = DC.Terms.TITLE) - private T label; + @OWLAnnotationProperty(iri = RDFS.LABEL) + private MultilingualString label; @OWLObjectProperty(iri = Vocabulary.s_p_je_pojmem_ze_slovniku) private URI vocabulary; - public DeleteChangeRecord(Asset changedEntity, URI vocabulary) { + /** + * Creates a new instance. 
+ * @param changedEntity the changed asset + * @param vocabulary optional vocabulary URI + * @throws IllegalArgumentException If the label type is not String or MultilingualString + */ + public DeleteChangeRecord(Asset changedEntity, URI vocabulary) { super(changedEntity); - this.label = changedEntity.getLabel(); + + if (changedEntity.getLabel() instanceof String stringLabel) { + this.label = MultilingualString.create(stringLabel, null); + } else if (changedEntity.getLabel() instanceof MultilingualString multilingualLabel) { + this.label = multilingualLabel; + } else { + throw new IllegalArgumentException("Unsupported label type: " + changedEntity.getLabel().getClass()); + } + this.vocabulary = vocabulary; } - public T getLabel() { + public DeleteChangeRecord() { + super(); + } + + public MultilingualString getLabel() { return label; } - public void setLabel(T label) { + public void setLabel(MultilingualString label) { this.label = label; } @@ -53,7 +70,7 @@ public boolean equals(Object o) { if (this == o) { return true; } - if (!(o instanceof DeleteChangeRecord that)) { + if (!(o instanceof DeleteChangeRecord that)) { return false; } if (!super.equals(o)) { diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/BaseAssetDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/BaseAssetDao.java index 297af3676..831961df5 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/BaseAssetDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/BaseAssetDao.java @@ -41,7 +41,6 @@ * Base DAO implementation for assets managed by the application. * * @param Type of the asset - * @param Type of the asset's label */ public abstract class BaseAssetDao> extends BaseDao { @@ -69,7 +68,7 @@ public T update(T entity) { @Override public void remove(T entity) { - eventPublisher.publishEvent(new BeforeAssetDeleteEvent(this, entity)); + eventPublisher.publishEvent(new BeforeAssetDeleteEvent(this, entity)); super.remove(entity); } diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java index d0fbdc893..02230ea73 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java @@ -62,12 +62,12 @@ import java.time.Instant; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.Set; +import java.util.stream.Stream; import static cz.cvut.kbss.termit.util.Constants.DEFAULT_PAGE_SIZE; import static cz.cvut.kbss.termit.util.Constants.SKOS_CONCEPT_MATCH_RELATIONSHIPS; @@ -229,7 +229,7 @@ public Vocabulary update(Vocabulary entity) { @Override public void remove(Vocabulary entity) { eventPublisher.publishEvent(new VocabularyWillBeRemovedEvent(this, entity.getUri())); - eventPublisher.publishEvent(new BeforeAssetDeleteEvent(this, entity)); + eventPublisher.publishEvent(new BeforeAssetDeleteEvent(this, entity)); this.removeVocabulary(entity, true); } @@ -386,11 +386,13 @@ public List getChangesOfContent(Vocabulary vocabulary) { .setParameter("type", URI.create( cz.cvut.kbss.termit.util.Vocabulary.s_c_uprava_entity)).getResultList(); updates.forEach(u -> u.addType(cz.cvut.kbss.termit.util.Vocabulary.s_c_uprava_entity)); - final List result = new ArrayList<>(persists.size() + updates.size()); - result.addAll(persists); - result.addAll(updates); - 
Collections.sort(result); - return result; + final List deletitions = createContentChangesQuery(vocabulary) + .setParameter("type", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_c_smazani_entity)).getResultList(); + deletitions.forEach(d -> d.addType(cz.cvut.kbss.termit.util.Vocabulary.s_c_smazani_entity)); + return Stream.of(persists, updates, deletitions) + .flatMap(List::stream) + .sorted() + .toList(); } /** diff --git a/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeTracker.java b/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeTracker.java index 8a5b70fac..fb9ab565d 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeTracker.java +++ b/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeTracker.java @@ -40,7 +40,6 @@ import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; -import java.io.Serializable; import java.net.URI; import java.time.Instant; import java.util.Collection; @@ -127,8 +126,8 @@ public void onAssetPersistEvent(@Nonnull AssetPersistEvent event) { */ @Transactional @EventListener - public > void onBeforeAssetDeleteEvent(@Nonnull BeforeAssetDeleteEvent event) { - final T asset = event.getAsset(); + public void onBeforeAssetDeleteEvent(@Nonnull BeforeAssetDeleteEvent event) { + final Asset asset = event.getAsset(); LOG.trace("Recording deletion of asset {}.", asset); URI vocabulary = null; @@ -136,7 +135,7 @@ public > void onBeforeAssetDeleteEven vocabulary = term.getVocabulary(); } - final AbstractChangeRecord changeRecord = new DeleteChangeRecord(asset, vocabulary); + final AbstractChangeRecord changeRecord = new DeleteChangeRecord(asset, vocabulary); changeRecord.setAuthor(securityUtils.getCurrentUser().toUser()); changeRecord.setTimestamp(Utils.timestamp()); From e331541dde00404d59e1a6c85e9a3e4ecdfd560f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Sun, 27 Oct 2024 13:53:44 +0100 Subject: [PATCH 07/49] [Enhancement kbss-cvut/termit-ui#530] Remove vocabulary from DeleteChangeRecord The record is already saved in tha vocabulary context. --- .../changetracking/DeleteChangeRecord.java | 21 ++----------------- .../service/changetracking/ChangeTracker.java | 9 +------- 2 files changed, 3 insertions(+), 27 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/model/changetracking/DeleteChangeRecord.java b/src/main/java/cz/cvut/kbss/termit/model/changetracking/DeleteChangeRecord.java index 27657e808..1d2cdc98c 100644 --- a/src/main/java/cz/cvut/kbss/termit/model/changetracking/DeleteChangeRecord.java +++ b/src/main/java/cz/cvut/kbss/termit/model/changetracking/DeleteChangeRecord.java @@ -3,14 +3,12 @@ import cz.cvut.kbss.jopa.model.MultilingualString; import cz.cvut.kbss.jopa.model.annotations.OWLAnnotationProperty; import cz.cvut.kbss.jopa.model.annotations.OWLClass; -import cz.cvut.kbss.jopa.model.annotations.OWLObjectProperty; import cz.cvut.kbss.jopa.model.annotations.ParticipationConstraints; import cz.cvut.kbss.jopa.vocabulary.RDFS; import cz.cvut.kbss.termit.model.Asset; import cz.cvut.kbss.termit.util.Vocabulary; import jakarta.annotation.Nonnull; -import java.net.URI; import java.util.Objects; /** @@ -22,16 +20,12 @@ public class DeleteChangeRecord extends AbstractChangeRecord { @OWLAnnotationProperty(iri = RDFS.LABEL) private MultilingualString label; - @OWLObjectProperty(iri = Vocabulary.s_p_je_pojmem_ze_slovniku) - private URI vocabulary; - /** * Creates a new instance. 
* @param changedEntity the changed asset - * @param vocabulary optional vocabulary URI * @throws IllegalArgumentException If the label type is not String or MultilingualString */ - public DeleteChangeRecord(Asset changedEntity, URI vocabulary) { + public DeleteChangeRecord(Asset changedEntity) { super(changedEntity); if (changedEntity.getLabel() instanceof String stringLabel) { @@ -41,8 +35,6 @@ public DeleteChangeRecord(Asset changedEntity, URI vocabulary) { } else { throw new IllegalArgumentException("Unsupported label type: " + changedEntity.getLabel().getClass()); } - - this.vocabulary = vocabulary; } public DeleteChangeRecord() { @@ -57,14 +49,6 @@ public void setLabel(MultilingualString label) { this.label = label; } - public URI getVocabulary() { - return vocabulary; - } - - public void setVocabulary(URI vocabulary) { - this.vocabulary = vocabulary; - } - @Override public boolean equals(Object o) { if (this == o) { @@ -76,7 +60,7 @@ public boolean equals(Object o) { if (!super.equals(o)) { return false; } - return Objects.equals(label, that.label) && Objects.equals(vocabulary, that.vocabulary); + return Objects.equals(label, that.label); } @Override @@ -84,7 +68,6 @@ public String toString() { return "DeleteChangeRecord{" + super.toString() + ", label=" + label + - (vocabulary != null ? ", vocabulary=" + vocabulary : "") + '}'; } diff --git a/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeTracker.java b/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeTracker.java index fb9ab565d..a7a5876b7 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeTracker.java +++ b/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeTracker.java @@ -20,7 +20,6 @@ import cz.cvut.kbss.termit.event.AssetPersistEvent; import cz.cvut.kbss.termit.event.AssetUpdateEvent; import cz.cvut.kbss.termit.event.BeforeAssetDeleteEvent; -import cz.cvut.kbss.termit.model.AbstractTerm; import cz.cvut.kbss.termit.model.Asset; import cz.cvut.kbss.termit.model.User; import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; @@ -40,7 +39,6 @@ import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; -import java.net.URI; import java.time.Instant; import java.util.Collection; import java.util.stream.Collectors; @@ -130,12 +128,7 @@ public void onBeforeAssetDeleteEvent(@Nonnull BeforeAssetDeleteEvent event) { final Asset asset = event.getAsset(); LOG.trace("Recording deletion of asset {}.", asset); - URI vocabulary = null; - if (asset instanceof AbstractTerm term) { - vocabulary = term.getVocabulary(); - } - - final AbstractChangeRecord changeRecord = new DeleteChangeRecord(asset, vocabulary); + final AbstractChangeRecord changeRecord = new DeleteChangeRecord(asset); changeRecord.setAuthor(securityUtils.getCurrentUser().toUser()); changeRecord.setTimestamp(Utils.timestamp()); From 44541bf924035d748b8a23f72871746613ac8491 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Tue, 5 Nov 2024 14:44:35 +0100 Subject: [PATCH 08/49] [Enhancement kbss-cvut/termit-ui#530] Fix removePublishesEventAndDropsGraph test --- .../termit/persistence/dao/VocabularyDaoTest.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java index 23b72777c..02af22e4c 100644 --- 
a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java @@ -30,6 +30,7 @@ import cz.cvut.kbss.termit.event.AssetPersistEvent; import cz.cvut.kbss.termit.event.AssetUpdateEvent; import cz.cvut.kbss.termit.event.RefreshLastModifiedEvent; +import cz.cvut.kbss.termit.event.VocabularyEvent; import cz.cvut.kbss.termit.event.VocabularyWillBeRemovedEvent; import cz.cvut.kbss.termit.model.Glossary; import cz.cvut.kbss.termit.model.Model; @@ -761,10 +762,14 @@ void removePublishesEventAndDropsGraph() { transactional(() -> sut.remove(vocabulary)); - ArgumentCaptor eventCaptor = ArgumentCaptor.forClass(VocabularyWillBeRemovedEvent.class); - verify(eventPublisher).publishEvent(eventCaptor.capture()); + ArgumentCaptor eventCaptor = ArgumentCaptor.forClass(VocabularyWillBeRemovedEvent.class); + verify(eventPublisher, atLeastOnce()).publishEvent(eventCaptor.capture()); - VocabularyWillBeRemovedEvent event = eventCaptor.getValue(); + VocabularyWillBeRemovedEvent event = (VocabularyWillBeRemovedEvent) eventCaptor + .getAllValues().stream() + .filter(e -> e instanceof VocabularyWillBeRemovedEvent) + .findAny().orElseThrow(); + assertNotNull(event); assertEquals(event.getVocabularyIri(), vocabulary.getUri()); From 1486e9f7f4d628f21f058307b73f31ef3b44ae40 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Fri, 1 Nov 2024 09:26:34 +0100 Subject: [PATCH 09/49] [Enhancement kbss-cvut/termit-ui#520] Update vocabulary content history detailed endpoint allowing result filtering. --- .../VocabularyContentChangeFilterDto.java | 45 ++++++++++ .../termit/persistence/dao/VocabularyDao.java | 90 ++++++++++++++----- .../termit/rest/VocabularyController.java | 17 +++- .../service/business/VocabularyService.java | 5 +- .../VocabularyRepositoryService.java | 5 +- 5 files changed, 137 insertions(+), 25 deletions(-) create mode 100644 src/main/java/cz/cvut/kbss/termit/dto/filter/VocabularyContentChangeFilterDto.java diff --git a/src/main/java/cz/cvut/kbss/termit/dto/filter/VocabularyContentChangeFilterDto.java b/src/main/java/cz/cvut/kbss/termit/dto/filter/VocabularyContentChangeFilterDto.java new file mode 100644 index 000000000..c09fa7988 --- /dev/null +++ b/src/main/java/cz/cvut/kbss/termit/dto/filter/VocabularyContentChangeFilterDto.java @@ -0,0 +1,45 @@ +package cz.cvut.kbss.termit.dto.filter; + +import java.net.URI; + +/** + * Represents parameters for filtering vocabulary content changes. 
+ */ +public class VocabularyContentChangeFilterDto { + private String termName; + private String changedAttributeName; + private String authorName; + private URI changeType; + + public String getTermName() { + return termName; + } + + public void setTermName(String termName) { + this.termName = termName; + } + + public String getChangedAttributeName() { + return changedAttributeName; + } + + public void setChangedAttributeName(String changedAttributeName) { + this.changedAttributeName = changedAttributeName; + } + + public String getAuthorName() { + return authorName; + } + + public void setAuthorName(String authorName) { + this.authorName = authorName; + } + + public URI getChangeType() { + return changeType; + } + + public void setChangeType(URI changeType) { + this.changeType = changeType; + } +} diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java index 02230ea73..dab8b0619 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java @@ -21,6 +21,7 @@ import cz.cvut.kbss.jopa.model.query.Query; import cz.cvut.kbss.jopa.model.query.TypedQuery; import cz.cvut.kbss.jopa.vocabulary.DC; +import cz.cvut.kbss.jopa.vocabulary.RDFS; import cz.cvut.kbss.jopa.vocabulary.SKOS; import cz.cvut.kbss.termit.asset.provenance.ModifiesData; import cz.cvut.kbss.termit.asset.provenance.SupportsLastModification; @@ -28,6 +29,7 @@ import cz.cvut.kbss.termit.dto.PrefixDeclaration; import cz.cvut.kbss.termit.dto.RdfsStatement; import cz.cvut.kbss.termit.dto.Snapshot; +import cz.cvut.kbss.termit.dto.filter.VocabularyContentChangeFilterDto; import cz.cvut.kbss.termit.event.AssetPersistEvent; import cz.cvut.kbss.termit.event.AssetUpdateEvent; import cz.cvut.kbss.termit.event.BeforeAssetDeleteEvent; @@ -43,6 +45,7 @@ import cz.cvut.kbss.termit.model.validation.ValidationResult; import cz.cvut.kbss.termit.persistence.context.DescriptorFactory; import cz.cvut.kbss.termit.persistence.context.VocabularyContextMapper; +import cz.cvut.kbss.termit.persistence.dao.changetracking.ChangeTrackingContextResolver; import cz.cvut.kbss.termit.persistence.snapshot.AssetSnapshotLoader; import cz.cvut.kbss.termit.persistence.validation.VocabularyContentValidator; import cz.cvut.kbss.termit.service.snapshot.SnapshotProvider; @@ -88,6 +91,7 @@ public class VocabularyDao extends BaseAssetDao "} GROUP BY ?date HAVING (?cnt > 0) ORDER BY ?date"; private static final String REMOVE_GLOSSARY_TERMS_QUERY_FILE = "remove/removeGlossaryTerms.ru"; + private final ChangeTrackingContextResolver changeTrackingContextResolver; private volatile long lastModified; @@ -97,11 +101,13 @@ public class VocabularyDao extends BaseAssetDao @Autowired public VocabularyDao(EntityManager em, Configuration config, DescriptorFactory descriptorFactory, - VocabularyContextMapper contextMapper, ApplicationContext context) { + VocabularyContextMapper contextMapper, ApplicationContext context, + ChangeTrackingContextResolver changeTrackingContextResolver) { super(Vocabulary.class, em, config.getPersistence(), descriptorFactory); this.contextMapper = contextMapper; refreshLastModified(); this.context = context; + this.changeTrackingContextResolver = changeTrackingContextResolver; } @Override @@ -402,31 +408,75 @@ public List getChangesOfContent(Vocabulary vocabulary) { * @param pageReq Specification of the size and number of the page to return * @return List of change 
records, ordered by date in descending order */ - public List getDetailedHistoryOfContent(Vocabulary vocabulary, Pageable pageReq) { + public List getDetailedHistoryOfContent(Vocabulary vocabulary, VocabularyContentChangeFilterDto filter, Pageable pageReq) { Objects.requireNonNull(vocabulary); - return createDetailedContentChangesQuery(vocabulary, pageReq).getResultList(); + return createDetailedContentChangesQuery(vocabulary, filter, pageReq).getResultList(); } - private TypedQuery createDetailedContentChangesQuery(Vocabulary vocabulary, Pageable pageReq) { - return em.createNativeQuery(""" + private TypedQuery createDetailedContentChangesQuery(Vocabulary vocabulary, VocabularyContentChangeFilterDto filter, Pageable pageReq) { + TypedQuery query = em.createNativeQuery(""" SELECT ?record WHERE { - ?term ?inVocabulary ?vocabulary ; - a ?termType . - ?record a ?changeRecord ; - ?relatesTo ?term ; - ?hasTime ?timestamp . - OPTIONAL { ?record ?hasChangedAttribute ?attribute . } + GRAPH ?changeContext { + ?record a ?changeRecord . + } + ?changeRecord ?subClassOf+ ?zmena . + ?record ?relatesTo ?term ; + ?hasTime ?timestamp ; + ?hasAuthor ?author . + ?author ?hasFirstName ?firstName ; + ?hasLastName ?lastName . + BIND(CONCAT(?firstName, " ", ?lastName) as ?authorFullName) + OPTIONAL { + ?record ?hasChangedAttribute ?attribute . + OPTIONAL { + ?attribute ?hasRdfsLabel ?changedAttributeName . + } + } + OPTIONAL { + ?term ?inVocabulary ?vocabulary ; + a ?termType ; + ?hasLabel ?label . + } + OPTIONAL { + ?record ?hasRdfsLabel ?label . + } + BIND(?termName as ?termNameVal) + BIND(?authorName as ?authorNameVal) + BIND(?changedAttributeName as ?changedAttributeNameVal) + FILTER (!BOUND(?termNameVal) || CONTAINS(LCASE(?label), LCASE(?termName))) + FILTER (!BOUND(?authorNameVal) || CONTAINS(LCASE(?authorFullName), LCASE(?authorName))) + FILTER (!BOUND(?changedAttributeName) || !BOUND(?changedAttributeNameVal) || CONTAINS(LCASE(?changedAttributeName), LCASE(?attributeName))) } ORDER BY DESC(?timestamp) ?attribute """, AbstractChangeRecord.class) - .setParameter("inVocabulary", - URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_je_pojmem_ze_slovniku)) - .setParameter("vocabulary", vocabulary) - .setParameter("termType", URI.create(SKOS.CONCEPT)) - .setParameter("changeRecord", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_c_zmena)) - .setParameter("relatesTo", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_zmenenou_entitu)) - .setParameter("hasTime", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_datum_a_cas_modifikace)) - .setParameter("hasChangedAttribute", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_zmeneny_atribut)) - .setFirstResult((int) pageReq.getOffset()) + .setParameter("changeContext", changeTrackingContextResolver.resolveChangeTrackingContext(vocabulary)) + .setParameter("subClassOf", URI.create(RDFS.SUB_CLASS_OF)) + .setParameter("zmena", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_c_zmena)) + .setParameter("inVocabulary", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_je_pojmem_ze_slovniku)) + .setParameter("vocabulary", vocabulary) + .setParameter("termType", URI.create(SKOS.CONCEPT)) + .setParameter("relatesTo", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_zmenenou_entitu)) + .setParameter("hasTime", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_datum_a_cas_modifikace)) + .setParameter("hasChangedAttribute", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_zmeneny_atribut)) + .setParameter("hasLabel", URI.create(SKOS.PREF_LABEL)) 
// term label + .setParameter("hasAuthor", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_editora)) // record has author + .setParameter("hasFirstName", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_krestni_jmeno)) + .setParameter("hasLastName", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_prijmeni)) + .setParameter("hasRdfsLabel", URI.create(RDFS.LABEL)); // changed attribute label + + if(!Utils.isBlank(filter.getTermName())) { + query = query.setParameter("termName", filter.getTermName().trim()); + } + if (!Utils.isBlank(filter.getAuthorName())) { + query = query.setParameter("authorName", filter.getAuthorName().trim()); + } + if (filter.getChangeType() != null) { + query = query.setParameter("changeRecord", filter.getChangeType()); + } + if (!Utils.isBlank(filter.getChangedAttributeName())) { + query = query.setParameter("attributeName", filter.getChangedAttributeName().trim()); + } + + return query.setFirstResult((int) pageReq.getOffset()) .setMaxResults(pageReq.getPageSize()); } diff --git a/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java b/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java index e8cd5afb4..72aa526a6 100644 --- a/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java +++ b/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java @@ -22,6 +22,7 @@ import cz.cvut.kbss.termit.dto.RdfsStatement; import cz.cvut.kbss.termit.dto.Snapshot; import cz.cvut.kbss.termit.dto.acl.AccessControlListDto; +import cz.cvut.kbss.termit.dto.filter.VocabularyContentChangeFilterDto; import cz.cvut.kbss.termit.dto.listing.VocabularyDto; import cz.cvut.kbss.termit.model.Vocabulary; import cz.cvut.kbss.termit.model.acl.AccessControlRecord; @@ -301,6 +302,15 @@ public List getDetailedHistoryOfContent( @Parameter(description = ApiDoc.ID_NAMESPACE_DESCRIPTION, example = ApiDoc.ID_NAMESPACE_EXAMPLE) @RequestParam(name = QueryParams.NAMESPACE, required = false) Optional namespace, + @Parameter(description = "Term name to be used in filtering.") + @RequestParam(name = "term", required = false, defaultValue = "") String termName, + @Parameter(description = "Change type to be used in filtering.") + @RequestParam(name = "type", required = false) URI changeType, + @Parameter(description = "Author name to be used in filtering.") + @RequestParam(name = "author", required = false, defaultValue = "") String authorName, + @Parameter(description = "Changed attribute name to be used in filtering.") + @RequestParam(name = "attribute", required = false, defaultValue = "") String changedAttributeName, + @Parameter(description = ApiDocConstants.PAGE_SIZE_DESCRIPTION) @RequestParam( name = Constants.QueryParams.PAGE_SIZE, required = false, defaultValue = DEFAULT_PAGE_SIZE) Integer pageSize, @@ -308,7 +318,12 @@ public List getDetailedHistoryOfContent( name = Constants.QueryParams.PAGE, required = false, defaultValue = DEFAULT_PAGE) Integer pageNo) { final Pageable pageReq = createPageRequest(pageSize, pageNo); final Vocabulary vocabulary = vocabularyService.getReference(resolveVocabularyUri(localName, namespace)); - return vocabularyService.getDetailedHistoryOfContent(vocabulary, pageReq); + final VocabularyContentChangeFilterDto filter = new VocabularyContentChangeFilterDto(); + filter.setTermName(termName); + filter.setChangeType(changeType); + filter.setAuthorName(authorName); + filter.setChangedAttributeName(changedAttributeName); + return vocabularyService.getDetailedHistoryOfContent(vocabulary, filter, pageReq); } @Operation(security = 
{@SecurityRequirement(name = "bearer-key")}, diff --git a/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java b/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java index fe6d9b20a..a0f252171 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java @@ -22,6 +22,7 @@ import cz.cvut.kbss.termit.dto.RdfsStatement; import cz.cvut.kbss.termit.dto.Snapshot; import cz.cvut.kbss.termit.dto.acl.AccessControlListDto; +import cz.cvut.kbss.termit.dto.filter.VocabularyContentChangeFilterDto; import cz.cvut.kbss.termit.dto.listing.TermDto; import cz.cvut.kbss.termit.dto.listing.VocabularyDto; import cz.cvut.kbss.termit.event.VocabularyContentModifiedEvent; @@ -319,8 +320,8 @@ public List getChangesOfContent(Vocabulary vocabulary) { * @param pageReq Specification of the size and number of the page to return * @return List of change records, ordered by date in descending order */ - public List getDetailedHistoryOfContent(Vocabulary vocabulary, Pageable pageReq) { - return repositoryService.getDetailedHistoryOfContent(vocabulary, pageReq); + public List getDetailedHistoryOfContent(Vocabulary vocabulary, VocabularyContentChangeFilterDto filter, Pageable pageReq) { + return repositoryService.getDetailedHistoryOfContent(vocabulary, filter, pageReq); } /** diff --git a/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java b/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java index 6be0b86d4..43e7cde00 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java @@ -21,6 +21,7 @@ import cz.cvut.kbss.termit.dto.PrefixDeclaration; import cz.cvut.kbss.termit.dto.RdfsStatement; import cz.cvut.kbss.termit.dto.Snapshot; +import cz.cvut.kbss.termit.dto.filter.VocabularyContentChangeFilterDto; import cz.cvut.kbss.termit.dto.listing.VocabularyDto; import cz.cvut.kbss.termit.dto.mapper.DtoMapper; import cz.cvut.kbss.termit.exception.AssetRemovalException; @@ -228,8 +229,8 @@ public List getChangesOfContent(Vocabulary vocabulary) { * @return List of change records, ordered by date in descending order */ @Transactional(readOnly = true) - public List getDetailedHistoryOfContent(Vocabulary vocabulary, Pageable pageReq) { - return vocabularyDao.getDetailedHistoryOfContent(vocabulary, pageReq); + public List getDetailedHistoryOfContent(Vocabulary vocabulary, VocabularyContentChangeFilterDto filter, Pageable pageReq) { + return vocabularyDao.getDetailedHistoryOfContent(vocabulary, filter, pageReq); } @CacheEvict(allEntries = true) From 39422e55332d540a0cca62a2c10df2e5e78f29b0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Mon, 4 Nov 2024 09:55:37 +0100 Subject: [PATCH 10/49] [Enhancement kbss-cvut/termit-ui#520] Fix Vocabulary content changes history query Query filters were using wrong variables, and removed unnecessary optional block. 
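For orientation, a minimal usage sketch of the filtering API added in the preceding commit. The DTO, the service method and the IRI constant are the ones introduced in this patch series; the class name FilteredContentHistoryExample, the method name loadFilteredHistory, the concrete filter values and the page size are assumed for illustration only.

import cz.cvut.kbss.termit.dto.filter.VocabularyContentChangeFilterDto;
import cz.cvut.kbss.termit.model.Vocabulary;
import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord;
import cz.cvut.kbss.termit.service.business.VocabularyService;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;

import java.net.URI;
import java.util.List;

class FilteredContentHistoryExample {

    // Sketch only: the service and the vocabulary are assumed to be obtained elsewhere (e.g. injected / loaded by URI).
    List<AbstractChangeRecord> loadFilteredHistory(VocabularyService vocabularyService, Vocabulary vocabulary) {
        final VocabularyContentChangeFilterDto filter = new VocabularyContentChangeFilterDto();
        filter.setTermName("needle");   // substring match against the term label
        filter.setAuthorName("Novák");  // substring match against the author's full name
        // Restrict results to update records; the IRI constant comes from the generated util Vocabulary class.
        filter.setChangeType(URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_c_uprava_entity));
        // changedAttributeName is left blank, so that criterion is not applied.

        final Pageable pageReq = PageRequest.of(0, 10); // first page, ten records
        return vocabularyService.getDetailedHistoryOfContent(vocabulary, filter, pageReq);
    }
}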
--- .../kbss/termit/persistence/dao/VocabularyDao.java | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java index dab8b0619..1e78b17d8 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java @@ -428,9 +428,7 @@ private TypedQuery createDetailedContentChangesQuery(Vocab BIND(CONCAT(?firstName, " ", ?lastName) as ?authorFullName) OPTIONAL { ?record ?hasChangedAttribute ?attribute . - OPTIONAL { - ?attribute ?hasRdfsLabel ?changedAttributeName . - } + ?attribute ?hasRdfsLabel ?changedAttributeName . } OPTIONAL { ?term ?inVocabulary ?vocabulary ; @@ -442,10 +440,10 @@ private TypedQuery createDetailedContentChangesQuery(Vocab } BIND(?termName as ?termNameVal) BIND(?authorName as ?authorNameVal) - BIND(?changedAttributeName as ?changedAttributeNameVal) - FILTER (!BOUND(?termNameVal) || CONTAINS(LCASE(?label), LCASE(?termName))) - FILTER (!BOUND(?authorNameVal) || CONTAINS(LCASE(?authorFullName), LCASE(?authorName))) - FILTER (!BOUND(?changedAttributeName) || !BOUND(?changedAttributeNameVal) || CONTAINS(LCASE(?changedAttributeName), LCASE(?attributeName))) + BIND(?attributeName as ?changedAttributeNameVal) + FILTER (!BOUND(?termNameVal) || CONTAINS(LCASE(?label), LCASE(?termNameVal))) + FILTER (!BOUND(?authorNameVal) || CONTAINS(LCASE(?authorFullName), LCASE(?authorNameVal))) + FILTER (!BOUND(?changedAttributeName) || !BOUND(?changedAttributeNameVal) || CONTAINS(LCASE(?changedAttributeName), LCASE(?changedAttributeName))) } ORDER BY DESC(?timestamp) ?attribute """, AbstractChangeRecord.class) .setParameter("changeContext", changeTrackingContextResolver.resolveChangeTrackingContext(vocabulary)) From 426c9c40fcf931a1491a8f1604cf50cde107bd6d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Tue, 5 Nov 2024 12:58:40 +0100 Subject: [PATCH 11/49] [Enhancement kbss-cvut/termit-ui#520] Add tests for delete records and vocabulary content detailed history endpoint --- .../termit/persistence/dao/VocabularyDao.java | 68 ++++--- .../persistence/dao/VocabularyDaoTest.java | 172 ++++++++++++++++++ .../changetracking/ChangeTrackingTest.java | 27 ++- 3 files changed, 238 insertions(+), 29 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java index 1e78b17d8..4744df72b 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java @@ -415,42 +415,52 @@ public List getDetailedHistoryOfContent(Vocabulary vocabul private TypedQuery createDetailedContentChangesQuery(Vocabulary vocabulary, VocabularyContentChangeFilterDto filter, Pageable pageReq) { TypedQuery query = em.createNativeQuery(""" - SELECT ?record WHERE { + SELECT DISTINCT ?record WHERE { +""" + /* Select anything from change context */ """ GRAPH ?changeContext { ?record a ?changeRecord . } +""" + /* The record should be a subclass of "zmena" */ """ ?changeRecord ?subClassOf+ ?zmena . ?record ?relatesTo ?term ; - ?hasTime ?timestamp ; - ?hasAuthor ?author . - ?author ?hasFirstName ?firstName ; - ?hasLastName ?lastName . - BIND(CONCAT(?firstName, " ", ?lastName) as ?authorFullName) - OPTIONAL { - ?record ?hasChangedAttribute ?attribute . 
- ?attribute ?hasRdfsLabel ?changedAttributeName . - } - OPTIONAL { - ?term ?inVocabulary ?vocabulary ; - a ?termType ; - ?hasLabel ?label . - } - OPTIONAL { - ?record ?hasRdfsLabel ?label . - } - BIND(?termName as ?termNameVal) - BIND(?authorName as ?authorNameVal) - BIND(?attributeName as ?changedAttributeNameVal) - FILTER (!BOUND(?termNameVal) || CONTAINS(LCASE(?label), LCASE(?termNameVal))) - FILTER (!BOUND(?authorNameVal) || CONTAINS(LCASE(?authorFullName), LCASE(?authorNameVal))) - FILTER (!BOUND(?changedAttributeName) || !BOUND(?changedAttributeNameVal) || CONTAINS(LCASE(?changedAttributeName), LCASE(?changedAttributeName))) + ?hasTime ?timestamp ; + ?hasAuthor ?author . +""" + /* Get author's name */ """ + ?author ?hasFirstName ?firstName ; + ?hasLastName ?lastName . + BIND(CONCAT(?firstName, " ", ?lastName) as ?authorFullName) +""" + /* When its update record, there will be a changed attribute */ """ + OPTIONAL { + ?record ?hasChangedAttribute ?attribute . + ?attribute ?hasRdfsLabel ?changedAttributeName . + } +""" + /* Get term's name (but the term might have been already deleted) */ """ + OPTIONAL { + ?term a ?termType ; + ?hasLabel ?label . + } +""" + /* then try to get the label from (delete) record */ """ + OPTIONAL { + ?record ?hasRdfsLabel ?label . + } +""" + /* When label is still not bound, the term was probably deleted, find the delete record and get the label from it */ """ + OPTIONAL { + FILTER(!BOUND(?label)) . + ?deleteRecord a ; + ?term; + ?label. + } + BIND(?termName as ?termNameVal) + BIND(?authorName as ?authorNameVal) + BIND(?attributeName as ?changedAttributeNameVal) + FILTER (!BOUND(?termNameVal) || CONTAINS(LCASE(?label), LCASE(?termNameVal))) + FILTER (!BOUND(?authorNameVal) || CONTAINS(LCASE(?authorFullName), LCASE(?authorNameVal))) + FILTER (!BOUND(?changedAttributeName) || !BOUND(?changedAttributeNameVal) || CONTAINS(LCASE(?changedAttributeName), LCASE(?changedAttributeName))) } ORDER BY DESC(?timestamp) ?attribute """, AbstractChangeRecord.class) .setParameter("changeContext", changeTrackingContextResolver.resolveChangeTrackingContext(vocabulary)) .setParameter("subClassOf", URI.create(RDFS.SUB_CLASS_OF)) .setParameter("zmena", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_c_zmena)) - .setParameter("inVocabulary", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_je_pojmem_ze_slovniku)) - .setParameter("vocabulary", vocabulary) .setParameter("termType", URI.create(SKOS.CONCEPT)) .setParameter("relatesTo", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_zmenenou_entitu)) .setParameter("hasTime", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_datum_a_cas_modifikace)) @@ -474,7 +484,11 @@ private TypedQuery createDetailedContentChangesQuery(Vocab query = query.setParameter("attributeName", filter.getChangedAttributeName().trim()); } - return query.setFirstResult((int) pageReq.getOffset()) + if(pageReq.isUnpaged()) { + return query; + } + + return query.setFirstResult((int) pageReq.getOffset()) .setMaxResults(pageReq.getPageSize()); } diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java index 02af22e4c..0df862acc 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java @@ -25,6 +25,7 @@ import cz.cvut.kbss.termit.dto.PrefixDeclaration; import cz.cvut.kbss.termit.dto.RdfsStatement; import cz.cvut.kbss.termit.dto.Snapshot; +import 
cz.cvut.kbss.termit.dto.filter.VocabularyContentChangeFilterDto; import cz.cvut.kbss.termit.environment.Environment; import cz.cvut.kbss.termit.environment.Generator; import cz.cvut.kbss.termit.event.AssetPersistEvent; @@ -38,13 +39,18 @@ import cz.cvut.kbss.termit.model.User; import cz.cvut.kbss.termit.model.Vocabulary; import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; +import cz.cvut.kbss.termit.model.changetracking.DeleteChangeRecord; import cz.cvut.kbss.termit.model.changetracking.PersistChangeRecord; import cz.cvut.kbss.termit.model.changetracking.UpdateChangeRecord; import cz.cvut.kbss.termit.model.resource.Document; import cz.cvut.kbss.termit.model.resource.File; import cz.cvut.kbss.termit.model.util.EntityToOwlClassMapper; import cz.cvut.kbss.termit.persistence.context.DescriptorFactory; +import cz.cvut.kbss.termit.persistence.dao.changetracking.ChangeRecordDao; +import cz.cvut.kbss.termit.persistence.dao.changetracking.ChangeTrackingContextResolver; +import cz.cvut.kbss.termit.util.Configuration; import cz.cvut.kbss.termit.util.Constants; +import cz.cvut.kbss.termit.util.Utils; import org.eclipse.rdf4j.model.IRI; import org.eclipse.rdf4j.model.ValueFactory; import org.eclipse.rdf4j.model.vocabulary.RDF; @@ -60,6 +66,7 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationEvent; import org.springframework.context.ApplicationEventPublisher; +import org.springframework.data.domain.Pageable; import org.springframework.test.annotation.DirtiesContext; import java.net.URI; @@ -109,6 +116,9 @@ class VocabularyDaoTest extends BaseDaoTestRunner { @Autowired private VocabularyDao sut; + @Autowired + private TermDao termDao; + private User author; @BeforeEach @@ -932,4 +942,166 @@ void getAnyExternalRelationsReturnsTermsWithBothRelations(URI termRelation) { } }); } + + @Test + void getDetailedHistoryOfContentReturnsRecordsForAllChangeTypes() { + enableRdfsInference(em); + final Configuration config = new Configuration(); + config.getChangetracking().getContext().setExtension("/zmeny"); + final ChangeTrackingContextResolver resolver = new ChangeTrackingContextResolver(em, config); + final ChangeRecordDao changeRecordDao = new ChangeRecordDao(resolver, em); + + final Vocabulary vocabulary = Generator.generateVocabularyWithId(); + final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); + final Term termToRemove = Generator.generateTermWithId(vocabulary.getUri()); + + final List firstChanges = Generator.generateChangeRecords(firstTerm, author); + final List termToRemoveChanges = Generator.generateChangeRecords(termToRemove, author); + final DeleteChangeRecord deleteChangeRecord = new DeleteChangeRecord(); + deleteChangeRecord.setChangedEntity(termToRemove.getUri()); + deleteChangeRecord.setTimestamp(Utils.timestamp()); + deleteChangeRecord.setAuthor(author); + deleteChangeRecord.setLabel(termToRemove.getLabel()); + + transactional(() -> { + vocabulary.getGlossary().addRootTerm(firstTerm); + sut.persist(vocabulary); + Environment.addRelation(vocabulary.getUri(), URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_glosar), vocabulary.getGlossary().getUri(), em); + + termDao.persist(firstTerm, vocabulary); + termDao.persist(termToRemove, vocabulary); + + firstChanges.forEach(r -> changeRecordDao.persist(r, firstTerm)); + termToRemoveChanges.forEach(r -> changeRecordDao.persist(r, termToRemove)); + changeRecordDao.persist(deleteChangeRecord, termToRemove); + }); + + final VocabularyContentChangeFilterDto 
filter = new VocabularyContentChangeFilterDto(); + final int recordsCount = firstChanges.size() + termToRemoveChanges.size() + 1; // +1 for the delete record + final Pageable pageable = Pageable.ofSize(recordsCount * 3); + final List contentChanges = sut.getDetailedHistoryOfContent(vocabulary, filter, pageable); + + assertEquals(recordsCount, contentChanges.size()); + final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); + final long updatesCount = contentChanges.stream().filter(ch -> ch instanceof UpdateChangeRecord).count(); + final long deleteCount = contentChanges.stream().filter(ch -> ch instanceof DeleteChangeRecord).count(); + assertEquals(2, persistCount); + assertEquals(recordsCount - 3, updatesCount); // -2 persist records, -1 delete record + assertEquals(1, deleteCount); + } + + + @Test + void getDetailedHistoryOfContentReturnsRecordsOfExistingTermFilteredByTermName() { + enableRdfsInference(em); + final Configuration config = new Configuration(); + config.getChangetracking().getContext().setExtension("/zmeny"); + final ChangeTrackingContextResolver resolver = new ChangeTrackingContextResolver(em, config); + final ChangeRecordDao changeRecordDao = new ChangeRecordDao(resolver, em); + + final String needle = "needle"; + final String haystack = "A label that contains needle somewhere"; + final String mud = "The n3edle is not here"; + + // Two terms with needle in the label, one term without needle in the label + final Vocabulary vocabulary = Generator.generateVocabularyWithId(); + final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); + firstTerm.getLabel().set(Environment.LANGUAGE, haystack); + final Term secondTerm = Generator.generateTermWithId(vocabulary.getUri()); + secondTerm.getLabel().set(mud + needle); + final Term thirdTerm = Generator.generateTermWithId(vocabulary.getUri()); + thirdTerm.getLabel().set(Environment.LANGUAGE, mud); + + final List firstChanges = Generator.generateChangeRecords(firstTerm, author); + final List secondChanges = Generator.generateChangeRecords(secondTerm, author); + final List thirdChanges = Generator.generateChangeRecords(thirdTerm, author); + + transactional(() -> { + vocabulary.getGlossary().addRootTerm(firstTerm); + sut.persist(vocabulary); + Environment.addRelation(vocabulary.getUri(), URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_glosar), vocabulary.getGlossary().getUri(), em); + + termDao.persist(firstTerm, vocabulary); + termDao.persist(secondTerm, vocabulary); + termDao.persist(thirdTerm, vocabulary); + + firstChanges.forEach(r -> changeRecordDao.persist(r, firstTerm)); + secondChanges.forEach(r -> changeRecordDao.persist(r, secondTerm)); + thirdChanges.forEach(r -> changeRecordDao.persist(r, thirdTerm)); + }); + + final VocabularyContentChangeFilterDto filter = new VocabularyContentChangeFilterDto(); + filter.setTermName(needle); + + final int recordsCount = firstChanges.size() + secondChanges.size(); + final Pageable pageable = Pageable.ofSize(recordsCount * 2); + final List contentChanges = sut.getDetailedHistoryOfContent(vocabulary, filter, pageable); + + assertEquals(recordsCount, contentChanges.size()); + final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); + final long updatesCount = contentChanges.stream().filter(ch -> ch instanceof UpdateChangeRecord).count(); + final long deleteCount = contentChanges.stream().filter(ch -> ch instanceof DeleteChangeRecord).count(); + assertEquals(2, 
persistCount); + assertEquals(recordsCount - 2, updatesCount); // -2 persist records + assertEquals(0, deleteCount); + } + + @Test + void getDetailedHistoryOfContentReturnsRecordsOfDeletedTermFilteredByTermName() { + enableRdfsInference(em); + final Configuration config = new Configuration(); + config.getChangetracking().getContext().setExtension("/zmeny"); + final ChangeTrackingContextResolver resolver = new ChangeTrackingContextResolver(em, config); + final ChangeRecordDao changeRecordDao = new ChangeRecordDao(resolver, em); + + final String needle = "needle"; + final String haystack = "A label that contains needle somewhere"; + final String mud = "The n3edle is not here"; + + final Vocabulary vocabulary = Generator.generateVocabularyWithId(); + final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); + // the needle is placed in the term which will be removed + firstTerm.getLabel().set(Environment.LANGUAGE, mud); + final Term termToRemove = Generator.generateTermWithId(vocabulary.getUri()); + termToRemove.getLabel().set(Environment.LANGUAGE, haystack); + + final List firstChanges = Generator.generateChangeRecords(firstTerm, author); + final List termToRemoveChanges = Generator.generateChangeRecords(termToRemove, author); + final DeleteChangeRecord deleteChangeRecord = new DeleteChangeRecord(); + deleteChangeRecord.setChangedEntity(termToRemove.getUri()); + deleteChangeRecord.setTimestamp(Utils.timestamp()); + deleteChangeRecord.setAuthor(author); + deleteChangeRecord.setLabel(termToRemove.getLabel()); + + transactional(() -> { + vocabulary.getGlossary().addRootTerm(firstTerm); + sut.persist(vocabulary); + Environment.addRelation(vocabulary.getUri(), URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_glosar), vocabulary.getGlossary().getUri(), em); + + termDao.persist(firstTerm, vocabulary); + termDao.persist(termToRemove, vocabulary); + + firstChanges.forEach(r -> changeRecordDao.persist(r, firstTerm)); + termToRemoveChanges.forEach(r -> changeRecordDao.persist(r, termToRemove)); + changeRecordDao.persist(deleteChangeRecord, termToRemove); + + termToRemove.setVocabulary(vocabulary.getUri()); + termDao.remove(termToRemove); + }); + + final VocabularyContentChangeFilterDto filter = new VocabularyContentChangeFilterDto(); + filter.setTermName(needle); + + final int recordsCount = termToRemoveChanges.size() + 1; // +1 for the delete record + final Pageable pageable = Pageable.ofSize(recordsCount * 2); + final List contentChanges = sut.getDetailedHistoryOfContent(vocabulary, filter, pageable); + + assertEquals(recordsCount, contentChanges.size()); + final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); + final long updatesCount = contentChanges.stream().filter(ch -> ch instanceof UpdateChangeRecord).count(); + final long deleteCount = contentChanges.stream().filter(ch -> ch instanceof DeleteChangeRecord).count(); + assertEquals(1, persistCount); + assertEquals(recordsCount - 2, updatesCount); // -1 persist record -1 delete record + assertEquals(1, deleteCount); + } } diff --git a/src/test/java/cz/cvut/kbss/termit/service/changetracking/ChangeTrackingTest.java b/src/test/java/cz/cvut/kbss/termit/service/changetracking/ChangeTrackingTest.java index 05069c385..d570a1f85 100644 --- a/src/test/java/cz/cvut/kbss/termit/service/changetracking/ChangeTrackingTest.java +++ b/src/test/java/cz/cvut/kbss/termit/service/changetracking/ChangeTrackingTest.java @@ -27,6 +27,7 @@ import cz.cvut.kbss.termit.model.User; import 
cz.cvut.kbss.termit.model.Vocabulary; import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; +import cz.cvut.kbss.termit.model.changetracking.DeleteChangeRecord; import cz.cvut.kbss.termit.model.changetracking.PersistChangeRecord; import cz.cvut.kbss.termit.model.changetracking.UpdateChangeRecord; import cz.cvut.kbss.termit.model.resource.File; @@ -52,6 +53,8 @@ import static org.hamcrest.Matchers.nullValue; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertNotNull; public class ChangeTrackingTest extends BaseServiceTestRunner { @@ -141,7 +144,7 @@ void updatingVocabularyReferenceAndLiteralAttributesCreatesTwoUpdateRecords() { assertEquals(vocabulary.getUri(), chr.getChangedEntity()); assertThat(result.get(0), instanceOf(UpdateChangeRecord.class)); assertThat(((UpdateChangeRecord) chr).getChangedAttribute().toString(), anyOf(equalTo(DC.Terms.TITLE), - equalTo(cz.cvut.kbss.termit.util.Vocabulary.s_p_importuje_slovnik))); + equalTo(cz.cvut.kbss.termit.util.Vocabulary.s_p_importuje_slovnik))); }); } @@ -214,7 +217,7 @@ void updatingTermLiteralAttributesCreatesChangeRecordWithOriginalAndNewValue() { final List result = changeRecordDao.findAll(term); assertEquals(1, result.size()); assertEquals(Collections.singleton(originalDefinition), - ((UpdateChangeRecord) result.get(0)).getOriginalValue()); + ((UpdateChangeRecord) result.get(0)).getOriginalValue()); assertEquals(Collections.singleton(newDefinition), ((UpdateChangeRecord) result.get(0)).getNewValue()); } @@ -271,4 +274,24 @@ void updatingTermStateCreatesUpdateChangeRecord() { assertEquals(URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_stav_pojmu), ((UpdateChangeRecord) result.get(0)).getChangedAttribute()); } + + @Test + void deletingTermCreatesDeleteChangeRecord() { + enableRdfsInference(em); + final Term term = Generator.generateTermWithId(vocabulary.getUri()); + transactional(()-> { + em.persist(vocabulary, descriptorFactory.vocabularyDescriptor(vocabulary)); + term.setGlossary(vocabulary.getGlossary().getUri()); + em.persist(term, descriptorFactory.termDescriptor(vocabulary)); + Generator.addTermInVocabularyRelationship(term, vocabulary.getUri(), em); + }); + + termService.remove(term); + final List result = changeRecordDao.findAll(term); + assertEquals(1, result.size()); + final DeleteChangeRecord record = assertInstanceOf(DeleteChangeRecord.class, result.get(0)); + assertEquals(term.getUri(), record.getChangedEntity()); + assertNotNull(record.getLabel()); + assertEquals(term.getLabel(), record.getLabel()); + } } From 154b8aa14cdb4bcb271642fbb785422fc1f229c5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Tue, 5 Nov 2024 16:51:54 +0100 Subject: [PATCH 12/49] [Enhancement kbss-cvut/termit-ui#520] Add tests for change record filtering --- .../termit/persistence/dao/VocabularyDao.java | 2 +- .../persistence/dao/VocabularyDaoTest.java | 131 ++++++++++++++++-- 2 files changed, 118 insertions(+), 15 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java index 4744df72b..4ac58f75b 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java @@ -455,7 +455,7 @@ private TypedQuery 
createDetailedContentChangesQuery(Vocab BIND(?attributeName as ?changedAttributeNameVal) FILTER (!BOUND(?termNameVal) || CONTAINS(LCASE(?label), LCASE(?termNameVal))) FILTER (!BOUND(?authorNameVal) || CONTAINS(LCASE(?authorFullName), LCASE(?authorNameVal))) - FILTER (!BOUND(?changedAttributeName) || !BOUND(?changedAttributeNameVal) || CONTAINS(LCASE(?changedAttributeName), LCASE(?changedAttributeName))) + FILTER (!BOUND(?changedAttributeNameVal) || CONTAINS(LCASE(?changedAttributeName), LCASE(?changedAttributeNameVal))) } ORDER BY DESC(?timestamp) ?attribute """, AbstractChangeRecord.class) .setParameter("changeContext", changeTrackingContextResolver.resolveChangeTrackingContext(vocabulary)) diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java index 0df862acc..01e4b9bac 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java @@ -20,6 +20,7 @@ import cz.cvut.kbss.jopa.model.EntityManager; import cz.cvut.kbss.jopa.model.MultilingualString; import cz.cvut.kbss.jopa.model.descriptors.Descriptor; +import cz.cvut.kbss.jopa.vocabulary.RDFS; import cz.cvut.kbss.jopa.vocabulary.SKOS; import cz.cvut.kbss.termit.dto.AggregatedChangeInfo; import cz.cvut.kbss.termit.dto.PrefixDeclaration; @@ -47,8 +48,6 @@ import cz.cvut.kbss.termit.model.util.EntityToOwlClassMapper; import cz.cvut.kbss.termit.persistence.context.DescriptorFactory; import cz.cvut.kbss.termit.persistence.dao.changetracking.ChangeRecordDao; -import cz.cvut.kbss.termit.persistence.dao.changetracking.ChangeTrackingContextResolver; -import cz.cvut.kbss.termit.util.Configuration; import cz.cvut.kbss.termit.util.Constants; import cz.cvut.kbss.termit.util.Utils; import org.eclipse.rdf4j.model.IRI; @@ -84,9 +83,12 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Random; import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import java.util.stream.IntStream; +import java.util.stream.Stream; import static cz.cvut.kbss.termit.environment.util.ContainsSameEntities.containsSameEntities; import static org.hamcrest.MatcherAssert.assertThat; @@ -118,6 +120,9 @@ class VocabularyDaoTest extends BaseDaoTestRunner { @Autowired private TermDao termDao; + + @Autowired + private ChangeRecordDao changeRecordDao; private User author; @@ -946,10 +951,6 @@ void getAnyExternalRelationsReturnsTermsWithBothRelations(URI termRelation) { @Test void getDetailedHistoryOfContentReturnsRecordsForAllChangeTypes() { enableRdfsInference(em); - final Configuration config = new Configuration(); - config.getChangetracking().getContext().setExtension("/zmeny"); - final ChangeTrackingContextResolver resolver = new ChangeTrackingContextResolver(em, config); - final ChangeRecordDao changeRecordDao = new ChangeRecordDao(resolver, em); final Vocabulary vocabulary = Generator.generateVocabularyWithId(); final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); @@ -994,10 +995,6 @@ void getDetailedHistoryOfContentReturnsRecordsForAllChangeTypes() { @Test void getDetailedHistoryOfContentReturnsRecordsOfExistingTermFilteredByTermName() { enableRdfsInference(em); - final Configuration config = new Configuration(); - config.getChangetracking().getContext().setExtension("/zmeny"); - final ChangeTrackingContextResolver resolver = new 
ChangeTrackingContextResolver(em, config); - final ChangeRecordDao changeRecordDao = new ChangeRecordDao(resolver, em); final String needle = "needle"; final String haystack = "A label that contains needle somewhere"; @@ -1049,10 +1046,6 @@ void getDetailedHistoryOfContentReturnsRecordsOfExistingTermFilteredByTermName() @Test void getDetailedHistoryOfContentReturnsRecordsOfDeletedTermFilteredByTermName() { enableRdfsInference(em); - final Configuration config = new Configuration(); - config.getChangetracking().getContext().setExtension("/zmeny"); - final ChangeTrackingContextResolver resolver = new ChangeTrackingContextResolver(em, config); - final ChangeRecordDao changeRecordDao = new ChangeRecordDao(resolver, em); final String needle = "needle"; final String haystack = "A label that contains needle somewhere"; @@ -1104,4 +1097,114 @@ void getDetailedHistoryOfContentReturnsRecordsOfDeletedTermFilteredByTermName() assertEquals(recordsCount - 2, updatesCount); // -1 persist record -1 delete record assertEquals(1, deleteCount); } + + @Test + void getDetailedHistoryOfContentReturnsRecordsOfExistingTermFilteredByChangedAttributeName() { + enableRdfsInference(em); + + // Two terms with needle in the label, one term without needle in the label + final Vocabulary vocabulary = Generator.generateVocabularyWithId(); + final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); + final Term secondTerm = Generator.generateTermWithId(vocabulary.getUri()); + + final List firstChanges = Generator.generateChangeRecords(firstTerm, author); + final List secondChanges = Generator.generateChangeRecords(secondTerm, author); + + // randomize changed attributes + final Random random = new Random(); + final AtomicInteger recordCount = new AtomicInteger(0); + final URI changedAttribute = URI.create(SKOS.DEFINITION); + final URI anotherChangedAttribute = URI.create(RDFS.LABEL); + final String changedAttributeName = "definition"; + + Stream.of(firstChanges, secondChanges).flatMap(Collection::stream) + .filter(r -> r instanceof UpdateChangeRecord) + .map(r -> (UpdateChangeRecord) r) + .forEach(r -> { + if(random.nextBoolean() || recordCount.get() == 0) { + r.setChangedAttribute(changedAttribute); + recordCount.incrementAndGet(); + } else { + r.setChangedAttribute(anotherChangedAttribute); + } + }); + + transactional(() -> { + vocabulary.getGlossary().addRootTerm(firstTerm); + sut.persist(vocabulary); + Environment.addRelation(vocabulary.getUri(), URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_glosar), vocabulary.getGlossary().getUri(), em); + + termDao.persist(firstTerm, vocabulary); + termDao.persist(secondTerm, vocabulary); + + firstChanges.forEach(r -> changeRecordDao.persist(r, firstTerm)); + secondChanges.forEach(r -> changeRecordDao.persist(r, secondTerm)); + }); + + final VocabularyContentChangeFilterDto filter = new VocabularyContentChangeFilterDto(); + filter.setChangedAttributeName(changedAttributeName); + + final Pageable pageable = Pageable.ofSize(recordCount.get() * 2); + final List contentChanges = sut.getDetailedHistoryOfContent(vocabulary, filter, pageable); + + assertEquals(recordCount.get(), contentChanges.size()); + final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); + final long updatesCount = contentChanges.stream().filter(ch -> ch instanceof UpdateChangeRecord).count(); + final long deleteCount = contentChanges.stream().filter(ch -> ch instanceof DeleteChangeRecord).count(); + assertEquals(0, persistCount); + 
assertEquals(recordCount.get(), updatesCount); + assertEquals(0, deleteCount); + } + + @Test + void getDetailedHistoryOfContentReturnsRecordsOfExistingTermFilteredByAuthorName() { + enableRdfsInference(em); + + // Two terms with needle in the label, one term without needle in the label + final Vocabulary vocabulary = Generator.generateVocabularyWithId(); + final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); + final Term secondTerm = Generator.generateTermWithId(vocabulary.getUri()); + + final List firstChanges = Generator.generateChangeRecords(firstTerm, author); + final List secondChanges = Generator.generateChangeRecords(secondTerm, author); + + // make new author + final User anotherAuthor = Generator.generateUserWithId(); + anotherAuthor.setFirstName("Karel"); + anotherAuthor.setLastName("Novák"); + transactional(() -> em.persist(anotherAuthor)); + Environment.setCurrentUser(anotherAuthor); + + final int recordCount = 2; + // author is this.author (Environment current user) + firstChanges.add(Generator.generateUpdateChange(firstTerm)); + secondChanges.add(Generator.generateUpdateChange(secondTerm)); + + transactional(() -> { + vocabulary.getGlossary().addRootTerm(firstTerm); + sut.persist(vocabulary); + Environment.addRelation(vocabulary.getUri(), URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_glosar), vocabulary.getGlossary().getUri(), em); + + termDao.persist(firstTerm, vocabulary); + termDao.persist(secondTerm, vocabulary); + + firstChanges.forEach(r -> changeRecordDao.persist(r, firstTerm)); + secondChanges.forEach(r -> changeRecordDao.persist(r, secondTerm)); + }); + + final VocabularyContentChangeFilterDto filter = new VocabularyContentChangeFilterDto(); + // full name without first two and last two characters + filter.setAuthorName(anotherAuthor.getFullName().substring(2, anotherAuthor.getFullName().length() - 2)); + + final Pageable pageable = Pageable.ofSize(4); + final List contentChanges = sut.getDetailedHistoryOfContent(vocabulary, filter, pageable); + + assertEquals(recordCount, contentChanges.size()); + final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); + final long updatesCount = contentChanges.stream().filter(ch -> ch instanceof UpdateChangeRecord).count(); + final long deleteCount = contentChanges.stream().filter(ch -> ch instanceof DeleteChangeRecord).count(); + assertEquals(0, persistCount); + assertEquals(recordCount, updatesCount); + assertEquals(0, deleteCount); + } } From 08aa466d327b6a490b04eedbda0b0dcf4a4e9500 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Wed, 6 Nov 2024 13:51:25 +0100 Subject: [PATCH 13/49] [Enhancement kbss-cvut/termit-ui#520] Add parametrized test for filtering vocabulary content changes by change type. 
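As a quick reference for the parameterized test added below, a minimal sketch of the correspondence between the change-type IRIs accepted by the filter and the change record classes they select; the IRI constants and record classes come from this patch series, while the class name ChangeTypeMappingSketch and the method recordClass are hypothetical helpers mirroring the switch used in the test.

import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord;
import cz.cvut.kbss.termit.model.changetracking.DeleteChangeRecord;
import cz.cvut.kbss.termit.model.changetracking.PersistChangeRecord;
import cz.cvut.kbss.termit.model.changetracking.UpdateChangeRecord;
import cz.cvut.kbss.termit.util.Vocabulary;

final class ChangeTypeMappingSketch {

    // Maps a change-type IRI (as passed in the "type" request parameter) to the record class it selects.
    static Class<? extends AbstractChangeRecord> recordClass(String typeIri) {
        return switch (typeIri) {
            case Vocabulary.s_c_vytvoreni_entity -> PersistChangeRecord.class; // creation records
            case Vocabulary.s_c_uprava_entity -> UpdateChangeRecord.class;     // update records
            case Vocabulary.s_c_smazani_entity -> DeleteChangeRecord.class;    // deletion records
            default -> throw new IllegalArgumentException("Unknown change type: " + typeIri);
        };
    }

    private ChangeTypeMappingSketch() {
    }
}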
--- .../persistence/dao/VocabularyDaoTest.java | 60 +++++++++++++++++++ 1 file changed, 60 insertions(+) diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java index 01e4b9bac..ea825cc5a 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java @@ -60,9 +60,11 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; +import org.junit.jupiter.params.provider.ValueSource; import org.mockito.ArgumentCaptor; import org.mockito.Spy; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; import org.springframework.context.ApplicationEvent; import org.springframework.context.ApplicationEventPublisher; import org.springframework.data.domain.Pageable; @@ -1207,4 +1209,62 @@ void getDetailedHistoryOfContentReturnsRecordsOfExistingTermFilteredByAuthorName assertEquals(recordCount, updatesCount); assertEquals(0, deleteCount); } + + @ParameterizedTest + @ValueSource(strings = { + cz.cvut.kbss.termit.util.Vocabulary.s_c_uprava_entity, + cz.cvut.kbss.termit.util.Vocabulary.s_c_vytvoreni_entity, + cz.cvut.kbss.termit.util.Vocabulary.s_c_smazani_entity, + }) + void getDetailedHistoryOfContentReturnsRecordsOfExistingTermFilteredByChangeType(String type) { + enableRdfsInference(em); + final URI typeUri = URI.create(type); + final Class typeClass = switch (type) { + case cz.cvut.kbss.termit.util.Vocabulary.s_c_uprava_entity -> UpdateChangeRecord.class; + case cz.cvut.kbss.termit.util.Vocabulary.s_c_vytvoreni_entity -> PersistChangeRecord.class; + case cz.cvut.kbss.termit.util.Vocabulary.s_c_smazani_entity -> DeleteChangeRecord.class; + default -> throw new IllegalArgumentException("Unknown change type: " + type); + }; + + // Two terms with needle in the label, one term without needle in the label + final Vocabulary vocabulary = Generator.generateVocabularyWithId(); + final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); + final Term secondTerm = Generator.generateTermWithId(vocabulary.getUri()); + + final List firstChanges = Generator.generateChangeRecords(firstTerm, author); + final List secondChanges = Generator.generateChangeRecords(secondTerm, author); + final DeleteChangeRecord deleteChangeRecord = new DeleteChangeRecord(); + deleteChangeRecord.setChangedEntity(secondTerm.getUri()); + deleteChangeRecord.setTimestamp(Utils.timestamp()); + deleteChangeRecord.setAuthor(author); + deleteChangeRecord.setLabel(secondTerm.getLabel()); + + final int recordCount = (int) Stream.of(firstChanges, secondChanges, List.of(deleteChangeRecord)).flatMap(List::stream).filter(typeClass::isInstance).count(); + + transactional(() -> { + vocabulary.getGlossary().addRootTerm(firstTerm); + sut.persist(vocabulary); + Environment.addRelation(vocabulary.getUri(), URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_glosar), vocabulary.getGlossary().getUri(), em); + + termDao.persist(firstTerm, vocabulary); + termDao.persist(secondTerm, vocabulary); + + firstChanges.forEach(r -> changeRecordDao.persist(r, firstTerm)); + secondChanges.forEach(r -> changeRecordDao.persist(r, secondTerm)); + changeRecordDao.persist(deleteChangeRecord, secondTerm); + + secondTerm.setVocabulary(vocabulary.getUri()); + termDao.remove(secondTerm); + }); + + final 
VocabularyContentChangeFilterDto filter = new VocabularyContentChangeFilterDto(); + // full name without first two and last two characters + filter.setChangeType(typeUri); + + final Pageable pageable = Pageable.ofSize( recordCount * 2); + final List contentChanges = sut.getDetailedHistoryOfContent(vocabulary, filter, pageable); + + assertEquals(recordCount, contentChanges.size()); + assertTrue(contentChanges.stream().allMatch(typeClass::isInstance)); + } } From 7e61939ae4abc3d11c6497b167dcd4c6842b8f6d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Sun, 10 Nov 2024 10:33:12 +0100 Subject: [PATCH 14/49] [Enhancement kbss-cvut/termit-ui#520] Test that detailed history of vocabulary content endpoint return results when no filter is specified. --- .../VocabularyContentChangeFilterDto.java | 24 ++++++++++--- .../termit/rest/VocabularyController.java | 35 +++++++++++-------- .../termit/rest/VocabularyControllerTest.java | 27 ++++++++++++++ 3 files changed, 67 insertions(+), 19 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/dto/filter/VocabularyContentChangeFilterDto.java b/src/main/java/cz/cvut/kbss/termit/dto/filter/VocabularyContentChangeFilterDto.java index c09fa7988..9d75ef607 100644 --- a/src/main/java/cz/cvut/kbss/termit/dto/filter/VocabularyContentChangeFilterDto.java +++ b/src/main/java/cz/cvut/kbss/termit/dto/filter/VocabularyContentChangeFilterDto.java @@ -1,15 +1,16 @@ package cz.cvut.kbss.termit.dto.filter; import java.net.URI; +import java.util.Objects; /** * Represents parameters for filtering vocabulary content changes. */ public class VocabularyContentChangeFilterDto { - private String termName; - private String changedAttributeName; - private String authorName; - private URI changeType; + private String termName = ""; + private String changedAttributeName = ""; + private String authorName = ""; + private URI changeType = null; public String getTermName() { return termName; @@ -42,4 +43,19 @@ public URI getChangeType() { public void setChangeType(URI changeType) { this.changeType = changeType; } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof VocabularyContentChangeFilterDto that)) return false; + return Objects.equals(termName, that.termName) && + Objects.equals(changedAttributeName, that.changedAttributeName) && + Objects.equals(authorName, that.authorName) && + Objects.equals(changeType, that.changeType); + } + + @Override + public int hashCode() { + return Objects.hash(termName, changedAttributeName, authorName, changeType); + } } diff --git a/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java b/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java index 72aa526a6..13858a4e0 100644 --- a/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java +++ b/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java @@ -297,25 +297,30 @@ public List getHistoryOfContent( @GetMapping(value = "/{localName}/history-of-content/detail", produces = {MediaType.APPLICATION_JSON_VALUE, JsonLd.MEDIA_TYPE}) public List getDetailedHistoryOfContent( - @Parameter(description = ApiDoc.ID_LOCAL_NAME_DESCRIPTION, - example = ApiDoc.ID_LOCAL_NAME_EXAMPLE) @PathVariable String localName, - @Parameter(description = ApiDoc.ID_NAMESPACE_DESCRIPTION, - example = ApiDoc.ID_NAMESPACE_EXAMPLE) @RequestParam(name = QueryParams.NAMESPACE, - required = false) Optional namespace, + @Parameter(description = ApiDoc.ID_LOCAL_NAME_DESCRIPTION, example = ApiDoc.ID_LOCAL_NAME_EXAMPLE) + 
@PathVariable + String localName, + @Parameter(description = ApiDoc.ID_NAMESPACE_DESCRIPTION, example = ApiDoc.ID_NAMESPACE_EXAMPLE) + @RequestParam(name = QueryParams.NAMESPACE, required = false) + Optional namespace, @Parameter(description = "Term name to be used in filtering.") - @RequestParam(name = "term", required = false, defaultValue = "") String termName, + @RequestParam(name = "term", required = false, defaultValue = "") + String termName, @Parameter(description = "Change type to be used in filtering.") - @RequestParam(name = "type", required = false) URI changeType, + @RequestParam(name = "type", required = false) + URI changeType, @Parameter(description = "Author name to be used in filtering.") - @RequestParam(name = "author", required = false, defaultValue = "") String authorName, + @RequestParam(name = "author", required = false, defaultValue = "") + String authorName, @Parameter(description = "Changed attribute name to be used in filtering.") - @RequestParam(name = "attribute", required = false, defaultValue = "") String changedAttributeName, - - @Parameter(description = ApiDocConstants.PAGE_SIZE_DESCRIPTION) @RequestParam( - name = Constants.QueryParams.PAGE_SIZE, required = false, - defaultValue = DEFAULT_PAGE_SIZE) Integer pageSize, - @Parameter(description = ApiDocConstants.PAGE_NO_DESCRIPTION) @RequestParam( - name = Constants.QueryParams.PAGE, required = false, defaultValue = DEFAULT_PAGE) Integer pageNo) { + @RequestParam(name = "attribute", required = false, defaultValue = "") + String changedAttributeName, + @Parameter(description = ApiDocConstants.PAGE_SIZE_DESCRIPTION) + @RequestParam(name = Constants.QueryParams.PAGE_SIZE, required = false, defaultValue = DEFAULT_PAGE_SIZE) + Integer pageSize, + @Parameter(description = ApiDocConstants.PAGE_NO_DESCRIPTION) + @RequestParam(name = Constants.QueryParams.PAGE, required = false, defaultValue = DEFAULT_PAGE) + Integer pageNo) { final Pageable pageReq = createPageRequest(pageSize, pageNo); final Vocabulary vocabulary = vocabularyService.getReference(resolveVocabularyUri(localName, namespace)); final VocabularyContentChangeFilterDto filter = new VocabularyContentChangeFilterDto(); diff --git a/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java b/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java index 0d1c7444d..37af85f47 100644 --- a/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java +++ b/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java @@ -22,11 +22,13 @@ import cz.cvut.kbss.termit.dto.AggregatedChangeInfo; import cz.cvut.kbss.termit.dto.Snapshot; import cz.cvut.kbss.termit.dto.acl.AccessControlListDto; +import cz.cvut.kbss.termit.dto.filter.VocabularyContentChangeFilterDto; import cz.cvut.kbss.termit.dto.listing.VocabularyDto; import cz.cvut.kbss.termit.environment.Environment; import cz.cvut.kbss.termit.environment.Generator; import cz.cvut.kbss.termit.exception.AssetRemovalException; import cz.cvut.kbss.termit.exception.importing.VocabularyImportException; +import cz.cvut.kbss.termit.model.Term; import cz.cvut.kbss.termit.model.User; import cz.cvut.kbss.termit.model.Vocabulary; import cz.cvut.kbss.termit.model.acl.AccessControlList; @@ -50,6 +52,7 @@ import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.domain.Pageable; import org.springframework.http.HttpHeaders; import org.springframework.http.MediaType; import org.springframework.mock.web.MockMultipartFile; 
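[Illustrative aside, not part of the patch series] The non-null defaults and the equals()/hashCode() pair added to VocabularyContentChangeFilterDto earlier in this commit are what the new controller test below relies on: Mockito matches stubbed and verified arguments via equals(), so the filter the controller builds from absent request parameters must compare equal to a freshly constructed instance. A minimal, self-contained sketch of that idea (the demo class name is hypothetical):

    import cz.cvut.kbss.termit.dto.filter.VocabularyContentChangeFilterDto;

    public class FilterEqualityDemo {
        public static void main(String[] args) {
            // Two independently constructed filters carrying only the new defaults (empty strings, null change type)
            VocabularyContentChangeFilterDto expected = new VocabularyContentChangeFilterDto();
            VocabularyContentChangeFilterDto builtByController = new VocabularyContentChangeFilterDto();
            // Prints true only because equals()/hashCode() are overridden in this commit;
            // Mockito uses the same equals() when matching stubbed and verified arguments.
            System.out.println(expected.equals(builtByController));
        }
    }
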
@@ -70,15 +73,20 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.IntStream; +import java.util.stream.Stream; import static cz.cvut.kbss.termit.environment.util.ContainsSameEntities.containsSameEntities; +import static cz.cvut.kbss.termit.util.Constants.DEFAULT_PAGE_SIZE; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalToObject; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.notNull; import static org.mockito.Mockito.any; import static org.mockito.Mockito.anyBoolean; +import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; @@ -642,4 +650,23 @@ void getExcelTemplateFileReturnsExcelTemplateFileRetrievedFromServiceAsAttachmen assertThat(mvcResult.getResponse().getHeader(HttpHeaders.CONTENT_DISPOSITION), containsString("filename=\"termit-import.xlsx\"")); } + + @Test + void getDetailedHistoryOfContentReturnsListOfChangeRecordsWhenNoFilterIsSpecified() throws Exception { + final int pageSize = Integer.parseInt(VocabularyController.DEFAULT_PAGE_SIZE); + final Vocabulary vocabulary = generateVocabularyAndInitReferenceResolution(); + final Term term = Generator.generateTermWithId(); + final List changeRecords = IntStream.range(0, 5).mapToObj(i -> Generator.generateChangeRecords(term, user)).flatMap(List::stream).toList(); + final VocabularyContentChangeFilterDto filter = new VocabularyContentChangeFilterDto(); + final Pageable pageable = Pageable.ofSize(pageSize); + + doReturn(changeRecords).when(serviceMock).getDetailedHistoryOfContent(vocabulary, filter, pageable); + + final MvcResult mvcResult = mockMvc.perform(get(PATH + "/" + FRAGMENT + "/history-of-content/detail")).andExpect(status().isOk()).andReturn(); + final List result = + readValue(mvcResult, new TypeReference>() {}); + assertNotNull(result); + assertEquals(changeRecords, result); + verify(serviceMock).getDetailedHistoryOfContent(vocabulary, filter, pageable); + } } From 34b7bbbcfe2fecf289206772e95d55f649c451cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Sun, 10 Nov 2024 10:47:22 +0100 Subject: [PATCH 15/49] [Ref] Optimize imports --- .../cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java | 1 - .../cz/cvut/kbss/termit/rest/VocabularyControllerTest.java | 4 ---- 2 files changed, 5 deletions(-) diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java index ea825cc5a..3c6d7b295 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java @@ -64,7 +64,6 @@ import org.mockito.ArgumentCaptor; import org.mockito.Spy; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; import org.springframework.context.ApplicationEvent; import org.springframework.context.ApplicationEventPublisher; import org.springframework.data.domain.Pageable; diff --git a/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java b/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java index 37af85f47..a7e8677c6 100644 
--- a/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java +++ b/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java @@ -73,17 +73,13 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.IntStream; -import java.util.stream.Stream; import static cz.cvut.kbss.termit.environment.util.ContainsSameEntities.containsSameEntities; -import static cz.cvut.kbss.termit.util.Constants.DEFAULT_PAGE_SIZE; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalToObject; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.notNull; import static org.mockito.Mockito.any; import static org.mockito.Mockito.anyBoolean; import static org.mockito.Mockito.doReturn; From eee60a90409fb79024fe006c88f1094291f4528f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Sun, 10 Nov 2024 16:46:49 +0100 Subject: [PATCH 16/49] [Enhancement kbss-cvut/termit-ui#520] Move change record implementation to ChangeRecordDao --- ...terDto.java => ChangeRecordFilterDto.java} | 32 +++-- .../termit/persistence/dao/VocabularyDao.java | 98 ++------------- .../dao/changetracking/ChangeRecordDao.java | 116 +++++++++++++++++- .../kbss/termit/rest/ResourceController.java | 3 +- .../cvut/kbss/termit/rest/TermController.java | 19 ++- .../termit/rest/VocabularyController.java | 53 ++++---- .../service/business/ResourceService.java | 5 +- .../termit/service/business/TermService.java | 5 +- .../service/business/VocabularyService.java | 8 +- .../changetracking/ChangeRecordProvider.java | 19 ++- .../repository/ChangeRecordService.java | 8 +- .../VocabularyRepositoryService.java | 4 +- .../persistence/dao/VocabularyDaoTest.java | 18 +-- .../termit/rest/ResourceControllerTest.java | 6 +- .../kbss/termit/rest/TermControllerTest.java | 9 +- .../termit/rest/VocabularyControllerTest.java | 9 +- .../service/business/ResourceServiceTest.java | 6 +- .../service/business/TermServiceTest.java | 3 +- .../business/VocabularyServiceTest.java | 6 +- 19 files changed, 262 insertions(+), 165 deletions(-) rename src/main/java/cz/cvut/kbss/termit/dto/filter/{VocabularyContentChangeFilterDto.java => ChangeRecordFilterDto.java} (58%) diff --git a/src/main/java/cz/cvut/kbss/termit/dto/filter/VocabularyContentChangeFilterDto.java b/src/main/java/cz/cvut/kbss/termit/dto/filter/ChangeRecordFilterDto.java similarity index 58% rename from src/main/java/cz/cvut/kbss/termit/dto/filter/VocabularyContentChangeFilterDto.java rename to src/main/java/cz/cvut/kbss/termit/dto/filter/ChangeRecordFilterDto.java index 9d75ef607..9f06efbd0 100644 --- a/src/main/java/cz/cvut/kbss/termit/dto/filter/VocabularyContentChangeFilterDto.java +++ b/src/main/java/cz/cvut/kbss/termit/dto/filter/ChangeRecordFilterDto.java @@ -1,23 +1,26 @@ package cz.cvut.kbss.termit.dto.filter; +import com.fasterxml.jackson.annotation.JsonIgnore; +import cz.cvut.kbss.termit.util.Utils; + import java.net.URI; import java.util.Objects; /** * Represents parameters for filtering vocabulary content changes. 
*/ -public class VocabularyContentChangeFilterDto { - private String termName = ""; +public class ChangeRecordFilterDto { + private String assetLabel = ""; private String changedAttributeName = ""; private String authorName = ""; private URI changeType = null; - public String getTermName() { - return termName; + public String getAssetLabel() { + return assetLabel; } - public void setTermName(String termName) { - this.termName = termName; + public void setAssetLabel(String assetLabel) { + this.assetLabel = assetLabel; } public String getChangedAttributeName() { @@ -44,11 +47,22 @@ public void setChangeType(URI changeType) { this.changeType = changeType; } + /** + * @return true when all attributes are empty or null + */ + @JsonIgnore + public boolean isEmpty() { + return Utils.isBlank(assetLabel) && + Utils.isBlank(changedAttributeName) && + Utils.isBlank(authorName) && + changeType == null; + } + @Override public boolean equals(Object o) { if (this == o) return true; - if (!(o instanceof VocabularyContentChangeFilterDto that)) return false; - return Objects.equals(termName, that.termName) && + if (!(o instanceof ChangeRecordFilterDto that)) return false; + return Objects.equals(assetLabel, that.assetLabel) && Objects.equals(changedAttributeName, that.changedAttributeName) && Objects.equals(authorName, that.authorName) && Objects.equals(changeType, that.changeType); @@ -56,6 +70,6 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(termName, changedAttributeName, authorName, changeType); + return Objects.hash(assetLabel, changedAttributeName, authorName, changeType); } } diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java index 4ac58f75b..cba546691 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java @@ -19,9 +19,7 @@ import cz.cvut.kbss.jopa.model.EntityManager; import cz.cvut.kbss.jopa.model.query.Query; -import cz.cvut.kbss.jopa.model.query.TypedQuery; import cz.cvut.kbss.jopa.vocabulary.DC; -import cz.cvut.kbss.jopa.vocabulary.RDFS; import cz.cvut.kbss.jopa.vocabulary.SKOS; import cz.cvut.kbss.termit.asset.provenance.ModifiesData; import cz.cvut.kbss.termit.asset.provenance.SupportsLastModification; @@ -29,7 +27,7 @@ import cz.cvut.kbss.termit.dto.PrefixDeclaration; import cz.cvut.kbss.termit.dto.RdfsStatement; import cz.cvut.kbss.termit.dto.Snapshot; -import cz.cvut.kbss.termit.dto.filter.VocabularyContentChangeFilterDto; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.event.AssetPersistEvent; import cz.cvut.kbss.termit.event.AssetUpdateEvent; import cz.cvut.kbss.termit.event.BeforeAssetDeleteEvent; @@ -45,6 +43,7 @@ import cz.cvut.kbss.termit.model.validation.ValidationResult; import cz.cvut.kbss.termit.persistence.context.DescriptorFactory; import cz.cvut.kbss.termit.persistence.context.VocabularyContextMapper; +import cz.cvut.kbss.termit.persistence.dao.changetracking.ChangeRecordDao; import cz.cvut.kbss.termit.persistence.dao.changetracking.ChangeTrackingContextResolver; import cz.cvut.kbss.termit.persistence.snapshot.AssetSnapshotLoader; import cz.cvut.kbss.termit.persistence.validation.VocabularyContentValidator; @@ -92,6 +91,7 @@ public class VocabularyDao extends BaseAssetDao private static final String REMOVE_GLOSSARY_TERMS_QUERY_FILE = "remove/removeGlossaryTerms.ru"; private final 
ChangeTrackingContextResolver changeTrackingContextResolver; + private final ChangeRecordDao changeRecordDao; private volatile long lastModified; @@ -102,12 +102,13 @@ public class VocabularyDao extends BaseAssetDao @Autowired public VocabularyDao(EntityManager em, Configuration config, DescriptorFactory descriptorFactory, VocabularyContextMapper contextMapper, ApplicationContext context, - ChangeTrackingContextResolver changeTrackingContextResolver) { + ChangeTrackingContextResolver changeTrackingContextResolver, ChangeRecordDao changeRecordDao) { super(Vocabulary.class, em, config.getPersistence(), descriptorFactory); this.contextMapper = contextMapper; refreshLastModified(); this.context = context; this.changeTrackingContextResolver = changeTrackingContextResolver; + this.changeRecordDao = changeRecordDao; } @Override @@ -408,88 +409,15 @@ public List getChangesOfContent(Vocabulary vocabulary) { * @param pageReq Specification of the size and number of the page to return * @return List of change records, ordered by date in descending order */ - public List getDetailedHistoryOfContent(Vocabulary vocabulary, VocabularyContentChangeFilterDto filter, Pageable pageReq) { + public List getDetailedHistoryOfContent(Vocabulary vocabulary, ChangeRecordFilterDto filter, Pageable pageReq) { Objects.requireNonNull(vocabulary); - return createDetailedContentChangesQuery(vocabulary, filter, pageReq).getResultList(); - } - - private TypedQuery createDetailedContentChangesQuery(Vocabulary vocabulary, VocabularyContentChangeFilterDto filter, Pageable pageReq) { - TypedQuery query = em.createNativeQuery(""" - SELECT DISTINCT ?record WHERE { -""" + /* Select anything from change context */ """ - GRAPH ?changeContext { - ?record a ?changeRecord . - } -""" + /* The record should be a subclass of "zmena" */ """ - ?changeRecord ?subClassOf+ ?zmena . - ?record ?relatesTo ?term ; - ?hasTime ?timestamp ; - ?hasAuthor ?author . -""" + /* Get author's name */ """ - ?author ?hasFirstName ?firstName ; - ?hasLastName ?lastName . - BIND(CONCAT(?firstName, " ", ?lastName) as ?authorFullName) -""" + /* When its update record, there will be a changed attribute */ """ - OPTIONAL { - ?record ?hasChangedAttribute ?attribute . - ?attribute ?hasRdfsLabel ?changedAttributeName . - } -""" + /* Get term's name (but the term might have been already deleted) */ """ - OPTIONAL { - ?term a ?termType ; - ?hasLabel ?label . - } -""" + /* then try to get the label from (delete) record */ """ - OPTIONAL { - ?record ?hasRdfsLabel ?label . - } -""" + /* When label is still not bound, the term was probably deleted, find the delete record and get the label from it */ """ - OPTIONAL { - FILTER(!BOUND(?label)) . - ?deleteRecord a ; - ?term; - ?label. 
- } - BIND(?termName as ?termNameVal) - BIND(?authorName as ?authorNameVal) - BIND(?attributeName as ?changedAttributeNameVal) - FILTER (!BOUND(?termNameVal) || CONTAINS(LCASE(?label), LCASE(?termNameVal))) - FILTER (!BOUND(?authorNameVal) || CONTAINS(LCASE(?authorFullName), LCASE(?authorNameVal))) - FILTER (!BOUND(?changedAttributeNameVal) || CONTAINS(LCASE(?changedAttributeName), LCASE(?changedAttributeNameVal))) - } ORDER BY DESC(?timestamp) ?attribute - """, AbstractChangeRecord.class) - .setParameter("changeContext", changeTrackingContextResolver.resolveChangeTrackingContext(vocabulary)) - .setParameter("subClassOf", URI.create(RDFS.SUB_CLASS_OF)) - .setParameter("zmena", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_c_zmena)) - .setParameter("termType", URI.create(SKOS.CONCEPT)) - .setParameter("relatesTo", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_zmenenou_entitu)) - .setParameter("hasTime", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_datum_a_cas_modifikace)) - .setParameter("hasChangedAttribute", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_zmeneny_atribut)) - .setParameter("hasLabel", URI.create(SKOS.PREF_LABEL)) // term label - .setParameter("hasAuthor", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_editora)) // record has author - .setParameter("hasFirstName", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_krestni_jmeno)) - .setParameter("hasLastName", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_prijmeni)) - .setParameter("hasRdfsLabel", URI.create(RDFS.LABEL)); // changed attribute label - - if(!Utils.isBlank(filter.getTermName())) { - query = query.setParameter("termName", filter.getTermName().trim()); - } - if (!Utils.isBlank(filter.getAuthorName())) { - query = query.setParameter("authorName", filter.getAuthorName().trim()); - } - if (filter.getChangeType() != null) { - query = query.setParameter("changeRecord", filter.getChangeType()); - } - if (!Utils.isBlank(filter.getChangedAttributeName())) { - query = query.setParameter("attributeName", filter.getChangedAttributeName().trim()); - } - - if(pageReq.isUnpaged()) { - return query; - } - - return query.setFirstResult((int) pageReq.getOffset()) - .setMaxResults(pageReq.getPageSize()); + return changeRecordDao.findAllFiltered( + changeTrackingContextResolver.resolveChangeTrackingContext(vocabulary), + filter, + Optional.empty(), + Optional.of(URI.create(SKOS.CONCEPT)), // term + pageReq + ); } private Query createContentChangesQuery(Vocabulary vocabulary) { diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java index eadfa6b8a..b2f6718aa 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java @@ -20,18 +20,25 @@ import cz.cvut.kbss.jopa.model.EntityManager; import cz.cvut.kbss.jopa.model.descriptors.Descriptor; import cz.cvut.kbss.jopa.model.descriptors.EntityDescriptor; +import cz.cvut.kbss.jopa.model.query.TypedQuery; +import cz.cvut.kbss.jopa.vocabulary.RDFS; +import cz.cvut.kbss.jopa.vocabulary.SKOS; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.exception.PersistenceException; import cz.cvut.kbss.termit.model.Asset; import cz.cvut.kbss.termit.model.User; import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; import 
cz.cvut.kbss.termit.model.util.HasIdentifier; +import cz.cvut.kbss.termit.util.Utils; import cz.cvut.kbss.termit.util.Vocabulary; +import org.springframework.data.domain.Pageable; import org.springframework.stereotype.Repository; import java.net.URI; import java.util.HashSet; import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.Set; @Repository @@ -66,13 +73,120 @@ public void persist(AbstractChangeRecord record, Asset changedAsset) { } } + public List findAll(Asset asset, ChangeRecordFilterDto filterDto) { + if (filterDto.isEmpty()) { + // there is nothing to filter, simple query can be used + return findAll(asset); + } + return findAllFiltered(contextResolver.resolveChangeTrackingContext(asset), filterDto, Optional.of(asset), Optional.empty(), Pageable.unpaged()); + } + + /** + * @param changeContext the context of change records + * @param filter filter parameters + * @param asset if present, only changes of the asset will be returned + * @param assetType if present, only changes related to this asset type will be returned. + */ + public List findAllFiltered(URI changeContext, ChangeRecordFilterDto filter, Optional> asset, Optional assetType, Pageable pageable) { + TypedQuery query = em.createNativeQuery(""" + SELECT DISTINCT ?record WHERE { +""" + /* Select anything from change context */ """ + GRAPH ?changeContext { + ?record a ?changeRecord . + } +""" + /* The record should be a subclass of changeType ("zmena") and have timestamp and author */ """ + ?changeRecord ?subClassOf+ ?changeType . + ?record ?hasChangedEntity ?asset ; + ?hasTime ?timestamp ; + ?hasAuthor ?author . + ?asset a ?assetType . +""" + /* Get author's name */ """ + ?author ?hasFirstName ?firstName ; + ?hasLastName ?lastName . + BIND(CONCAT(?firstName, " ", ?lastName) as ?authorFullName) +""" + /* When its update record, there will be a changed attribute */ """ + OPTIONAL { + ?record ?hasChangedAttribute ?attribute . + ?attribute ?hasRdfsLabel ?changedAttributeLabel . + } +""" + /* Get asset's name (but the asset might have been already deleted) */ """ + OPTIONAL { + ?asset ?hasLabel ?label . + } + OPTIONAL { + ?asset ?hasRdfsLabel ?label . + } +""" + /* then try to get the label from (delete) record */ """ + OPTIONAL { + ?record ?hasRdfsLabel ?label . + } +""" + /* When label is still not bound, the term was probably deleted, find the delete record and get the label from it */ """ + OPTIONAL { + FILTER(!BOUND(?label)) . + ?deleteRecord a ?deleteRecordType; + ?hasChangedEntity ?term; + ?hasRdfsLabel ?label. 
+ } + BIND(?assetLabelValue as ?assetLabel) + BIND(?authorNameValue as ?authorName) + BIND(?attributeNameValue as ?changedAttributeName) + FILTER (!BOUND(?assetLabel) || CONTAINS(LCASE(?label), LCASE(?assetLabel))) + FILTER (!BOUND(?authorName) || CONTAINS(LCASE(?authorFullName), LCASE(?authorName))) + FILTER (!BOUND(?changedAttributeName) || CONTAINS(LCASE(?changedAttributeLabel), LCASE(?changedAttributeName))) + } ORDER BY DESC(?timestamp) ?attribute + """, AbstractChangeRecord.class) + .setParameter("changeContext", changeContext) + .setParameter("subClassOf", URI.create(RDFS.SUB_CLASS_OF)) + .setParameter("changeType", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_c_zmena)) + .setParameter("hasChangedEntity", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_zmenenou_entitu)) + .setParameter("hasTime", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_datum_a_cas_modifikace)) + .setParameter("hasAuthor", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_editora)) // record has author + .setParameter("hasFirstName", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_krestni_jmeno)) + .setParameter("hasLastName", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_prijmeni)) + // Optional - update change record + .setParameter("hasChangedAttribute", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_zmeneny_atribut)) + .setParameter("hasRdfsLabel", URI.create(RDFS.LABEL)) + // Optional - + .setParameter("hasLabel", URI.create(SKOS.PREF_LABEL)) + + // Optional asset label + .setParameter("deleteRecordType", Vocabulary.s_c_smazani_entity); + + if(asset.isPresent() && asset.get().getUri() != null) { + query = query.setParameter("asset", asset.get().getUri()); + } else if (assetType.isPresent()) { + query = query.setParameter("assetType", assetType.get()); + } + + + if(!Utils.isBlank(filter.getAssetLabel())) { + query = query.setParameter("assetLabelValue", filter.getAssetLabel().trim()); + } + if (!Utils.isBlank(filter.getAuthorName())) { + query = query.setParameter("authorNameValue", filter.getAuthorName().trim()); + } + if (filter.getChangeType() != null) { + query = query.setParameter("changeRecord", filter.getChangeType()); + } + if (!Utils.isBlank(filter.getChangedAttributeName())) { + query = query.setParameter("attributeNameValue", filter.getChangedAttributeName().trim()); + } + + if(pageable.isUnpaged()) { + return query.getResultList(); + } + + return query.setFirstResult((int) pageable.getOffset()) + .setMaxResults(pageable.getPageSize()).getResultList(); + } + /** * Finds all change records to the specified asset. 
* * @param asset The changed asset * @return List of change records ordered by timestamp (descending) */ - public List findAll(HasIdentifier asset) { + public List findAll(Asset asset) { Objects.requireNonNull(asset); try { final Descriptor descriptor = new EntityDescriptor(); diff --git a/src/main/java/cz/cvut/kbss/termit/rest/ResourceController.java b/src/main/java/cz/cvut/kbss/termit/rest/ResourceController.java index 11bb65415..d504d4bd2 100644 --- a/src/main/java/cz/cvut/kbss/termit/rest/ResourceController.java +++ b/src/main/java/cz/cvut/kbss/termit/rest/ResourceController.java @@ -18,6 +18,7 @@ package cz.cvut.kbss.termit.rest; import cz.cvut.kbss.jsonld.JsonLd; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.exception.TermItException; import cz.cvut.kbss.termit.model.TextAnalysisRecord; import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; @@ -360,7 +361,7 @@ public List getHistory( required = false) Optional namespace) { final Resource resource = resourceService .getReference(resolveIdentifier(resourceNamespace(namespace), localName)); - return resourceService.getChanges(resource); + return resourceService.getChanges(resource, new ChangeRecordFilterDto()); // TODO: filter dto } /** diff --git a/src/main/java/cz/cvut/kbss/termit/rest/TermController.java b/src/main/java/cz/cvut/kbss/termit/rest/TermController.java index 9fc059aa9..5c215d955 100644 --- a/src/main/java/cz/cvut/kbss/termit/rest/TermController.java +++ b/src/main/java/cz/cvut/kbss/termit/rest/TermController.java @@ -18,6 +18,7 @@ package cz.cvut.kbss.termit.rest; import cz.cvut.kbss.jsonld.JsonLd; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.dto.listing.TermDto; import cz.cvut.kbss.termit.exception.TermItException; import cz.cvut.kbss.termit.model.Term; @@ -699,7 +700,7 @@ public List getHistory( @Parameter(description = ApiDoc.ID_NAMESPACE_DESCRIPTION, example = ApiDoc.ID_NAMESPACE_EXAMPLE) @RequestParam(name = QueryParams.NAMESPACE, required = false) Optional namespace) { final URI termUri = getTermUri(localName, termLocalName, namespace); - return termService.getChanges(termService.findRequired(termUri)); + return termService.getChanges(termService.findRequired(termUri), new ChangeRecordFilterDto()); // TODO: filter dto } /** @@ -722,9 +723,21 @@ public List getHistory(@Parameter(description = ApiDoc.ID_ @PathVariable String localName, @Parameter(description = ApiDoc.ID_STANDALONE_NAMESPACE_DESCRIPTION, example = ApiDoc.ID_STANDALONE_NAMESPACE_EXAMPLE) - @RequestParam(name = QueryParams.NAMESPACE) String namespace) { + @RequestParam(name = QueryParams.NAMESPACE) String namespace, + @Parameter(description = "Change type used for filtering.") + @RequestParam(name = "type", required = false) URI changeType, + @Parameter(description = "Author name used for filtering.") + @RequestParam(name = "author", required = false, + defaultValue = "") String authorName, + @Parameter(description = "Changed attribute name used for filtering.") + @RequestParam(name = "attribute", required = false, + defaultValue = "") String changedAttributeName) { final URI termUri = idResolver.resolveIdentifier(namespace, localName); - return termService.getChanges(termService.findRequired(termUri)); + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); + filter.setChangeType(changeType); + filter.setAuthorName(authorName); + filter.setChangedAttributeName(changedAttributeName); + return 
termService.getChanges(termService.findRequired(termUri), filter); } @Operation(security = {@SecurityRequirement(name = "bearer-key")}, diff --git a/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java b/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java index 13858a4e0..912ebf1f0 100644 --- a/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java +++ b/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java @@ -22,7 +22,7 @@ import cz.cvut.kbss.termit.dto.RdfsStatement; import cz.cvut.kbss.termit.dto.Snapshot; import cz.cvut.kbss.termit.dto.acl.AccessControlListDto; -import cz.cvut.kbss.termit.dto.filter.VocabularyContentChangeFilterDto; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.dto.listing.VocabularyDto; import cz.cvut.kbss.termit.model.Vocabulary; import cz.cvut.kbss.termit.model.acl.AccessControlRecord; @@ -266,7 +266,7 @@ public List getHistory( required = false) Optional namespace) { final Vocabulary vocabulary = vocabularyService.getReference( resolveVocabularyUri(localName, namespace)); - return vocabularyService.getChanges(vocabulary); + return vocabularyService.getChanges(vocabulary, new ChangeRecordFilterDto()); // TODO: filter dto } @Operation(security = {@SecurityRequirement(name = "bearer-key")}, @@ -297,34 +297,31 @@ public List getHistoryOfContent( @GetMapping(value = "/{localName}/history-of-content/detail", produces = {MediaType.APPLICATION_JSON_VALUE, JsonLd.MEDIA_TYPE}) public List getDetailedHistoryOfContent( - @Parameter(description = ApiDoc.ID_LOCAL_NAME_DESCRIPTION, example = ApiDoc.ID_LOCAL_NAME_EXAMPLE) - @PathVariable - String localName, - @Parameter(description = ApiDoc.ID_NAMESPACE_DESCRIPTION, example = ApiDoc.ID_NAMESPACE_EXAMPLE) - @RequestParam(name = QueryParams.NAMESPACE, required = false) - Optional namespace, - @Parameter(description = "Term name to be used in filtering.") - @RequestParam(name = "term", required = false, defaultValue = "") - String termName, - @Parameter(description = "Change type to be used in filtering.") - @RequestParam(name = "type", required = false) - URI changeType, - @Parameter(description = "Author name to be used in filtering.") - @RequestParam(name = "author", required = false, defaultValue = "") - String authorName, - @Parameter(description = "Changed attribute name to be used in filtering.") - @RequestParam(name = "attribute", required = false, defaultValue = "") - String changedAttributeName, - @Parameter(description = ApiDocConstants.PAGE_SIZE_DESCRIPTION) - @RequestParam(name = Constants.QueryParams.PAGE_SIZE, required = false, defaultValue = DEFAULT_PAGE_SIZE) - Integer pageSize, - @Parameter(description = ApiDocConstants.PAGE_NO_DESCRIPTION) - @RequestParam(name = Constants.QueryParams.PAGE, required = false, defaultValue = DEFAULT_PAGE) - Integer pageNo) { + @Parameter(description = ApiDoc.ID_LOCAL_NAME_DESCRIPTION, + example = ApiDoc.ID_LOCAL_NAME_EXAMPLE) @PathVariable String localName, + @Parameter(description = ApiDoc.ID_NAMESPACE_DESCRIPTION, + example = ApiDoc.ID_NAMESPACE_EXAMPLE) @RequestParam(name = QueryParams.NAMESPACE, + required = false) Optional namespace, + @Parameter(description = "Term name used for filtering.") @RequestParam(name = "term", + required = false, + defaultValue = "") String termName, + @Parameter(description = "Change type used for filtering.") @RequestParam(name = "type", + required = false) URI changeType, + @Parameter(description = "Author name used for filtering.") @RequestParam(name = 
"author", + required = false, + defaultValue = "") String authorName, + @Parameter(description = "Changed attribute name used for filtering.") @RequestParam( + name = "attribute", required = false, defaultValue = "") String changedAttributeName, + + @Parameter(description = ApiDocConstants.PAGE_SIZE_DESCRIPTION) @RequestParam( + name = Constants.QueryParams.PAGE_SIZE, required = false, + defaultValue = DEFAULT_PAGE_SIZE) Integer pageSize, + @Parameter(description = ApiDocConstants.PAGE_NO_DESCRIPTION) @RequestParam( + name = Constants.QueryParams.PAGE, required = false, defaultValue = DEFAULT_PAGE) Integer pageNo) { final Pageable pageReq = createPageRequest(pageSize, pageNo); final Vocabulary vocabulary = vocabularyService.getReference(resolveVocabularyUri(localName, namespace)); - final VocabularyContentChangeFilterDto filter = new VocabularyContentChangeFilterDto(); - filter.setTermName(termName); + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); + filter.setAssetLabel(termName); filter.setChangeType(changeType); filter.setAuthorName(authorName); filter.setChangedAttributeName(changedAttributeName); diff --git a/src/main/java/cz/cvut/kbss/termit/service/business/ResourceService.java b/src/main/java/cz/cvut/kbss/termit/service/business/ResourceService.java index f8d8f87a3..412644c83 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/business/ResourceService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/business/ResourceService.java @@ -18,6 +18,7 @@ package cz.cvut.kbss.termit.service.business; import cz.cvut.kbss.termit.asset.provenance.SupportsLastModification; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.event.DocumentRenameEvent; import cz.cvut.kbss.termit.event.FileRenameEvent; import cz.cvut.kbss.termit.event.VocabularyWillBeRemovedEvent; @@ -369,8 +370,8 @@ public long getLastModified() { } @Override - public List getChanges(Resource asset) { - return changeRecordService.getChanges(asset); + public List getChanges(Resource asset, ChangeRecordFilterDto filterDto) { + return changeRecordService.getChanges(asset, filterDto); } @Override diff --git a/src/main/java/cz/cvut/kbss/termit/service/business/TermService.java b/src/main/java/cz/cvut/kbss/termit/service/business/TermService.java index b22bc1e67..1578b4888 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/business/TermService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/business/TermService.java @@ -20,6 +20,7 @@ import cz.cvut.kbss.termit.dto.RdfsResource; import cz.cvut.kbss.termit.dto.Snapshot; import cz.cvut.kbss.termit.dto.assignment.TermOccurrences; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.dto.listing.TermDto; import cz.cvut.kbss.termit.exception.InvalidTermStateException; import cz.cvut.kbss.termit.exception.NotFoundException; @@ -545,9 +546,9 @@ private void checkForInvalidTerminalStateAssignment(Term term, URI state) { } @Override - public List getChanges(Term term) { + public List getChanges(Term term, ChangeRecordFilterDto filterDto) { Objects.requireNonNull(term); - return changeRecordService.getChanges(term); + return changeRecordService.getChanges(term, filterDto); } /** diff --git a/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java b/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java index a0f252171..08281353c 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java +++ 
b/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java @@ -22,7 +22,7 @@ import cz.cvut.kbss.termit.dto.RdfsStatement; import cz.cvut.kbss.termit.dto.Snapshot; import cz.cvut.kbss.termit.dto.acl.AccessControlListDto; -import cz.cvut.kbss.termit.dto.filter.VocabularyContentChangeFilterDto; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.dto.listing.TermDto; import cz.cvut.kbss.termit.dto.listing.VocabularyDto; import cz.cvut.kbss.termit.event.VocabularyContentModifiedEvent; @@ -299,8 +299,8 @@ public TypeAwareResource getExcelTemplateFile() { } @Override - public List getChanges(Vocabulary asset) { - return changeRecordService.getChanges(asset); + public List getChanges(Vocabulary asset, ChangeRecordFilterDto filterDto) { + return changeRecordService.getChanges(asset, filterDto); } /** @@ -320,7 +320,7 @@ public List getChangesOfContent(Vocabulary vocabulary) { * @param pageReq Specification of the size and number of the page to return * @return List of change records, ordered by date in descending order */ - public List getDetailedHistoryOfContent(Vocabulary vocabulary, VocabularyContentChangeFilterDto filter, Pageable pageReq) { + public List getDetailedHistoryOfContent(Vocabulary vocabulary, ChangeRecordFilterDto filter, Pageable pageReq) { return repositoryService.getDetailedHistoryOfContent(vocabulary, filter, pageReq); } diff --git a/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeRecordProvider.java b/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeRecordProvider.java index 41396793d..d6f034dfe 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeRecordProvider.java +++ b/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeRecordProvider.java @@ -17,8 +17,9 @@ */ package cz.cvut.kbss.termit.service.changetracking; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; +import cz.cvut.kbss.termit.model.Asset; import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; -import cz.cvut.kbss.termit.model.util.HasIdentifier; import java.util.List; @@ -27,7 +28,17 @@ * * @param Type of asset to get changes for */ -public interface ChangeRecordProvider { +public interface ChangeRecordProvider> { + + /** + * Gets change records of the specified asset + * filtered by {@link ChangeRecordFilterDto}. + * + * @param asset Asset to find change records for + * @param filterDto Filter parameters + * @return List of change records, ordered by record timestamp in descending order + */ + List getChanges(T asset, ChangeRecordFilterDto filterDto); /** * Gets change records of the specified asset. 
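[Illustrative aside, not part of the patch series] The following hunk keeps the original single-argument getChanges as a default method that delegates to the new filtered variant with an empty ChangeRecordFilterDto, so existing call sites stay source-compatible and simply receive unfiltered history. A minimal sketch of such a call site, with assumed variable names:

    // Assumed context: a ChangeRecordService (or any ChangeRecordProvider) and an asset such as a Term;
    // needed imports: java.util.List, cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord,
    // cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto
    List<AbstractChangeRecord> history = changeRecordService.getChanges(term);
    // after this commit the line above is equivalent to:
    List<AbstractChangeRecord> sameHistory = changeRecordService.getChanges(term, new ChangeRecordFilterDto());
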
@@ -35,5 +46,7 @@ public interface ChangeRecordProvider { * @param asset Asset to find change records for * @return List of change records, ordered by record timestamp in descending order */ - List getChanges(T asset); + default List getChanges(T asset) { + return getChanges(asset, new ChangeRecordFilterDto()); + } } diff --git a/src/main/java/cz/cvut/kbss/termit/service/repository/ChangeRecordService.java b/src/main/java/cz/cvut/kbss/termit/service/repository/ChangeRecordService.java index 6dc0c6ad8..5dc24350b 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/repository/ChangeRecordService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/repository/ChangeRecordService.java @@ -17,6 +17,8 @@ */ package cz.cvut.kbss.termit.service.repository; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; +import cz.cvut.kbss.termit.model.Asset; import cz.cvut.kbss.termit.model.User; import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; import cz.cvut.kbss.termit.model.util.HasIdentifier; @@ -29,7 +31,7 @@ import java.util.Set; @Service -public class ChangeRecordService implements ChangeRecordProvider { +public class ChangeRecordService implements ChangeRecordProvider> { private final ChangeRecordDao changeRecordDao; @@ -39,8 +41,8 @@ public ChangeRecordService(ChangeRecordDao changeRecordDao) { } @Override - public List getChanges(HasIdentifier asset) { - return changeRecordDao.findAll(asset); + public List getChanges(Asset asset, ChangeRecordFilterDto filterDto) { + return changeRecordDao.findAll(asset, filterDto); } /** diff --git a/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java b/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java index 43e7cde00..c63f85b4a 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java @@ -21,7 +21,7 @@ import cz.cvut.kbss.termit.dto.PrefixDeclaration; import cz.cvut.kbss.termit.dto.RdfsStatement; import cz.cvut.kbss.termit.dto.Snapshot; -import cz.cvut.kbss.termit.dto.filter.VocabularyContentChangeFilterDto; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.dto.listing.VocabularyDto; import cz.cvut.kbss.termit.dto.mapper.DtoMapper; import cz.cvut.kbss.termit.exception.AssetRemovalException; @@ -229,7 +229,7 @@ public List getChangesOfContent(Vocabulary vocabulary) { * @return List of change records, ordered by date in descending order */ @Transactional(readOnly = true) - public List getDetailedHistoryOfContent(Vocabulary vocabulary, VocabularyContentChangeFilterDto filter, Pageable pageReq) { + public List getDetailedHistoryOfContent(Vocabulary vocabulary, ChangeRecordFilterDto filter, Pageable pageReq) { return vocabularyDao.getDetailedHistoryOfContent(vocabulary, filter, pageReq); } diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java index 3c6d7b295..abc32c176 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java @@ -26,7 +26,7 @@ import cz.cvut.kbss.termit.dto.PrefixDeclaration; import cz.cvut.kbss.termit.dto.RdfsStatement; import cz.cvut.kbss.termit.dto.Snapshot; -import cz.cvut.kbss.termit.dto.filter.VocabularyContentChangeFilterDto; +import 
cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.environment.Environment; import cz.cvut.kbss.termit.environment.Generator; import cz.cvut.kbss.termit.event.AssetPersistEvent; @@ -978,7 +978,7 @@ void getDetailedHistoryOfContentReturnsRecordsForAllChangeTypes() { changeRecordDao.persist(deleteChangeRecord, termToRemove); }); - final VocabularyContentChangeFilterDto filter = new VocabularyContentChangeFilterDto(); + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); final int recordsCount = firstChanges.size() + termToRemoveChanges.size() + 1; // +1 for the delete record final Pageable pageable = Pageable.ofSize(recordsCount * 3); final List contentChanges = sut.getDetailedHistoryOfContent(vocabulary, filter, pageable); @@ -1028,8 +1028,8 @@ void getDetailedHistoryOfContentReturnsRecordsOfExistingTermFilteredByTermName() thirdChanges.forEach(r -> changeRecordDao.persist(r, thirdTerm)); }); - final VocabularyContentChangeFilterDto filter = new VocabularyContentChangeFilterDto(); - filter.setTermName(needle); + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); + filter.setAssetLabel(needle); final int recordsCount = firstChanges.size() + secondChanges.size(); final Pageable pageable = Pageable.ofSize(recordsCount * 2); @@ -1083,8 +1083,8 @@ void getDetailedHistoryOfContentReturnsRecordsOfDeletedTermFilteredByTermName() termDao.remove(termToRemove); }); - final VocabularyContentChangeFilterDto filter = new VocabularyContentChangeFilterDto(); - filter.setTermName(needle); + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); + filter.setAssetLabel(needle); final int recordsCount = termToRemoveChanges.size() + 1; // +1 for the delete record final Pageable pageable = Pageable.ofSize(recordsCount * 2); @@ -1142,7 +1142,7 @@ void getDetailedHistoryOfContentReturnsRecordsOfExistingTermFilteredByChangedAtt secondChanges.forEach(r -> changeRecordDao.persist(r, secondTerm)); }); - final VocabularyContentChangeFilterDto filter = new VocabularyContentChangeFilterDto(); + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); filter.setChangedAttributeName(changedAttributeName); final Pageable pageable = Pageable.ofSize(recordCount.get() * 2); @@ -1193,7 +1193,7 @@ void getDetailedHistoryOfContentReturnsRecordsOfExistingTermFilteredByAuthorName secondChanges.forEach(r -> changeRecordDao.persist(r, secondTerm)); }); - final VocabularyContentChangeFilterDto filter = new VocabularyContentChangeFilterDto(); + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); // full name without first two and last two characters filter.setAuthorName(anotherAuthor.getFullName().substring(2, anotherAuthor.getFullName().length() - 2)); @@ -1256,7 +1256,7 @@ void getDetailedHistoryOfContentReturnsRecordsOfExistingTermFilteredByChangeType termDao.remove(secondTerm); }); - final VocabularyContentChangeFilterDto filter = new VocabularyContentChangeFilterDto(); + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); // full name without first two and last two characters filter.setChangeType(typeUri); diff --git a/src/test/java/cz/cvut/kbss/termit/rest/ResourceControllerTest.java b/src/test/java/cz/cvut/kbss/termit/rest/ResourceControllerTest.java index bd50b7258..2b1adb9d2 100644 --- a/src/test/java/cz/cvut/kbss/termit/rest/ResourceControllerTest.java +++ b/src/test/java/cz/cvut/kbss/termit/rest/ResourceControllerTest.java @@ -19,6 +19,7 @@ import com.fasterxml.jackson.core.type.TypeReference; import 
cz.cvut.kbss.jsonld.JsonLd; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.environment.Environment; import cz.cvut.kbss.termit.environment.Generator; import cz.cvut.kbss.termit.exception.NotFoundException; @@ -396,7 +397,8 @@ void getHistoryReturnsListOfChangeRecordsForSpecifiedVocabulary() throws Excepti when(identifierResolverMock.resolveIdentifier(RESOURCE_NAMESPACE, RESOURCE_NAME)).thenReturn(resource.getUri()); when(resourceServiceMock.getReference(RESOURCE_URI)).thenReturn(resource); final List records = Collections.singletonList(Generator.generatePersistChange(resource)); - when(resourceServiceMock.getChanges(resource)).thenReturn(records); + final ChangeRecordFilterDto emptyFilter = new ChangeRecordFilterDto(); + when(resourceServiceMock.getChanges(resource, emptyFilter)).thenReturn(records); final MvcResult mvcResult = mockMvc .perform(get(PATH + "/" + RESOURCE_NAME + "/history").param(QueryParams.NAMESPACE, RESOURCE_NAMESPACE)) @@ -406,7 +408,7 @@ void getHistoryReturnsListOfChangeRecordsForSpecifiedVocabulary() throws Excepti }); assertNotNull(result); assertEquals(records, result); - verify(resourceServiceMock).getChanges(resource); + verify(resourceServiceMock).getChanges(resource, emptyFilter); } @Test diff --git a/src/test/java/cz/cvut/kbss/termit/rest/TermControllerTest.java b/src/test/java/cz/cvut/kbss/termit/rest/TermControllerTest.java index cedfb8e06..55554e910 100644 --- a/src/test/java/cz/cvut/kbss/termit/rest/TermControllerTest.java +++ b/src/test/java/cz/cvut/kbss/termit/rest/TermControllerTest.java @@ -23,6 +23,7 @@ import cz.cvut.kbss.jopa.vocabulary.SKOS; import cz.cvut.kbss.jsonld.JsonLd; import cz.cvut.kbss.termit.dto.Snapshot; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.dto.listing.TermDto; import cz.cvut.kbss.termit.environment.Environment; import cz.cvut.kbss.termit.environment.Generator; @@ -803,7 +804,8 @@ void getHistoryReturnsListOfChangeRecordsForSpecifiedTerm() throws Exception { term.setUri(termUri); when(termServiceMock.findRequired(term.getUri())).thenReturn(term); final List records = generateChangeRecords(term); - when(termServiceMock.getChanges(term)).thenReturn(records); + final ChangeRecordFilterDto emptyFilter = new ChangeRecordFilterDto(); + when(termServiceMock.getChanges(term, emptyFilter)).thenReturn(records); final MvcResult mvcResult = mockMvc .perform(get(PATH + VOCABULARY_NAME + "/terms/" + TERM_NAME + "/history")) @@ -812,6 +814,7 @@ void getHistoryReturnsListOfChangeRecordsForSpecifiedTerm() throws Exception { }); assertNotNull(result); assertEquals(records, result); + verify(termServiceMock).getChanges(term, emptyFilter); } private List generateChangeRecords(Term term) { @@ -833,7 +836,8 @@ void getHistoryStandaloneReturnsListOfChangeRecordsForSpecifiedTerm() throws Exc when(idResolverMock.resolveIdentifier(NAMESPACE, TERM_NAME)).thenReturn(termUri); when(termServiceMock.findRequired(termUri)).thenReturn(term); final List records = generateChangeRecords(term); - when(termServiceMock.getChanges(term)).thenReturn(records); + final ChangeRecordFilterDto emptyFilter = new ChangeRecordFilterDto(); + when(termServiceMock.getChanges(term, emptyFilter)).thenReturn(records); final MvcResult mvcResult = mockMvc .perform(get("/terms/" + TERM_NAME + "/history").param(QueryParams.NAMESPACE, NAMESPACE)) @@ -843,6 +847,7 @@ void getHistoryStandaloneReturnsListOfChangeRecordsForSpecifiedTerm() throws Exc }); assertNotNull(result); assertEquals(records, result); + 
verify(termServiceMock).getChanges(term, emptyFilter); } @Test diff --git a/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java b/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java index a7e8677c6..2ebe5c579 100644 --- a/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java +++ b/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java @@ -22,7 +22,7 @@ import cz.cvut.kbss.termit.dto.AggregatedChangeInfo; import cz.cvut.kbss.termit.dto.Snapshot; import cz.cvut.kbss.termit.dto.acl.AccessControlListDto; -import cz.cvut.kbss.termit.dto.filter.VocabularyContentChangeFilterDto; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.dto.listing.VocabularyDto; import cz.cvut.kbss.termit.environment.Environment; import cz.cvut.kbss.termit.environment.Generator; @@ -431,7 +431,8 @@ void getHistoryReturnsListOfChangeRecordsForSpecifiedVocabulary() throws Excepti final Vocabulary vocabulary = generateVocabularyAndInitReferenceResolution(); final List records = Generator.generateChangeRecords(vocabulary, user); - when(serviceMock.getChanges(vocabulary)).thenReturn(records); + final ChangeRecordFilterDto emptyFilter = new ChangeRecordFilterDto(); + when(serviceMock.getChanges(vocabulary, emptyFilter)).thenReturn(records); final MvcResult mvcResult = mockMvc.perform(get(PATH + "/" + FRAGMENT + "/history")).andExpect(status().isOk()) @@ -441,7 +442,7 @@ void getHistoryReturnsListOfChangeRecordsForSpecifiedVocabulary() throws Excepti }); assertNotNull(result); assertEquals(records, result); - verify(serviceMock).getChanges(vocabulary); + verify(serviceMock).getChanges(vocabulary,emptyFilter); } @Test @@ -653,7 +654,7 @@ void getDetailedHistoryOfContentReturnsListOfChangeRecordsWhenNoFilterIsSpecifie final Vocabulary vocabulary = generateVocabularyAndInitReferenceResolution(); final Term term = Generator.generateTermWithId(); final List changeRecords = IntStream.range(0, 5).mapToObj(i -> Generator.generateChangeRecords(term, user)).flatMap(List::stream).toList(); - final VocabularyContentChangeFilterDto filter = new VocabularyContentChangeFilterDto(); + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); final Pageable pageable = Pageable.ofSize(pageSize); doReturn(changeRecords).when(serviceMock).getDetailedHistoryOfContent(vocabulary, filter, pageable); diff --git a/src/test/java/cz/cvut/kbss/termit/service/business/ResourceServiceTest.java b/src/test/java/cz/cvut/kbss/termit/service/business/ResourceServiceTest.java index 6119b0f90..3f9f8695a 100644 --- a/src/test/java/cz/cvut/kbss/termit/service/business/ResourceServiceTest.java +++ b/src/test/java/cz/cvut/kbss/termit/service/business/ResourceServiceTest.java @@ -17,6 +17,7 @@ */ package cz.cvut.kbss.termit.service.business; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.environment.Environment; import cz.cvut.kbss.termit.environment.Generator; import cz.cvut.kbss.termit.event.DocumentRenameEvent; @@ -426,9 +427,10 @@ void getLastModifiedReturnsValueFromRepositoryService() { void getChangesLoadsChangeRecordsForSpecifiedAssetFromChangeRecordService() { final Resource resource = Generator.generateResourceWithId(); final List records = Collections.singletonList(Generator.generatePersistChange(resource)); - when(changeRecordService.getChanges(resource)).thenReturn(records); + final ChangeRecordFilterDto filterDto = new ChangeRecordFilterDto(); + when(changeRecordService.getChanges(resource, 
filterDto)).thenReturn(records); assertEquals(records, sut.getChanges(resource)); - verify(changeRecordService).getChanges(resource); + verify(changeRecordService).getChanges(resource, filterDto); } @Test diff --git a/src/test/java/cz/cvut/kbss/termit/service/business/TermServiceTest.java b/src/test/java/cz/cvut/kbss/termit/service/business/TermServiceTest.java index 5ea15780f..4a222635f 100644 --- a/src/test/java/cz/cvut/kbss/termit/service/business/TermServiceTest.java +++ b/src/test/java/cz/cvut/kbss/termit/service/business/TermServiceTest.java @@ -21,6 +21,7 @@ import cz.cvut.kbss.termit.dto.RdfsResource; import cz.cvut.kbss.termit.dto.TermInfo; import cz.cvut.kbss.termit.dto.assignment.TermOccurrences; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.dto.listing.TermDto; import cz.cvut.kbss.termit.environment.Environment; import cz.cvut.kbss.termit.environment.Generator; @@ -396,7 +397,7 @@ void setTermDefinitionReplacesExistingTermDefinition() { void getChangesRetrievesChangeRecordsFromChangeRecordService() { final Term asset = Generator.generateTermWithId(); sut.getChanges(asset); - verify(changeRecordService).getChanges(asset); + verify(changeRecordService).getChanges(asset, new ChangeRecordFilterDto()); } @Test diff --git a/src/test/java/cz/cvut/kbss/termit/service/business/VocabularyServiceTest.java b/src/test/java/cz/cvut/kbss/termit/service/business/VocabularyServiceTest.java index 6cc2d505d..ca6b97ab2 100644 --- a/src/test/java/cz/cvut/kbss/termit/service/business/VocabularyServiceTest.java +++ b/src/test/java/cz/cvut/kbss/termit/service/business/VocabularyServiceTest.java @@ -19,6 +19,7 @@ import cz.cvut.kbss.termit.dto.Snapshot; import cz.cvut.kbss.termit.dto.acl.AccessControlListDto; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.dto.listing.TermDto; import cz.cvut.kbss.termit.dto.listing.VocabularyDto; import cz.cvut.kbss.termit.environment.Environment; @@ -184,10 +185,11 @@ void getChangesRetrievesChangesForVocabulary() { final Vocabulary vocabulary = Generator.generateVocabularyWithId(); final List records = Generator.generateChangeRecords(vocabulary, Generator.generateUserWithId()); - when(changeRecordService.getChanges(vocabulary)).thenReturn(records); + final ChangeRecordFilterDto emptyFilter = new ChangeRecordFilterDto(); + when(changeRecordService.getChanges(vocabulary, emptyFilter)).thenReturn(records); final List result = sut.getChanges(vocabulary); assertEquals(records, result); - verify(changeRecordService).getChanges(vocabulary); + verify(changeRecordService).getChanges(vocabulary, emptyFilter); } @Test From 54b819ed0e3a13332cde72be144b4da0a986ab20 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Sun, 10 Nov 2024 17:09:08 +0100 Subject: [PATCH 17/49] [Enhancement kbss-cvut/termit-ui#520] Add filtering options for vocabulary & term history endpoints --- .../dto/filter/ChangeRecordFilterDto.java | 15 +++++++++++++ .../kbss/termit/rest/ResourceController.java | 2 +- .../cvut/kbss/termit/rest/TermController.java | 16 +++++++++++--- .../termit/rest/VocabularyController.java | 22 ++++++++++++++----- 4 files changed, 45 insertions(+), 10 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/dto/filter/ChangeRecordFilterDto.java b/src/main/java/cz/cvut/kbss/termit/dto/filter/ChangeRecordFilterDto.java index 9f06efbd0..e3b00750f 100644 --- a/src/main/java/cz/cvut/kbss/termit/dto/filter/ChangeRecordFilterDto.java +++ 
b/src/main/java/cz/cvut/kbss/termit/dto/filter/ChangeRecordFilterDto.java @@ -72,4 +72,19 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(assetLabel, changedAttributeName, authorName, changeType); } + + + /** + * Constants for the Open API documentation of the REST API. + */ + public static final class ApiDoc { + public static final String TERM_NAME_DESCRIPTION = "Name of the term used for filtering."; + public static final String CHANGE_TYPE_DESCRIPTION = "Type of the change used for filtering."; + public static final String AUTHOR_NAME_DESCRIPTION = "Name of the author of the change used for filtering."; + public static final String CHANGED_ATTRIBUTE_DESCRIPTION = "Name of the changed attribute used for filtering."; + + private ApiDoc() { + throw new AssertionError(); + } + } } diff --git a/src/main/java/cz/cvut/kbss/termit/rest/ResourceController.java b/src/main/java/cz/cvut/kbss/termit/rest/ResourceController.java index d504d4bd2..0c659485d 100644 --- a/src/main/java/cz/cvut/kbss/termit/rest/ResourceController.java +++ b/src/main/java/cz/cvut/kbss/termit/rest/ResourceController.java @@ -361,7 +361,7 @@ public List getHistory( required = false) Optional namespace) { final Resource resource = resourceService .getReference(resolveIdentifier(resourceNamespace(namespace), localName)); - return resourceService.getChanges(resource, new ChangeRecordFilterDto()); // TODO: filter dto + return resourceService.getChanges(resource, new ChangeRecordFilterDto()); } /** diff --git a/src/main/java/cz/cvut/kbss/termit/rest/TermController.java b/src/main/java/cz/cvut/kbss/termit/rest/TermController.java index 5c215d955..250c6c0be 100644 --- a/src/main/java/cz/cvut/kbss/termit/rest/TermController.java +++ b/src/main/java/cz/cvut/kbss/termit/rest/TermController.java @@ -698,9 +698,19 @@ public List getHistory( @Parameter(description = ApiDoc.ID_TERM_LOCAL_NAME_DESCRIPTION, example = ApiDoc.ID_TERM_LOCAL_NAME_EXAMPLE) @PathVariable String termLocalName, @Parameter(description = ApiDoc.ID_NAMESPACE_DESCRIPTION, example = ApiDoc.ID_NAMESPACE_EXAMPLE) - @RequestParam(name = QueryParams.NAMESPACE, required = false) Optional namespace) { + @RequestParam(name = QueryParams.NAMESPACE, required = false) Optional namespace, + @Parameter(description = ChangeRecordFilterDto.ApiDoc.CHANGE_TYPE_DESCRIPTION) + @RequestParam(name = "type", required = false) URI changeType, + @Parameter(description = ChangeRecordFilterDto.ApiDoc.AUTHOR_NAME_DESCRIPTION) + @RequestParam(name = "author", required = false, defaultValue = "") String authorName, + @Parameter(description = ChangeRecordFilterDto.ApiDoc.CHANGED_ATTRIBUTE_DESCRIPTION) + @RequestParam(name = "attribute", required = false, defaultValue = "") String changedAttributeName) { final URI termUri = getTermUri(localName, termLocalName, namespace); - return termService.getChanges(termService.findRequired(termUri), new ChangeRecordFilterDto()); // TODO: filter dto + final ChangeRecordFilterDto filterDto = new ChangeRecordFilterDto(); + filterDto.setChangeType(changeType); + filterDto.setAuthorName(authorName); + filterDto.setChangedAttributeName(changedAttributeName); + return termService.getChanges(termService.findRequired(termUri), filterDto); } /** @@ -708,7 +718,7 @@ public List getHistory( *

* This is a convenience method to allow access without using the Term's parent Vocabulary. * - * @see #getHistory(String, String, Optional) + * @see #getHistory */ @Operation(security = {@SecurityRequirement(name = "bearer-key")}, description = "Gets a list of changes made to metadata of the term with the specified identifier.") diff --git a/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java b/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java index 912ebf1f0..f59ad8384 100644 --- a/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java +++ b/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java @@ -263,10 +263,20 @@ public List getHistory( @Parameter(description = ApiDoc.ID_NAMESPACE_DESCRIPTION, example = ApiDoc.ID_NAMESPACE_EXAMPLE) @RequestParam(name = QueryParams.NAMESPACE, - required = false) Optional namespace) { + required = false) Optional namespace, + @Parameter(description = ChangeRecordFilterDto.ApiDoc.CHANGE_TYPE_DESCRIPTION) + @RequestParam(name = "type", required = false) URI changeType, + @Parameter(description = ChangeRecordFilterDto.ApiDoc.AUTHOR_NAME_DESCRIPTION) + @RequestParam(name = "author", required = false, defaultValue = "") String authorName, + @Parameter(description = ChangeRecordFilterDto.ApiDoc.CHANGED_ATTRIBUTE_DESCRIPTION) + @RequestParam(name = "attribute", required = false, defaultValue = "") String changedAttributeName) { final Vocabulary vocabulary = vocabularyService.getReference( resolveVocabularyUri(localName, namespace)); - return vocabularyService.getChanges(vocabulary, new ChangeRecordFilterDto()); // TODO: filter dto + final ChangeRecordFilterDto filterDto = new ChangeRecordFilterDto(); + filterDto.setChangeType(changeType); + filterDto.setAuthorName(authorName); + filterDto.setChangedAttributeName(changedAttributeName); + return vocabularyService.getChanges(vocabulary, filterDto); } @Operation(security = {@SecurityRequirement(name = "bearer-key")}, @@ -302,15 +312,15 @@ public List getDetailedHistoryOfContent( @Parameter(description = ApiDoc.ID_NAMESPACE_DESCRIPTION, example = ApiDoc.ID_NAMESPACE_EXAMPLE) @RequestParam(name = QueryParams.NAMESPACE, required = false) Optional namespace, - @Parameter(description = "Term name used for filtering.") @RequestParam(name = "term", + @Parameter(description = ChangeRecordFilterDto.ApiDoc.TERM_NAME_DESCRIPTION) @RequestParam(name = "term", required = false, defaultValue = "") String termName, - @Parameter(description = "Change type used for filtering.") @RequestParam(name = "type", + @Parameter(description = ChangeRecordFilterDto.ApiDoc.CHANGE_TYPE_DESCRIPTION) @RequestParam(name = "type", required = false) URI changeType, - @Parameter(description = "Author name used for filtering.") @RequestParam(name = "author", + @Parameter(description = ChangeRecordFilterDto.ApiDoc.AUTHOR_NAME_DESCRIPTION) @RequestParam(name = "author", required = false, defaultValue = "") String authorName, - @Parameter(description = "Changed attribute name used for filtering.") @RequestParam( + @Parameter(description = ChangeRecordFilterDto.ApiDoc.CHANGED_ATTRIBUTE_DESCRIPTION) @RequestParam( name = "attribute", required = false, defaultValue = "") String changedAttributeName, @Parameter(description = ApiDocConstants.PAGE_SIZE_DESCRIPTION) @RequestParam( From 8a1dcc973261ea648d85169835028631a2265e33 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Mon, 11 Nov 2024 13:59:41 +0100 Subject: [PATCH 18/49] [Enhancement kbss-cvut/termit-ui#520] Fix tests after 
rebase --- .../cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java index abc32c176..fa795b572 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java @@ -1075,6 +1075,9 @@ void getDetailedHistoryOfContentReturnsRecordsOfDeletedTermFilteredByTermName() termDao.persist(firstTerm, vocabulary); termDao.persist(termToRemove, vocabulary); + Generator.addTermInVocabularyRelationship(firstTerm, vocabulary.getUri(), em); + Generator.addTermInVocabularyRelationship(termToRemove, vocabulary.getUri(), em); + firstChanges.forEach(r -> changeRecordDao.persist(r, firstTerm)); termToRemoveChanges.forEach(r -> changeRecordDao.persist(r, termToRemove)); changeRecordDao.persist(deleteChangeRecord, termToRemove); @@ -1248,6 +1251,9 @@ void getDetailedHistoryOfContentReturnsRecordsOfExistingTermFilteredByChangeType termDao.persist(firstTerm, vocabulary); termDao.persist(secondTerm, vocabulary); + Generator.addTermInVocabularyRelationship(firstTerm, vocabulary.getUri(), em); + Generator.addTermInVocabularyRelationship(secondTerm, vocabulary.getUri(), em); + firstChanges.forEach(r -> changeRecordDao.persist(r, firstTerm)); secondChanges.forEach(r -> changeRecordDao.persist(r, secondTerm)); changeRecordDao.persist(deleteChangeRecord, secondTerm); From 433461735e0d271569c9a66a039f06156e4683b2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Wed, 13 Nov 2024 14:30:02 +0100 Subject: [PATCH 19/49] [Enhancement kbss-cvut/termit-ui#520] Fix change record filter query --- .../dao/changetracking/ChangeRecordDao.java | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java index b2f6718aa..38e640f22 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java @@ -99,7 +99,18 @@ public List findAllFiltered(URI changeContext, ChangeRecor ?record ?hasChangedEntity ?asset ; ?hasTime ?timestamp ; ?hasAuthor ?author . - ?asset a ?assetType . +""" + /* Find an asset type if it is known (deleted assets does not have a type */ """ + + OPTIONAL { + ?asset a ?assetType + } + OPTIONAL { + ?asset a ?assetTypeVal . + BIND(true as ?isAssetType) + } +""" + /* filter assets without a type (deleted) or with a matching type */ """ + BIND(?assetTypeVal as ?assetTypeVar) + FILTER(!BOUND(?assetType) || !BOUND(?assetTypeVar) || BOUND(?isAssetType)) """ + /* Get author's name */ """ ?author ?hasFirstName ?firstName ; ?hasLastName ?lastName . @@ -124,8 +135,8 @@ public List findAllFiltered(URI changeContext, ChangeRecor OPTIONAL { FILTER(!BOUND(?label)) . ?deleteRecord a ?deleteRecordType; - ?hasChangedEntity ?term; - ?hasRdfsLabel ?label. + ?hasChangedEntity ?asset; + ?hasRdfsLabel ?label . 
} BIND(?assetLabelValue as ?assetLabel) BIND(?authorNameValue as ?authorName) @@ -155,7 +166,7 @@ public List findAllFiltered(URI changeContext, ChangeRecor if(asset.isPresent() && asset.get().getUri() != null) { query = query.setParameter("asset", asset.get().getUri()); } else if (assetType.isPresent()) { - query = query.setParameter("assetType", assetType.get()); + query = query.setParameter("assetTypeVal", assetType.get()); } From f3f6dafb9ec4abc962301386cf321a73f452af95 Mon Sep 17 00:00:00 2001 From: Martin Ledvinka Date: Mon, 11 Nov 2024 16:14:46 +0100 Subject: [PATCH 20/49] [GH-309] Allow running in development mode without configuring mail server. --- .../kbss/termit/service/mail/Postman.java | 11 ++++- .../cz/cvut/kbss/termit/util/Constants.java | 45 +++++++------------ .../java/cz/cvut/kbss/termit/util/Utils.java | 43 +++++++++++------- 3 files changed, 52 insertions(+), 47 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/service/mail/Postman.java b/src/main/java/cz/cvut/kbss/termit/service/mail/Postman.java index cb66781e9..04cd590ce 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/mail/Postman.java +++ b/src/main/java/cz/cvut/kbss/termit/service/mail/Postman.java @@ -19,6 +19,7 @@ import cz.cvut.kbss.termit.exception.PostmanException; import cz.cvut.kbss.termit.exception.ValidationException; +import cz.cvut.kbss.termit.util.Utils; import jakarta.mail.MessagingException; import jakarta.mail.internet.InternetAddress; import jakarta.mail.internet.MimeMessage; @@ -65,7 +66,12 @@ public Postman(Environment env, @Autowired(required = false) JavaMailSender mail @PostConstruct public void postConstruct() { - if(mailSender == null) { + if (mailSender == null) { + if (Utils.isDevelopmentProfile(env.getActiveProfiles())) { + LOG.warn( + "Mail server not configured but running in development mode. Will not be able to send messages."); + return; + } throw new ValidationException("Mail server not configured."); } } @@ -86,7 +92,8 @@ public void sendMessage(Message message) { final MimeMessage mail = mailSender.createMimeMessage(); final MimeMessageHelper helper = new MimeMessageHelper(mail, true); - helper.setFrom(new InternetAddress(sender != null ? sender : senderUsername, FROM_NICKNAME, StandardCharsets.UTF_8.toString())); + helper.setFrom(new InternetAddress(sender != null ? sender : senderUsername, FROM_NICKNAME, + StandardCharsets.UTF_8.toString())); helper.setTo(message.getRecipients().toArray(new String[]{})); helper.setSubject(message.getSubject()); helper.setText(message.getContent(), true); diff --git a/src/main/java/cz/cvut/kbss/termit/util/Constants.java b/src/main/java/cz/cvut/kbss/termit/util/Constants.java index 5d7ead6a9..7cb925992 100644 --- a/src/main/java/cz/cvut/kbss/termit/util/Constants.java +++ b/src/main/java/cz/cvut/kbss/termit/util/Constants.java @@ -153,6 +153,23 @@ public class Constants { "Notation", "Example", "References") ); + + /** + * the maximum amount of data to buffer when sending messages to a WebSocket session + */ + public static final int WEBSOCKET_SEND_BUFFER_SIZE_LIMIT = Integer.MAX_VALUE; + + /** + * Set the maximum time allowed in milliseconds after the WebSocket connection is established + * and before the first sub-protocol message is received. + */ + public static final int WEBSOCKET_TIME_TO_FIRST_MESSAGE = 15 * 1000 /* 15s */; + + /** + * Development Spring profile. 
+ */ + public static final String DEVELOPMENT_PROFILE = "development"; + private Constants() { throw new AssertionError(); } @@ -247,32 +264,4 @@ private QueryParams() { throw new AssertionError(); } } - - public static final class DebouncingGroups { - - /** - * Text analysis of all terms in specific vocabulary - */ - public static final String TEXT_ANALYSIS_VOCABULARY_TERMS_ALL_DEFINITIONS = "TEXT_ANALYSIS_VOCABULARY_TERMS_ALL_DEFINITIONS"; - - /** - * Text analysis of all vocabularies - */ - public static final String TEXT_ANALYSIS_VOCABULARY = "TEXT_ANALYSIS_VOCABULARY"; - - private DebouncingGroups() { - throw new AssertionError(); - } - } - - /** - * the maximum amount of data to buffer when sending messages to a WebSocket session - */ - public static final int WEBSOCKET_SEND_BUFFER_SIZE_LIMIT = Integer.MAX_VALUE; - - /** - * Set the maximum time allowed in milliseconds after the WebSocket connection is established - * and before the first sub-protocol message is received. - */ - public static final int WEBSOCKET_TIME_TO_FIRST_MESSAGE = 15 * 1000 /* 15s */; } diff --git a/src/main/java/cz/cvut/kbss/termit/util/Utils.java b/src/main/java/cz/cvut/kbss/termit/util/Utils.java index f8857028d..7adf76742 100644 --- a/src/main/java/cz/cvut/kbss/termit/util/Utils.java +++ b/src/main/java/cz/cvut/kbss/termit/util/Utils.java @@ -44,6 +44,7 @@ import java.time.Instant; import java.time.temporal.ChronoUnit; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; @@ -194,13 +195,20 @@ public static String getVocabularyIri(final Set conceptUris, String term if (conceptUris.isEmpty()) { throw new IllegalArgumentException("No namespace candidate."); } - final Iterator i = conceptUris.iterator(); - final String conceptUri = i.next(); + final String namespace = extractNamespace(termSeparator, conceptUri); + for (final String s : conceptUris) { + if (!s.startsWith(namespace)) { + throw new IllegalArgumentException( + "Not all Concept IRIs have the same namespace: " + conceptUri + " vs. " + namespace); + } + } + return namespace; + } + private static String extractNamespace(String termSeparator, String conceptUri) { final String separator; - if (conceptUri.lastIndexOf(termSeparator) > 0) { separator = termSeparator; } else if (conceptUri.lastIndexOf("#") > 0) { @@ -210,16 +218,7 @@ public static String getVocabularyIri(final Set conceptUris, String term } else { throw new IllegalArgumentException("The IRI does not have a proper format: " + conceptUri); } - - final String namespace = conceptUri.substring(0, conceptUri.lastIndexOf(separator)); - - for (final String s : conceptUris) { - if (!s.startsWith(namespace)) { - throw new IllegalArgumentException( - "Not all Concept IRIs have the same namespace: " + conceptUri + " vs. 
" + namespace); - } - } - return namespace; + return conceptUri.substring(0, conceptUri.lastIndexOf(separator)); } /** @@ -402,15 +401,25 @@ public static void pruneBlankTranslations(MultilingualString str) { /** * Converts the map into a string - * @return Empty string when the map is {@code null}, otherwise the String in format - * {@code {key=value, key=value}} + * + * @return Empty string when the map is {@code null}, otherwise the String in format {@code {key=value, key=value}} */ public static String mapToString(Map map) { if (map == null) { return ""; } return map.keySet().stream() - .map(key -> key + "=" + map.get(key)) - .collect(Collectors.joining(", ", "{", "}")); + .map(key -> key + "=" + map.get(key)) + .collect(Collectors.joining(", ", "{", "}")); + } + + /** + * Checks whether the {@code development} profile is active. + * + * @param activeProfiles Array of active profiles + * @return {@code true} if the {@code development} profile is active, {@code false} otherwise + */ + public static boolean isDevelopmentProfile(String[] activeProfiles) { + return Arrays.binarySearch(activeProfiles, Constants.DEVELOPMENT_PROFILE) != -1; } } From 5fbccba0d7b67711c9535693cfeca0d563a06da4 Mon Sep 17 00:00:00 2001 From: Martin Ledvinka Date: Wed, 13 Nov 2024 16:30:00 +0100 Subject: [PATCH 21/49] [GH-309] Remove text analysis service URL from application.yml It is not necessary in development. When one wants to test TermIt with text analysis, they should configure the real URL of the service. --- src/main/resources/application.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml index 8d9cae801..655043d51 100644 --- a/src/main/resources/application.yml +++ b/src/main/resources/application.yml @@ -66,8 +66,6 @@ termit: separator: /verze file: storage: /tmp/termit - textAnalysis: - url: http://localhost:8081/annotace/annotate changetracking: context: extension: /zmeny From 742a0174dcdc0da9826249710cb050dd70be0255 Mon Sep 17 00:00:00 2001 From: Martin Ledvinka Date: Wed, 13 Nov 2024 16:32:01 +0100 Subject: [PATCH 22/49] [GH-309] Document the development Spring profile. --- doc/setup.md | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/setup.md b/doc/setup.md index 839fe0b3c..af068c823 100644 --- a/doc/setup.md +++ b/doc/setup.md @@ -39,6 +39,7 @@ by the application: * `lucene` - decides whether Lucene text indexing is enabled and should be used in full text search queries. * `admin-registration-only` - decides whether new users can be registered only by application admin, or whether anyone can register. * `no-cache` - disables Ehcache, which is used to cache lists of resources and vocabularies for faster retrieval, and persistence cache. +* `development` - indicates that the application is running is development. This, for example, means that mail server does not need to be configured. The `lucene` Spring profile is activated automatically by the `graphdb` Maven. 
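A minimal sketch of activating the newly documented `development` profile, assuming the standard Spring Boot `spring.profiles.active` property; the placement and values below are illustrative and are not part of this patch:

    # application.yml -- illustrative only, not taken from the patches
    spring:
      profiles:
        active: development
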
`admin-registration-only` and `no-cache` have to be added either in `application.yml` directly, or one can pass the parameter to Maven build, e.g.: From edfbbe5bb4b891d98e0b00c3fcb8389aaf81c872 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Thu, 14 Nov 2024 14:19:35 +0100 Subject: [PATCH 23/49] [Enhancement kbss-cvut/termit-ui#520] Fix change record filter query --- .../dao/changetracking/ChangeRecordDao.java | 34 ++++++++++--------- .../persistence/dao/VocabularyDaoTest.java | 21 +++++------- 2 files changed, 27 insertions(+), 28 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java index 38e640f22..c0e660981 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java @@ -100,17 +100,16 @@ public List findAllFiltered(URI changeContext, ChangeRecor ?hasTime ?timestamp ; ?hasAuthor ?author . """ + /* Find an asset type if it is known (deleted assets does not have a type */ """ - - OPTIONAL { - ?asset a ?assetType - } OPTIONAL { - ?asset a ?assetTypeVal . - BIND(true as ?isAssetType) + ?asset a ?assetType . + OPTIONAL { + ?asset a ?assetTypeValue + BIND(true as ?isAssetType) + } } + FILTER(!BOUND(?assetType) || ?isAssetType) """ + /* filter assets without a type (deleted) or with a matching type */ """ - BIND(?assetTypeVal as ?assetTypeVar) - FILTER(!BOUND(?assetType) || !BOUND(?assetTypeVar) || BOUND(?isAssetType)) + """ + /* Get author's name */ """ ?author ?hasFirstName ?firstName ; ?hasLastName ?lastName . @@ -122,26 +121,29 @@ public List findAllFiltered(URI changeContext, ChangeRecor } """ + /* Get asset's name (but the asset might have been already deleted) */ """ OPTIONAL { - ?asset ?hasLabel ?label . + ?asset ?hasLabel ?assetPrefLabel . + BIND(?assetPrefLabel as ?finalAssetLabel) } OPTIONAL { - ?asset ?hasRdfsLabel ?label . + ?asset ?hasRdfsLabel ?assetRdfsLabel . + BIND(?assetRdfsLabel as ?finalAssetLabel) } """ + /* then try to get the label from (delete) record */ """ OPTIONAL { - ?record ?hasRdfsLabel ?label . + ?record ?hasRdfsLabel ?recordRdfsLabel . + BIND(?recordRdfsLabel as ?finalAssetLabel) } """ + /* When label is still not bound, the term was probably deleted, find the delete record and get the label from it */ """ OPTIONAL { - FILTER(!BOUND(?label)) . ?deleteRecord a ?deleteRecordType; ?hasChangedEntity ?asset; - ?hasRdfsLabel ?label . + ?hasRdfsLabel ?deleteRecordLabel . 
+ BIND(?deleteRecordLabel as ?finalAssetLabel) } BIND(?assetLabelValue as ?assetLabel) BIND(?authorNameValue as ?authorName) BIND(?attributeNameValue as ?changedAttributeName) - FILTER (!BOUND(?assetLabel) || CONTAINS(LCASE(?label), LCASE(?assetLabel))) + FILTER (!BOUND(?assetLabel) || CONTAINS(LCASE(?finalAssetLabel), LCASE(?assetLabel))) FILTER (!BOUND(?authorName) || CONTAINS(LCASE(?authorFullName), LCASE(?authorName))) FILTER (!BOUND(?changedAttributeName) || CONTAINS(LCASE(?changedAttributeLabel), LCASE(?changedAttributeName))) } ORDER BY DESC(?timestamp) ?attribute @@ -161,12 +163,12 @@ public List findAllFiltered(URI changeContext, ChangeRecor .setParameter("hasLabel", URI.create(SKOS.PREF_LABEL)) // Optional asset label - .setParameter("deleteRecordType", Vocabulary.s_c_smazani_entity); + .setParameter("deleteRecordType", URI.create(Vocabulary.s_c_smazani_entity)); if(asset.isPresent() && asset.get().getUri() != null) { query = query.setParameter("asset", asset.get().getUri()); } else if (assetType.isPresent()) { - query = query.setParameter("assetTypeVal", assetType.get()); + query = query.setParameter("assetTypeValue", assetType.get()); } diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java index fa795b572..df833d8ad 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java @@ -19,6 +19,7 @@ import cz.cvut.kbss.jopa.model.EntityManager; import cz.cvut.kbss.jopa.model.MultilingualString; +import cz.cvut.kbss.jopa.model.annotations.OWLClass; import cz.cvut.kbss.jopa.model.descriptors.Descriptor; import cz.cvut.kbss.jopa.vocabulary.RDFS; import cz.cvut.kbss.jopa.vocabulary.SKOS; @@ -1056,8 +1057,10 @@ void getDetailedHistoryOfContentReturnsRecordsOfDeletedTermFilteredByTermName() final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); // the needle is placed in the term which will be removed firstTerm.getLabel().set(Environment.LANGUAGE, mud); + firstTerm.setVocabulary(vocabulary.getUri()); final Term termToRemove = Generator.generateTermWithId(vocabulary.getUri()); termToRemove.getLabel().set(Environment.LANGUAGE, haystack); + termToRemove.setVocabulary(vocabulary.getUri()); final List firstChanges = Generator.generateChangeRecords(firstTerm, author); final List termToRemoveChanges = Generator.generateChangeRecords(termToRemove, author); @@ -1213,20 +1216,14 @@ void getDetailedHistoryOfContentReturnsRecordsOfExistingTermFilteredByAuthorName } @ParameterizedTest - @ValueSource(strings = { - cz.cvut.kbss.termit.util.Vocabulary.s_c_uprava_entity, - cz.cvut.kbss.termit.util.Vocabulary.s_c_vytvoreni_entity, - cz.cvut.kbss.termit.util.Vocabulary.s_c_smazani_entity, + @ValueSource(classes = { + UpdateChangeRecord.class, + PersistChangeRecord.class, + DeleteChangeRecord.class }) - void getDetailedHistoryOfContentReturnsRecordsOfExistingTermFilteredByChangeType(String type) { + void getDetailedHistoryOfContentReturnsRecordsOfExistingTermFilteredByChangeType(Class typeClass) { enableRdfsInference(em); - final URI typeUri = URI.create(type); - final Class typeClass = switch (type) { - case cz.cvut.kbss.termit.util.Vocabulary.s_c_uprava_entity -> UpdateChangeRecord.class; - case cz.cvut.kbss.termit.util.Vocabulary.s_c_vytvoreni_entity -> PersistChangeRecord.class; - case cz.cvut.kbss.termit.util.Vocabulary.s_c_smazani_entity -> DeleteChangeRecord.class; - default 
-> throw new IllegalArgumentException("Unknown change type: " + type); - }; + final URI typeUri = URI.create(typeClass.getAnnotation(OWLClass.class).iri()); // Two terms with needle in the label, one term without needle in the label final Vocabulary vocabulary = Generator.generateVocabularyWithId(); From ca0af58ddd9f4427a2271d36fcf5bb5afbf93315 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Thu, 14 Nov 2024 15:07:34 +0100 Subject: [PATCH 24/49] [Enhancement kbss-cvut/termit-ui#528] Change TermDao#resolveVocabularyId to return optional URI instead of throwing when term is not found (not in a vocabulary). --- .../cz/cvut/kbss/termit/persistence/dao/TermDao.java | 12 ++++++------ .../kbss/termit/persistence/dao/TermDaoTest.java | 9 +++++++++ 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/TermDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/TermDao.java index 052035b25..47180aa51 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/TermDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/TermDao.java @@ -91,8 +91,8 @@ protected URI labelProperty() { @Override public Optional find(URI id) { try { - final Optional result = Optional.ofNullable( - em.find(Term.class, id, descriptorFactory.termDescriptor(resolveVocabularyId(id)))); + final Optional result = resolveVocabularyId(id).map(vocabulary -> + em.find(Term.class, id, descriptorFactory.termDescriptor(vocabulary))); result.ifPresent(this::postLoad); return result; } catch (RuntimeException e) { @@ -100,14 +100,14 @@ public Optional find(URI id) { } } - private URI resolveVocabularyId(URI termId) { + private Optional resolveVocabularyId(URI termId) { try { - return em.createNativeQuery("SELECT DISTINCT ?v WHERE { ?t ?inVocabulary ?v . }", URI.class) + return Optional.of(em.createNativeQuery("SELECT DISTINCT ?v WHERE { ?t ?inVocabulary ?v . 
}", URI.class) .setParameter("t", termId) .setParameter("inVocabulary", TERM_FROM_VOCABULARY) - .getSingleResult(); + .getSingleResult()); } catch (NoResultException | NoUniqueResultException e) { - throw new PersistenceException("Unable to resolve term vocabulary.", e); + return Optional.empty(); } } diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/dao/TermDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/dao/TermDaoTest.java index 036c8bcf4..75ac29892 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/dao/TermDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/dao/TermDaoTest.java @@ -82,6 +82,7 @@ import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertInstanceOf; @@ -1394,4 +1395,12 @@ void findByIdLoadsTermFromVocabularyContextOnly() { assertTrue(result.isPresent()); assertFalse(result.get().getProperties().containsKey(property)); } + + @Test + void findByIdReturnsOptionalEmptyWhenTermDoesNotExists() { + final Term term = Generator.generateTermWithId(vocabulary.getUri()); + // trying to find a non-existing term + final Optional empty = assertDoesNotThrow(()-> sut.find(term.getUri())); + assertTrue(empty.isEmpty()); + } } From 6c5797b98f806c0a4ea7a1379155c6eb8158bd5b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Thu, 14 Nov 2024 16:46:15 +0100 Subject: [PATCH 25/49] [Enhancement kbss-cvut/termit-ui#520] Move ChangeRecordDao tests from VocabularyDaoTest to ChangeRecordDaoTest. --- .../dao/changetracking/ChangeRecordDao.java | 3 +- .../persistence/dao/VocabularyDaoTest.java | 342 +----------------- .../changetracking/ChangeRecordDaoTest.java | 283 +++++++++++++++ 3 files changed, 303 insertions(+), 325 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java index c0e660981..86376126b 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java @@ -107,9 +107,8 @@ public List findAllFiltered(URI changeContext, ChangeRecor BIND(true as ?isAssetType) } } - FILTER(!BOUND(?assetType) || ?isAssetType) """ + /* filter assets without a type (deleted) or with a matching type */ """ - + FILTER(!BOUND(?assetType) || ?isAssetType) """ + /* Get author's name */ """ ?author ?hasFirstName ?firstName ; ?hasLastName ?lastName . 
diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java index df833d8ad..18cddcaae 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java @@ -19,9 +19,7 @@ import cz.cvut.kbss.jopa.model.EntityManager; import cz.cvut.kbss.jopa.model.MultilingualString; -import cz.cvut.kbss.jopa.model.annotations.OWLClass; import cz.cvut.kbss.jopa.model.descriptors.Descriptor; -import cz.cvut.kbss.jopa.vocabulary.RDFS; import cz.cvut.kbss.jopa.vocabulary.SKOS; import cz.cvut.kbss.termit.dto.AggregatedChangeInfo; import cz.cvut.kbss.termit.dto.PrefixDeclaration; @@ -41,7 +39,6 @@ import cz.cvut.kbss.termit.model.User; import cz.cvut.kbss.termit.model.Vocabulary; import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; -import cz.cvut.kbss.termit.model.changetracking.DeleteChangeRecord; import cz.cvut.kbss.termit.model.changetracking.PersistChangeRecord; import cz.cvut.kbss.termit.model.changetracking.UpdateChangeRecord; import cz.cvut.kbss.termit.model.resource.Document; @@ -49,8 +46,8 @@ import cz.cvut.kbss.termit.model.util.EntityToOwlClassMapper; import cz.cvut.kbss.termit.persistence.context.DescriptorFactory; import cz.cvut.kbss.termit.persistence.dao.changetracking.ChangeRecordDao; +import cz.cvut.kbss.termit.persistence.dao.changetracking.ChangeTrackingContextResolver; import cz.cvut.kbss.termit.util.Constants; -import cz.cvut.kbss.termit.util.Utils; import org.eclipse.rdf4j.model.IRI; import org.eclipse.rdf4j.model.ValueFactory; import org.eclipse.rdf4j.model.vocabulary.RDF; @@ -61,10 +58,10 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; -import org.junit.jupiter.params.provider.ValueSource; import org.mockito.ArgumentCaptor; import org.mockito.Spy; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.mock.mockito.SpyBean; import org.springframework.context.ApplicationEvent; import org.springframework.context.ApplicationEventPublisher; import org.springframework.data.domain.Pageable; @@ -85,12 +82,9 @@ import java.util.List; import java.util.Map; import java.util.Optional; -import java.util.Random; import java.util.Set; -import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import java.util.stream.IntStream; -import java.util.stream.Stream; import static cz.cvut.kbss.termit.environment.util.ContainsSameEntities.containsSameEntities; import static org.hamcrest.MatcherAssert.assertThat; @@ -102,6 +96,7 @@ import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.atLeastOnce; +import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.verify; @DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_EACH_TEST_METHOD) @@ -122,10 +117,13 @@ class VocabularyDaoTest extends BaseDaoTestRunner { @Autowired private TermDao termDao; - - @Autowired + + @SpyBean private ChangeRecordDao changeRecordDao; + @SpyBean + private ChangeTrackingContextResolver changeTrackingContextResolver; + private User author; @BeforeEach @@ -951,322 +949,20 @@ void getAnyExternalRelationsReturnsTermsWithBothRelations(URI termRelation) { } @Test - void getDetailedHistoryOfContentReturnsRecordsForAllChangeTypes() { - 
enableRdfsInference(em); - - final Vocabulary vocabulary = Generator.generateVocabularyWithId(); - final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); - final Term termToRemove = Generator.generateTermWithId(vocabulary.getUri()); - - final List firstChanges = Generator.generateChangeRecords(firstTerm, author); - final List termToRemoveChanges = Generator.generateChangeRecords(termToRemove, author); - final DeleteChangeRecord deleteChangeRecord = new DeleteChangeRecord(); - deleteChangeRecord.setChangedEntity(termToRemove.getUri()); - deleteChangeRecord.setTimestamp(Utils.timestamp()); - deleteChangeRecord.setAuthor(author); - deleteChangeRecord.setLabel(termToRemove.getLabel()); - - transactional(() -> { - vocabulary.getGlossary().addRootTerm(firstTerm); - sut.persist(vocabulary); - Environment.addRelation(vocabulary.getUri(), URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_glosar), vocabulary.getGlossary().getUri(), em); - - termDao.persist(firstTerm, vocabulary); - termDao.persist(termToRemove, vocabulary); - - firstChanges.forEach(r -> changeRecordDao.persist(r, firstTerm)); - termToRemoveChanges.forEach(r -> changeRecordDao.persist(r, termToRemove)); - changeRecordDao.persist(deleteChangeRecord, termToRemove); - }); - - final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); - final int recordsCount = firstChanges.size() + termToRemoveChanges.size() + 1; // +1 for the delete record - final Pageable pageable = Pageable.ofSize(recordsCount * 3); - final List contentChanges = sut.getDetailedHistoryOfContent(vocabulary, filter, pageable); - - assertEquals(recordsCount, contentChanges.size()); - final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); - final long updatesCount = contentChanges.stream().filter(ch -> ch instanceof UpdateChangeRecord).count(); - final long deleteCount = contentChanges.stream().filter(ch -> ch instanceof DeleteChangeRecord).count(); - assertEquals(2, persistCount); - assertEquals(recordsCount - 3, updatesCount); // -2 persist records, -1 delete record - assertEquals(1, deleteCount); - } - - - @Test - void getDetailedHistoryOfContentReturnsRecordsOfExistingTermFilteredByTermName() { - enableRdfsInference(em); - - final String needle = "needle"; - final String haystack = "A label that contains needle somewhere"; - final String mud = "The n3edle is not here"; - - // Two terms with needle in the label, one term without needle in the label - final Vocabulary vocabulary = Generator.generateVocabularyWithId(); - final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); - firstTerm.getLabel().set(Environment.LANGUAGE, haystack); - final Term secondTerm = Generator.generateTermWithId(vocabulary.getUri()); - secondTerm.getLabel().set(mud + needle); - final Term thirdTerm = Generator.generateTermWithId(vocabulary.getUri()); - thirdTerm.getLabel().set(Environment.LANGUAGE, mud); - - final List firstChanges = Generator.generateChangeRecords(firstTerm, author); - final List secondChanges = Generator.generateChangeRecords(secondTerm, author); - final List thirdChanges = Generator.generateChangeRecords(thirdTerm, author); - - transactional(() -> { - vocabulary.getGlossary().addRootTerm(firstTerm); - sut.persist(vocabulary); - Environment.addRelation(vocabulary.getUri(), URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_glosar), vocabulary.getGlossary().getUri(), em); - - termDao.persist(firstTerm, vocabulary); - termDao.persist(secondTerm, vocabulary); - 
termDao.persist(thirdTerm, vocabulary); - - firstChanges.forEach(r -> changeRecordDao.persist(r, firstTerm)); - secondChanges.forEach(r -> changeRecordDao.persist(r, secondTerm)); - thirdChanges.forEach(r -> changeRecordDao.persist(r, thirdTerm)); - }); - - final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); - filter.setAssetLabel(needle); - - final int recordsCount = firstChanges.size() + secondChanges.size(); - final Pageable pageable = Pageable.ofSize(recordsCount * 2); - final List contentChanges = sut.getDetailedHistoryOfContent(vocabulary, filter, pageable); - - assertEquals(recordsCount, contentChanges.size()); - final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); - final long updatesCount = contentChanges.stream().filter(ch -> ch instanceof UpdateChangeRecord).count(); - final long deleteCount = contentChanges.stream().filter(ch -> ch instanceof DeleteChangeRecord).count(); - assertEquals(2, persistCount); - assertEquals(recordsCount - 2, updatesCount); // -2 persist records - assertEquals(0, deleteCount); - } - - @Test - void getDetailedHistoryOfContentReturnsRecordsOfDeletedTermFilteredByTermName() { - enableRdfsInference(em); - - final String needle = "needle"; - final String haystack = "A label that contains needle somewhere"; - final String mud = "The n3edle is not here"; - - final Vocabulary vocabulary = Generator.generateVocabularyWithId(); - final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); - // the needle is placed in the term which will be removed - firstTerm.getLabel().set(Environment.LANGUAGE, mud); - firstTerm.setVocabulary(vocabulary.getUri()); - final Term termToRemove = Generator.generateTermWithId(vocabulary.getUri()); - termToRemove.getLabel().set(Environment.LANGUAGE, haystack); - termToRemove.setVocabulary(vocabulary.getUri()); - - final List firstChanges = Generator.generateChangeRecords(firstTerm, author); - final List termToRemoveChanges = Generator.generateChangeRecords(termToRemove, author); - final DeleteChangeRecord deleteChangeRecord = new DeleteChangeRecord(); - deleteChangeRecord.setChangedEntity(termToRemove.getUri()); - deleteChangeRecord.setTimestamp(Utils.timestamp()); - deleteChangeRecord.setAuthor(author); - deleteChangeRecord.setLabel(termToRemove.getLabel()); - - transactional(() -> { - vocabulary.getGlossary().addRootTerm(firstTerm); - sut.persist(vocabulary); - Environment.addRelation(vocabulary.getUri(), URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_glosar), vocabulary.getGlossary().getUri(), em); - - termDao.persist(firstTerm, vocabulary); - termDao.persist(termToRemove, vocabulary); - - Generator.addTermInVocabularyRelationship(firstTerm, vocabulary.getUri(), em); - Generator.addTermInVocabularyRelationship(termToRemove, vocabulary.getUri(), em); - - firstChanges.forEach(r -> changeRecordDao.persist(r, firstTerm)); - termToRemoveChanges.forEach(r -> changeRecordDao.persist(r, termToRemove)); - changeRecordDao.persist(deleteChangeRecord, termToRemove); - - termToRemove.setVocabulary(vocabulary.getUri()); - termDao.remove(termToRemove); - }); - - final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); - filter.setAssetLabel(needle); - - final int recordsCount = termToRemoveChanges.size() + 1; // +1 for the delete record - final Pageable pageable = Pageable.ofSize(recordsCount * 2); - final List contentChanges = sut.getDetailedHistoryOfContent(vocabulary, filter, pageable); - - assertEquals(recordsCount, contentChanges.size()); - 
final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); - final long updatesCount = contentChanges.stream().filter(ch -> ch instanceof UpdateChangeRecord).count(); - final long deleteCount = contentChanges.stream().filter(ch -> ch instanceof DeleteChangeRecord).count(); - assertEquals(1, persistCount); - assertEquals(recordsCount - 2, updatesCount); // -1 persist record -1 delete record - assertEquals(1, deleteCount); - } - - @Test - void getDetailedHistoryOfContentReturnsRecordsOfExistingTermFilteredByChangedAttributeName() { - enableRdfsInference(em); - - // Two terms with needle in the label, one term without needle in the label - final Vocabulary vocabulary = Generator.generateVocabularyWithId(); - final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); - final Term secondTerm = Generator.generateTermWithId(vocabulary.getUri()); - - final List firstChanges = Generator.generateChangeRecords(firstTerm, author); - final List secondChanges = Generator.generateChangeRecords(secondTerm, author); - - // randomize changed attributes - final Random random = new Random(); - final AtomicInteger recordCount = new AtomicInteger(0); - final URI changedAttribute = URI.create(SKOS.DEFINITION); - final URI anotherChangedAttribute = URI.create(RDFS.LABEL); - final String changedAttributeName = "definition"; - - Stream.of(firstChanges, secondChanges).flatMap(Collection::stream) - .filter(r -> r instanceof UpdateChangeRecord) - .map(r -> (UpdateChangeRecord) r) - .forEach(r -> { - if(random.nextBoolean() || recordCount.get() == 0) { - r.setChangedAttribute(changedAttribute); - recordCount.incrementAndGet(); - } else { - r.setChangedAttribute(anotherChangedAttribute); - } - }); - - transactional(() -> { - vocabulary.getGlossary().addRootTerm(firstTerm); - sut.persist(vocabulary); - Environment.addRelation(vocabulary.getUri(), URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_glosar), vocabulary.getGlossary().getUri(), em); - - termDao.persist(firstTerm, vocabulary); - termDao.persist(secondTerm, vocabulary); - - firstChanges.forEach(r -> changeRecordDao.persist(r, firstTerm)); - secondChanges.forEach(r -> changeRecordDao.persist(r, secondTerm)); - }); - - final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); - filter.setChangedAttributeName(changedAttributeName); - - final Pageable pageable = Pageable.ofSize(recordCount.get() * 2); - final List contentChanges = sut.getDetailedHistoryOfContent(vocabulary, filter, pageable); - - assertEquals(recordCount.get(), contentChanges.size()); - final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); - final long updatesCount = contentChanges.stream().filter(ch -> ch instanceof UpdateChangeRecord).count(); - final long deleteCount = contentChanges.stream().filter(ch -> ch instanceof DeleteChangeRecord).count(); - assertEquals(0, persistCount); - assertEquals(recordCount.get(), updatesCount); - assertEquals(0, deleteCount); - } - - @Test - void getDetailedHistoryOfContentReturnsRecordsOfExistingTermFilteredByAuthorName() { - enableRdfsInference(em); - - // Two terms with needle in the label, one term without needle in the label - final Vocabulary vocabulary = Generator.generateVocabularyWithId(); - final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); - final Term secondTerm = Generator.generateTermWithId(vocabulary.getUri()); - - final List firstChanges = Generator.generateChangeRecords(firstTerm, author); - final 
List secondChanges = Generator.generateChangeRecords(secondTerm, author); - - // make new author - final User anotherAuthor = Generator.generateUserWithId(); - anotherAuthor.setFirstName("Karel"); - anotherAuthor.setLastName("Novák"); - transactional(() -> em.persist(anotherAuthor)); - Environment.setCurrentUser(anotherAuthor); - - final int recordCount = 2; - // author is this.author (Environment current user) - firstChanges.add(Generator.generateUpdateChange(firstTerm)); - secondChanges.add(Generator.generateUpdateChange(secondTerm)); - - transactional(() -> { - vocabulary.getGlossary().addRootTerm(firstTerm); - sut.persist(vocabulary); - Environment.addRelation(vocabulary.getUri(), URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_glosar), vocabulary.getGlossary().getUri(), em); - - termDao.persist(firstTerm, vocabulary); - termDao.persist(secondTerm, vocabulary); - - firstChanges.forEach(r -> changeRecordDao.persist(r, firstTerm)); - secondChanges.forEach(r -> changeRecordDao.persist(r, secondTerm)); - }); - - final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); - // full name without first two and last two characters - filter.setAuthorName(anotherAuthor.getFullName().substring(2, anotherAuthor.getFullName().length() - 2)); - - final Pageable pageable = Pageable.ofSize(4); - final List contentChanges = sut.getDetailedHistoryOfContent(vocabulary, filter, pageable); - - assertEquals(recordCount, contentChanges.size()); - final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); - final long updatesCount = contentChanges.stream().filter(ch -> ch instanceof UpdateChangeRecord).count(); - final long deleteCount = contentChanges.stream().filter(ch -> ch instanceof DeleteChangeRecord).count(); - assertEquals(0, persistCount); - assertEquals(recordCount, updatesCount); - assertEquals(0, deleteCount); - } - - @ParameterizedTest - @ValueSource(classes = { - UpdateChangeRecord.class, - PersistChangeRecord.class, - DeleteChangeRecord.class - }) - void getDetailedHistoryOfContentReturnsRecordsOfExistingTermFilteredByChangeType(Class typeClass) { - enableRdfsInference(em); - final URI typeUri = URI.create(typeClass.getAnnotation(OWLClass.class).iri()); - - // Two terms with needle in the label, one term without needle in the label + void getDetailedHistoryOfContentCallsChangeRecordDaoWithFilter() { final Vocabulary vocabulary = Generator.generateVocabularyWithId(); - final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); - final Term secondTerm = Generator.generateTermWithId(vocabulary.getUri()); - - final List firstChanges = Generator.generateChangeRecords(firstTerm, author); - final List secondChanges = Generator.generateChangeRecords(secondTerm, author); - final DeleteChangeRecord deleteChangeRecord = new DeleteChangeRecord(); - deleteChangeRecord.setChangedEntity(secondTerm.getUri()); - deleteChangeRecord.setTimestamp(Utils.timestamp()); - deleteChangeRecord.setAuthor(author); - deleteChangeRecord.setLabel(secondTerm.getLabel()); - - final int recordCount = (int) Stream.of(firstChanges, secondChanges, List.of(deleteChangeRecord)).flatMap(List::stream).filter(typeClass::isInstance).count(); - - transactional(() -> { - vocabulary.getGlossary().addRootTerm(firstTerm); - sut.persist(vocabulary); - Environment.addRelation(vocabulary.getUri(), URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_glosar), vocabulary.getGlossary().getUri(), em); - - termDao.persist(firstTerm, vocabulary); - termDao.persist(secondTerm, 
vocabulary); - - Generator.addTermInVocabularyRelationship(firstTerm, vocabulary.getUri(), em); - Generator.addTermInVocabularyRelationship(secondTerm, vocabulary.getUri(), em); - - firstChanges.forEach(r -> changeRecordDao.persist(r, firstTerm)); - secondChanges.forEach(r -> changeRecordDao.persist(r, secondTerm)); - changeRecordDao.persist(deleteChangeRecord, secondTerm); - - secondTerm.setVocabulary(vocabulary.getUri()); - termDao.remove(secondTerm); - }); + final List records = List.of(); + final Optional skosConcept = Optional.of(URI.create(SKOS.CONCEPT)); + final Pageable unpaged = Pageable.unpaged(); + final ChangeRecordFilterDto filterDto = new ChangeRecordFilterDto(); + filterDto.setAuthorName("Name of the author"); - final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); - // full name without first two and last two characters - filter.setChangeType(typeUri); + doReturn(vocabulary.getUri()).when(changeTrackingContextResolver).resolveChangeTrackingContext(vocabulary); + doReturn(records).when(changeRecordDao).findAllFiltered(vocabulary.getUri(), filterDto, Optional.empty(), skosConcept, unpaged); - final Pageable pageable = Pageable.ofSize( recordCount * 2); - final List contentChanges = sut.getDetailedHistoryOfContent(vocabulary, filter, pageable); + sut.getDetailedHistoryOfContent(vocabulary, filterDto, unpaged); - assertEquals(recordCount, contentChanges.size()); - assertTrue(contentChanges.stream().allMatch(typeClass::isInstance)); + verify(changeTrackingContextResolver).resolveChangeTrackingContext(vocabulary); + verify(changeRecordDao).findAllFiltered(vocabulary.getUri(), filterDto, Optional.empty(), skosConcept, unpaged); } } diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java index 5f43ef096..fec0da69b 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java @@ -19,34 +19,46 @@ import cz.cvut.kbss.jopa.model.EntityManager; import cz.cvut.kbss.jopa.model.MultilingualString; +import cz.cvut.kbss.jopa.model.annotations.OWLClass; import cz.cvut.kbss.jopa.model.descriptors.Descriptor; import cz.cvut.kbss.jopa.model.descriptors.EntityDescriptor; +import cz.cvut.kbss.jopa.vocabulary.RDFS; import cz.cvut.kbss.jopa.vocabulary.SKOS; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; +import cz.cvut.kbss.termit.environment.Environment; import cz.cvut.kbss.termit.environment.Generator; import cz.cvut.kbss.termit.model.Term; import cz.cvut.kbss.termit.model.User; import cz.cvut.kbss.termit.model.Vocabulary; import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; +import cz.cvut.kbss.termit.model.changetracking.DeleteChangeRecord; import cz.cvut.kbss.termit.model.changetracking.PersistChangeRecord; import cz.cvut.kbss.termit.model.changetracking.UpdateChangeRecord; import cz.cvut.kbss.termit.persistence.dao.BaseDaoTestRunner; import cz.cvut.kbss.termit.util.Utils; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.Pageable; import org.springframework.test.annotation.DirtiesContext; import java.net.URI; import java.time.Instant; import 
java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.List; import java.util.Optional; +import java.util.Random; import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import java.util.stream.IntStream; +import java.util.stream.Stream; import static org.hamcrest.CoreMatchers.anyOf; import static org.hamcrest.CoreMatchers.instanceOf; @@ -260,4 +272,275 @@ void getAuthorsRetrievesUsersAssociatedWithPersistChangeRecordsOfSpecifiedAsset( final Set result = sut.getAuthors(asset); assertEquals(Collections.singleton(author), result); } + + @Test + void findAllFilteredReturnsRecordsOfExistingTermFilteredByTermName() { + enableRdfsInference(em); + + final String needle = "needle"; + final String haystack = "A label that contains needle somewhere"; + final String mud = "The n3edle is not here"; + + // Two terms with needle in the label, one term without needle in the label + final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); + firstTerm.getLabel().set(Environment.LANGUAGE, haystack); + final Term secondTerm = Generator.generateTermWithId(vocabulary.getUri()); + secondTerm.getLabel().set(mud + needle); + final Term thirdTerm = Generator.generateTermWithId(vocabulary.getUri()); + thirdTerm.getLabel().set(Environment.LANGUAGE, mud); + + final List firstChanges = Generator.generateChangeRecords(firstTerm, author); + final List secondChanges = Generator.generateChangeRecords(secondTerm, author); + final List thirdChanges = Generator.generateChangeRecords(thirdTerm, author); + + final Descriptor vocabularyDescriptor = persistDescriptor(vocabulary.getUri()); + + transactional(() -> { + vocabulary.getGlossary().addRootTerm(firstTerm); + em.persist(vocabulary, vocabularyDescriptor); + Environment.addRelation(vocabulary.getUri(), URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_glosar), vocabulary.getGlossary().getUri(), em); + + em.persist(firstTerm, vocabularyDescriptor); + em.persist(secondTerm, vocabularyDescriptor); + em.persist(thirdTerm, vocabularyDescriptor); + + Stream.of(firstChanges, secondChanges, thirdChanges) + .flatMap(Collection::stream) + .forEach(r -> em.persist(r, vocabularyDescriptor)); + }); + + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); + filter.setAssetLabel(needle); + + final int recordsCount = firstChanges.size() + secondChanges.size(); + final Pageable pageable = Pageable.ofSize(recordsCount * 2); + + final List contentChanges = sut.findAllFiltered(vocabulary.getUri(), filter, Optional.empty(), Optional.of(URI.create(SKOS.CONCEPT)), pageable); + + assertEquals(recordsCount, contentChanges.size()); + final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); + final long updatesCount = contentChanges.stream().filter(ch -> ch instanceof UpdateChangeRecord).count(); + final long deleteCount = contentChanges.stream().filter(ch -> ch instanceof DeleteChangeRecord).count(); + assertEquals(2, persistCount); + assertEquals(recordsCount - 2, updatesCount); // -2 persist records + assertEquals(0, deleteCount); + } + + + @Test + void findAllFilteredReturnsRecordsOfDeletedTermFilteredByTermName() { + enableRdfsInference(em); + + final String needle = "needle"; + final String haystack = "A label that contains needle somewhere"; + final String mud = "The n3edle is not here"; + + final Term firstTerm = 
Generator.generateTermWithId(vocabulary.getUri()); + // the needle is placed in the term which will be removed + firstTerm.getLabel().set(Environment.LANGUAGE, mud); + firstTerm.setVocabulary(vocabulary.getUri()); + final Term termToRemove = Generator.generateTermWithId(vocabulary.getUri()); + termToRemove.getLabel().set(Environment.LANGUAGE, haystack); + termToRemove.setVocabulary(vocabulary.getUri()); + + final List firstChanges = Generator.generateChangeRecords(firstTerm, author); + final List termToRemoveChanges = Generator.generateChangeRecords(termToRemove, author); + final DeleteChangeRecord deleteChangeRecord = new DeleteChangeRecord(); + deleteChangeRecord.setChangedEntity(termToRemove.getUri()); + deleteChangeRecord.setTimestamp(Utils.timestamp()); + deleteChangeRecord.setAuthor(author); + deleteChangeRecord.setLabel(termToRemove.getLabel()); + + final Descriptor vocabularyDescriptor = persistDescriptor(vocabulary.getUri()); + + transactional(() -> { + em.persist(vocabulary); + + em.persist(firstTerm, vocabularyDescriptor); + em.persist(termToRemove, vocabularyDescriptor); + + Stream.of(firstChanges, termToRemoveChanges, List.of(deleteChangeRecord)) + .flatMap(Collection::stream) + .forEach(r -> em.persist(r, vocabularyDescriptor)); + }); + + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); + filter.setAssetLabel(needle); + + final int recordsCount = termToRemoveChanges.size() + 1; // +1 for the delete record + final Pageable pageable = Pageable.unpaged(); + + final List contentChanges = sut.findAllFiltered(vocabulary.getUri(), filter, Optional.empty(), Optional.of(URI.create(SKOS.CONCEPT)), pageable); + + assertEquals(recordsCount, contentChanges.size()); + final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); + final long updatesCount = contentChanges.stream().filter(ch -> ch instanceof UpdateChangeRecord).count(); + final long deleteCount = contentChanges.stream().filter(ch -> ch instanceof DeleteChangeRecord).count(); + assertEquals(1, persistCount); + assertEquals(recordsCount - 2, updatesCount); // -1 persist record -1 delete record + assertEquals(1, deleteCount); + } + + + @Test + void findAllFilteredReturnsRecordsOfExistingTermFilteredByChangedAttributeName() { + enableRdfsInference(em); + + // Two terms with needle in the label, one term without needle in the label + final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); + final Term secondTerm = Generator.generateTermWithId(vocabulary.getUri()); + + final List firstChanges = Generator.generateChangeRecords(firstTerm, author); + final List secondChanges = Generator.generateChangeRecords(secondTerm, author); + + // randomize changed attributes + final Random random = new Random(); + final AtomicInteger recordCount = new AtomicInteger(0); + final URI changedAttribute = URI.create(SKOS.DEFINITION); + final URI anotherChangedAttribute = URI.create(RDFS.LABEL); + final String changedAttributeName = "definition"; + + final Descriptor vocabularyDescriptor = persistDescriptor(vocabulary.getUri()); + + Stream.of(firstChanges, secondChanges).flatMap(Collection::stream) + .filter(r -> r instanceof UpdateChangeRecord) + .map(r -> (UpdateChangeRecord) r) + .forEach(r -> { + if(random.nextBoolean() || recordCount.get() == 0) { + r.setChangedAttribute(changedAttribute); + recordCount.incrementAndGet(); + } else { + r.setChangedAttribute(anotherChangedAttribute); + } + }); + + transactional(() -> { + em.persist(vocabulary); + + em.persist(firstTerm, 
vocabularyDescriptor); + em.persist(secondTerm, vocabularyDescriptor); + + Stream.of(firstChanges, secondChanges) + .flatMap(Collection::stream) + .forEach(r -> em.persist(r, vocabularyDescriptor)); + }); + + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); + filter.setChangedAttributeName(changedAttributeName); + + final Pageable pageable = Pageable.unpaged(); + + final List contentChanges = sut.findAllFiltered(vocabulary.getUri(), filter, Optional.empty(), Optional.of(URI.create(SKOS.CONCEPT)), pageable); + + assertEquals(recordCount.get(), contentChanges.size()); + final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); + final long updatesCount = contentChanges.stream().filter(ch -> ch instanceof UpdateChangeRecord).count(); + final long deleteCount = contentChanges.stream().filter(ch -> ch instanceof DeleteChangeRecord).count(); + assertEquals(0, persistCount); + assertEquals(recordCount.get(), updatesCount); + assertEquals(0, deleteCount); + } + + @Test + void findAllFilteredReturnsRecordsOfExistingTermFilteredByAuthorName() { + enableRdfsInference(em); + + // Two terms with needle in the label, one term without needle in the label + final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); + final Term secondTerm = Generator.generateTermWithId(vocabulary.getUri()); + + final List firstChanges = Generator.generateChangeRecords(firstTerm, author); + final List secondChanges = Generator.generateChangeRecords(secondTerm, author); + + // make new author + final User anotherAuthor = Generator.generateUserWithId(); + anotherAuthor.setFirstName("Karel"); + anotherAuthor.setLastName("Novák"); + transactional(() -> em.persist(anotherAuthor)); + Environment.setCurrentUser(anotherAuthor); + + final int recordCount = 2; + // author is this.author (Environment current user) + firstChanges.add(Generator.generateUpdateChange(firstTerm)); + secondChanges.add(Generator.generateUpdateChange(secondTerm)); + + final Descriptor vocabularyDescriptor = persistDescriptor(vocabulary.getUri()); + + transactional(() -> { + em.persist(vocabulary); + + em.persist(firstTerm, vocabularyDescriptor); + em.persist(secondTerm, vocabularyDescriptor); + + Stream.of(firstChanges, secondChanges) + .flatMap(Collection::stream) + .forEach(r -> em.persist(r, vocabularyDescriptor)); + }); + + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); + // full name without first two and last two characters + filter.setAuthorName(anotherAuthor.getFullName().substring(2, anotherAuthor.getFullName().length() - 2)); + + final Pageable pageable = Pageable.unpaged(); + + final List contentChanges = sut.findAllFiltered(vocabulary.getUri(), filter, Optional.empty(), Optional.of(URI.create(SKOS.CONCEPT)), pageable); + + assertEquals(recordCount, contentChanges.size()); + final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); + final long updatesCount = contentChanges.stream().filter(ch -> ch instanceof UpdateChangeRecord).count(); + final long deleteCount = contentChanges.stream().filter(ch -> ch instanceof DeleteChangeRecord).count(); + assertEquals(0, persistCount); + assertEquals(recordCount, updatesCount); + assertEquals(0, deleteCount); + } + + @ParameterizedTest + @ValueSource(classes = { + UpdateChangeRecord.class, + PersistChangeRecord.class, + DeleteChangeRecord.class + }) + void findAllFilteredReturnsRecordsOfExistingTermFilteredByChangeType(Class typeClass) { + 
enableRdfsInference(em); + final URI typeUri = URI.create(typeClass.getAnnotation(OWLClass.class).iri()); + + // Two terms with needle in the label, one term without needle in the label + final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); + final Term secondTerm = Generator.generateTermWithId(vocabulary.getUri()); + + final List firstChanges = Generator.generateChangeRecords(firstTerm, author); + final List secondChanges = Generator.generateChangeRecords(secondTerm, author); + final DeleteChangeRecord deleteChangeRecord = new DeleteChangeRecord(); + deleteChangeRecord.setChangedEntity(secondTerm.getUri()); + deleteChangeRecord.setTimestamp(Utils.timestamp()); + deleteChangeRecord.setAuthor(author); + deleteChangeRecord.setLabel(secondTerm.getLabel()); + + final int recordCount = (int) Stream.of(firstChanges, secondChanges, List.of(deleteChangeRecord)).flatMap(List::stream).filter(typeClass::isInstance).count(); + + final Descriptor vocabularyDescriptor = persistDescriptor(vocabulary.getUri()); + + transactional(() -> { + em.persist(vocabulary); + + em.persist(firstTerm, vocabularyDescriptor); + em.persist(secondTerm, vocabularyDescriptor); + + Stream.of(firstChanges, secondChanges, List.of(deleteChangeRecord)) + .flatMap(Collection::stream) + .forEach(r -> em.persist(r, vocabularyDescriptor)); + }); + + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); + // full name without first two and last two characters + filter.setChangeType(typeUri); + + final Pageable pageable = Pageable.unpaged(); + + final List contentChanges = sut.findAllFiltered(vocabulary.getUri(), filter, Optional.empty(), Optional.of(URI.create(SKOS.CONCEPT)), pageable); + + assertEquals(recordCount, contentChanges.size()); + assertTrue(contentChanges.stream().allMatch(typeClass::isInstance)); + } + } From e572355c86ae57130706219f45c015b39f434131 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Thu, 14 Nov 2024 21:10:35 +0100 Subject: [PATCH 26/49] [Enhancement kbss-cvut/termit-ui#520] Fix tests not saving change records to change context --- .../dao/changetracking/ChangeRecordDao.java | 62 +++++++++---------- .../changetracking/ChangeRecordDaoTest.java | 27 +++++--- .../repository/ChangeRecordServiceTest.java | 8 ++- 3 files changed, 55 insertions(+), 42 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java index 86376126b..4098d32b8 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java @@ -17,8 +17,8 @@ */ package cz.cvut.kbss.termit.persistence.dao.changetracking; +import cz.cvut.kbss.jopa.exceptions.NoResultException; import cz.cvut.kbss.jopa.model.EntityManager; -import cz.cvut.kbss.jopa.model.descriptors.Descriptor; import cz.cvut.kbss.jopa.model.descriptors.EntityDescriptor; import cz.cvut.kbss.jopa.model.query.TypedQuery; import cz.cvut.kbss.jopa.vocabulary.RDFS; @@ -73,12 +73,30 @@ public void persist(AbstractChangeRecord record, Asset changedAsset) { } } + /** + * Finds all change records related to the specified asset. 
+ * + * @param asset the asset + * @return list of change records + */ + public List findAll(Asset asset) { + return findAll(asset, new ChangeRecordFilterDto()); + } + + /** + * + * @param asset + * @param filterDto + * @return + */ public List findAll(Asset asset, ChangeRecordFilterDto filterDto) { - if (filterDto.isEmpty()) { - // there is nothing to filter, simple query can be used - return findAll(asset); + URI changeTrackingContext = null; + try { + changeTrackingContext = contextResolver.resolveChangeTrackingContext(asset); + } catch (NoResultException e) { + return List.of(); } - return findAllFiltered(contextResolver.resolveChangeTrackingContext(asset), filterDto, Optional.of(asset), Optional.empty(), Pageable.unpaged()); + return findAllFiltered(changeTrackingContext, filterDto, Optional.of(asset), Optional.empty(), Pageable.unpaged()); } /** @@ -100,6 +118,7 @@ public List findAllFiltered(URI changeContext, ChangeRecor ?hasTime ?timestamp ; ?hasAuthor ?author . """ + /* Find an asset type if it is known (deleted assets does not have a type */ """ + BIND(?assetTypeValue as ?assetTypeVar) OPTIONAL { ?asset a ?assetType . OPTIONAL { @@ -108,7 +127,7 @@ public List findAllFiltered(URI changeContext, ChangeRecor } } """ + /* filter assets without a type (deleted) or with a matching type */ """ - FILTER(!BOUND(?assetType) || ?isAssetType) + FILTER(!BOUND(?assetTypeVar) || !BOUND(?assetType) || BOUND(?isAssetType)) """ + /* Get author's name */ """ ?author ?hasFirstName ?firstName ; ?hasLastName ?lastName . @@ -149,7 +168,7 @@ public List findAllFiltered(URI changeContext, ChangeRecor """, AbstractChangeRecord.class) .setParameter("changeContext", changeContext) .setParameter("subClassOf", URI.create(RDFS.SUB_CLASS_OF)) - .setParameter("changeType", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_c_zmena)) + .setParameter("changeType", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_c_zmena)) .setParameter("hasChangedEntity", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_zmenenou_entitu)) .setParameter("hasTime", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_datum_a_cas_modifikace)) .setParameter("hasAuthor", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_editora)) // record has author @@ -184,6 +203,8 @@ public List findAllFiltered(URI changeContext, ChangeRecor query = query.setParameter("attributeNameValue", filter.getChangedAttributeName().trim()); } + query = query.setDescriptor(new EntityDescriptor().anyLanguage()); + if(pageable.isUnpaged()) { return query.getResultList(); } @@ -192,33 +213,6 @@ public List findAllFiltered(URI changeContext, ChangeRecor .setMaxResults(pageable.getPageSize()).getResultList(); } - /** - * Finds all change records to the specified asset. - * - * @param asset The changed asset - * @return List of change records ordered by timestamp (descending) - */ - public List findAll(Asset asset) { - Objects.requireNonNull(asset); - try { - final Descriptor descriptor = new EntityDescriptor(); - descriptor.setLanguage(null); - return em.createNativeQuery("SELECT ?r WHERE {" + - "?r a ?changeRecord ;" + - "?relatesTo ?asset ;" + - "?hasTime ?timestamp ." + - "OPTIONAL { ?r ?hasChangedAttribute ?attribute . 
}" + - "} ORDER BY DESC(?timestamp) ?attribute", AbstractChangeRecord.class) - .setParameter("changeRecord", URI.create(Vocabulary.s_c_zmena)) - .setParameter("relatesTo", URI.create(Vocabulary.s_p_ma_zmenenou_entitu)) - .setParameter("hasChangedAttribute", URI.create(Vocabulary.s_p_ma_zmeneny_atribut)) - .setParameter("hasTime", URI.create(Vocabulary.s_p_ma_datum_a_cas_modifikace)) - .setParameter("asset", asset.getUri()).setDescriptor(descriptor).getResultList(); - } catch (RuntimeException e) { - throw new PersistenceException(e); - } - } - /** * Gets a set of authors of the specified asset. That is, this method retrieves authors of persist change records * associated with the specified asset. diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java index fec0da69b..9fdf39319 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java @@ -125,11 +125,16 @@ private UpdateChangeRecord generateUpdateRecord(Instant timestamp, URI changedOb @Test void findAllRetrievesChangeRecordsRelatedToSpecifiedAsset() { enableRdfsInference(em); - final Term asset = Generator.generateTermWithId(); + final Term asset = Generator.generateTermWithId(vocabulary.getUri()); + transactional(() -> { + em.persist(vocabulary); + em.persist(asset, persistDescriptor(vocabulary.getUri())); + }); final List records = IntStream.range(0, 5).mapToObj( i -> generateUpdateRecord(Instant.ofEpochMilli(System.currentTimeMillis() - i * 10000L), asset.getUri())).collect(Collectors.toList()); - transactional(() -> records.forEach(r -> em.persist(r, persistDescriptor(vocabulary.getUri())))); + final URI changeContext = contextResolver.resolveChangeTrackingContext(vocabulary); + transactional(() -> records.forEach(r -> em.persist(r, persistDescriptor(changeContext)))); final List result = sut.findAll(asset); assertEquals(records.size(), result.size()); @@ -146,11 +151,16 @@ private Descriptor persistDescriptor(URI context) { @Test void findAllReturnsChangeRecordsOrderedByTimestampDescending() { enableRdfsInference(em); - final Term asset = Generator.generateTermWithId(); + final Term asset = Generator.generateTermWithId(vocabulary.getUri()); + transactional(() -> { + em.persist(vocabulary); + em.persist(asset, persistDescriptor(vocabulary.getUri())); + }); final List records = IntStream.range(0, 5).mapToObj( i -> generateUpdateRecord(Instant.ofEpochMilli(System.currentTimeMillis() + i * 10000L), asset.getUri())).collect(Collectors.toList()); - transactional(() -> records.forEach(r -> em.persist(r, persistDescriptor(vocabulary.getUri())))); + final URI changeContext = contextResolver.resolveChangeTrackingContext(vocabulary); + transactional(() -> records.forEach(r -> em.persist(r, persistDescriptor(changeContext)))); final List result = sut.findAll(asset); records.sort(Comparator.comparing(AbstractChangeRecord::getTimestamp).reversed()); @@ -160,15 +170,18 @@ void findAllReturnsChangeRecordsOrderedByTimestampDescending() { @Test void findAllReturnsChangeRecordsOrderedByTimestampDescendingAndChangedAttributeId() { enableRdfsInference(em); - final Term asset = Generator.generateTermWithId(); + final Term asset = Generator.generateTermWithId(vocabulary.getUri()); final Instant now = Utils.timestamp(); final UpdateChangeRecord rOne = generateUpdateRecord(now, 
asset.getUri()); rOne.setChangedAttribute(URI.create(SKOS.PREF_LABEL)); final UpdateChangeRecord rTwo = generateUpdateRecord(now, asset.getUri()); rTwo.setChangedAttribute(URI.create(SKOS.DEFINITION)); + final Descriptor changeContextDescriptor = persistDescriptor(contextResolver.resolveChangeTrackingContext(vocabulary)); transactional(() -> { - em.persist(rOne, persistDescriptor(vocabulary.getUri())); - em.persist(rTwo, persistDescriptor(vocabulary.getUri())); + em.persist(vocabulary); + em.persist(asset, persistDescriptor(vocabulary.getUri())); + em.persist(rOne, changeContextDescriptor); + em.persist(rTwo, changeContextDescriptor); }); final List result = sut.findAll(asset); diff --git a/src/test/java/cz/cvut/kbss/termit/service/repository/ChangeRecordServiceTest.java b/src/test/java/cz/cvut/kbss/termit/service/repository/ChangeRecordServiceTest.java index 3d2b135a5..24b1f313f 100644 --- a/src/test/java/cz/cvut/kbss/termit/service/repository/ChangeRecordServiceTest.java +++ b/src/test/java/cz/cvut/kbss/termit/service/repository/ChangeRecordServiceTest.java @@ -26,6 +26,7 @@ import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; import cz.cvut.kbss.termit.model.changetracking.UpdateChangeRecord; import cz.cvut.kbss.termit.persistence.context.DescriptorFactory; +import cz.cvut.kbss.termit.persistence.dao.changetracking.ChangeRecordDao; import cz.cvut.kbss.termit.service.BaseServiceTestRunner; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -51,6 +52,9 @@ class ChangeRecordServiceTest extends BaseServiceTestRunner { @Autowired private ChangeRecordService sut; + @Autowired + private ChangeRecordDao dao; + private User author; private Vocabulary asset; @@ -84,7 +88,9 @@ private List generateChanges() { r.setTimestamp(Instant.ofEpochMilli(System.currentTimeMillis() - i * 10000L)); return r; }).collect(Collectors.toList()); - transactional(() -> records.forEach(em::persist)); + transactional(() -> { + records.forEach(r -> dao.persist(r, asset)); + }); return records; } } From a657a982612fbd7d953d5c214fbe91280b654b1f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Thu, 14 Nov 2024 21:40:13 +0100 Subject: [PATCH 27/49] [Enhancement kbss-cvut/termit-ui#520] Refactor ChangeRecordDao public interface and add documentation. 
--- .../termit/persistence/dao/VocabularyDao.java | 13 +----- .../dao/changetracking/ChangeRecordDao.java | 44 ++++++++++++++----- .../persistence/dao/VocabularyDaoTest.java | 9 ++-- .../changetracking/ChangeRecordDaoTest.java | 31 ++++++++----- 4 files changed, 58 insertions(+), 39 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java index cba546691..c8c11eff5 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java @@ -44,7 +44,6 @@ import cz.cvut.kbss.termit.persistence.context.DescriptorFactory; import cz.cvut.kbss.termit.persistence.context.VocabularyContextMapper; import cz.cvut.kbss.termit.persistence.dao.changetracking.ChangeRecordDao; -import cz.cvut.kbss.termit.persistence.dao.changetracking.ChangeTrackingContextResolver; import cz.cvut.kbss.termit.persistence.snapshot.AssetSnapshotLoader; import cz.cvut.kbss.termit.persistence.validation.VocabularyContentValidator; import cz.cvut.kbss.termit.service.snapshot.SnapshotProvider; @@ -90,7 +89,6 @@ public class VocabularyDao extends BaseAssetDao "} GROUP BY ?date HAVING (?cnt > 0) ORDER BY ?date"; private static final String REMOVE_GLOSSARY_TERMS_QUERY_FILE = "remove/removeGlossaryTerms.ru"; - private final ChangeTrackingContextResolver changeTrackingContextResolver; private final ChangeRecordDao changeRecordDao; private volatile long lastModified; @@ -102,12 +100,11 @@ public class VocabularyDao extends BaseAssetDao @Autowired public VocabularyDao(EntityManager em, Configuration config, DescriptorFactory descriptorFactory, VocabularyContextMapper contextMapper, ApplicationContext context, - ChangeTrackingContextResolver changeTrackingContextResolver, ChangeRecordDao changeRecordDao) { + ChangeRecordDao changeRecordDao) { super(Vocabulary.class, em, config.getPersistence(), descriptorFactory); this.contextMapper = contextMapper; refreshLastModified(); this.context = context; - this.changeTrackingContextResolver = changeTrackingContextResolver; this.changeRecordDao = changeRecordDao; } @@ -411,13 +408,7 @@ public List getChangesOfContent(Vocabulary vocabulary) { */ public List getDetailedHistoryOfContent(Vocabulary vocabulary, ChangeRecordFilterDto filter, Pageable pageReq) { Objects.requireNonNull(vocabulary); - return changeRecordDao.findAllFiltered( - changeTrackingContextResolver.resolveChangeTrackingContext(vocabulary), - filter, - Optional.empty(), - Optional.of(URI.create(SKOS.CONCEPT)), // term - pageReq - ); + return changeRecordDao.findAllRelatedToType(vocabulary, filter, URI.create(SKOS.CONCEPT), pageReq); } private Query createContentChangesQuery(Vocabulary vocabulary) { diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java index 4098d32b8..450c57164 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java @@ -83,29 +83,49 @@ public List findAll(Asset asset) { return findAll(asset, new ChangeRecordFilterDto()); } + private Optional resolveChangeTrackingContext(Asset asset) { + try { + return Optional.of(contextResolver.resolveChangeTrackingContext(asset)); + } catch (NoResultException e) { + return Optional.empty(); + } + } + /** + * Finds all change 
records related to the specified asset matching the filter. * - * @param asset - * @param filterDto - * @return + * @param asset the asset + * @param filterDto filter parameters */ public List findAll(Asset asset, ChangeRecordFilterDto filterDto) { - URI changeTrackingContext = null; - try { - changeTrackingContext = contextResolver.resolveChangeTrackingContext(asset); - } catch (NoResultException e) { - return List.of(); - } - return findAllFiltered(changeTrackingContext, filterDto, Optional.of(asset), Optional.empty(), Pageable.unpaged()); + return resolveChangeTrackingContext(asset).map(context -> + findAllFiltered(context, filterDto, Optional.of(asset), Optional.empty(), Pageable.unpaged())) + .orElseGet(List::of); } /** + * Finds all records from change context resolved from {@code changeContextAsset} + * that are matching the filter and are related to an entity of the type {@code relatedEntityType}. + */ + public List findAllRelatedToType(Asset changeContextAsset, ChangeRecordFilterDto filterDto, URI relatedEntityType, Pageable pageable) { + return resolveChangeTrackingContext(changeContextAsset).map(context -> + findAllFiltered(context, + filterDto, + Optional.empty(), + Optional.ofNullable(relatedEntityType), + pageable + )).orElseGet(List::of); + } + + /** + * Finds all change records matching the filter. + * * @param changeContext the context of change records * @param filter filter parameters * @param asset if present, only changes of the asset will be returned - * @param assetType if present, only changes related to this asset type will be returned. + * @param assetType if present, only changes related to an asset of this type will be returned. */ - public List findAllFiltered(URI changeContext, ChangeRecordFilterDto filter, Optional> asset, Optional assetType, Pageable pageable) { + private List findAllFiltered(URI changeContext, ChangeRecordFilterDto filter, Optional> asset, Optional assetType, Pageable pageable) { TypedQuery query = em.createNativeQuery(""" SELECT DISTINCT ?record WHERE { """ + /* Select anything from change context */ """ diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java index 18cddcaae..1656f3b9d 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java @@ -115,9 +115,6 @@ class VocabularyDaoTest extends BaseDaoTestRunner { @Autowired private VocabularyDao sut; - @Autowired - private TermDao termDao; - @SpyBean private ChangeRecordDao changeRecordDao; @@ -952,17 +949,17 @@ void getAnyExternalRelationsReturnsTermsWithBothRelations(URI termRelation) { void getDetailedHistoryOfContentCallsChangeRecordDaoWithFilter() { final Vocabulary vocabulary = Generator.generateVocabularyWithId(); final List records = List.of(); - final Optional skosConcept = Optional.of(URI.create(SKOS.CONCEPT)); + final URI skosConcept = URI.create(SKOS.CONCEPT); final Pageable unpaged = Pageable.unpaged(); final ChangeRecordFilterDto filterDto = new ChangeRecordFilterDto(); filterDto.setAuthorName("Name of the author"); doReturn(vocabulary.getUri()).when(changeTrackingContextResolver).resolveChangeTrackingContext(vocabulary); - doReturn(records).when(changeRecordDao).findAllFiltered(vocabulary.getUri(), filterDto, Optional.empty(), skosConcept, unpaged); + doReturn(records).when(changeRecordDao).findAllRelatedToType(vocabulary, filterDto, skosConcept, unpaged); 
sut.getDetailedHistoryOfContent(vocabulary, filterDto, unpaged); verify(changeTrackingContextResolver).resolveChangeTrackingContext(vocabulary); - verify(changeRecordDao).findAllFiltered(vocabulary.getUri(), filterDto, Optional.empty(), skosConcept, unpaged); + verify(changeRecordDao).findAllRelatedToType(vocabulary, filterDto, skosConcept, unpaged); } } diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java index 9fdf39319..2ca822ed8 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java @@ -71,6 +71,7 @@ @DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_EACH_TEST_METHOD) class ChangeRecordDaoTest extends BaseDaoTestRunner { + private static final URI SKOS_CONCEPT = URI.create(SKOS.CONCEPT); @Autowired private ChangeTrackingContextResolver contextResolver; @@ -286,6 +287,11 @@ void getAuthorsRetrievesUsersAssociatedWithPersistChangeRecordsOfSpecifiedAsset( assertEquals(Collections.singleton(author), result); } + @Test + void voidFindAllReturnsChangeRecordsWithoutVocabularyChanges() { + + } + @Test void findAllFilteredReturnsRecordsOfExistingTermFilteredByTermName() { enableRdfsInference(em); @@ -306,6 +312,7 @@ void findAllFilteredReturnsRecordsOfExistingTermFilteredByTermName() { final List secondChanges = Generator.generateChangeRecords(secondTerm, author); final List thirdChanges = Generator.generateChangeRecords(thirdTerm, author); + final Descriptor changeContextDescriptor = persistDescriptor(contextResolver.resolveChangeTrackingContext(vocabulary)); final Descriptor vocabularyDescriptor = persistDescriptor(vocabulary.getUri()); transactional(() -> { @@ -319,7 +326,7 @@ void findAllFilteredReturnsRecordsOfExistingTermFilteredByTermName() { Stream.of(firstChanges, secondChanges, thirdChanges) .flatMap(Collection::stream) - .forEach(r -> em.persist(r, vocabularyDescriptor)); + .forEach(r -> em.persist(r, changeContextDescriptor)); }); final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); @@ -328,7 +335,7 @@ void findAllFilteredReturnsRecordsOfExistingTermFilteredByTermName() { final int recordsCount = firstChanges.size() + secondChanges.size(); final Pageable pageable = Pageable.ofSize(recordsCount * 2); - final List contentChanges = sut.findAllFiltered(vocabulary.getUri(), filter, Optional.empty(), Optional.of(URI.create(SKOS.CONCEPT)), pageable); + final List contentChanges = sut.findAllRelatedToType(vocabulary, filter, SKOS_CONCEPT, pageable); assertEquals(recordsCount, contentChanges.size()); final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); @@ -364,6 +371,7 @@ void findAllFilteredReturnsRecordsOfDeletedTermFilteredByTermName() { deleteChangeRecord.setAuthor(author); deleteChangeRecord.setLabel(termToRemove.getLabel()); + final Descriptor changeContextDescriptor = persistDescriptor(contextResolver.resolveChangeTrackingContext(vocabulary)); final Descriptor vocabularyDescriptor = persistDescriptor(vocabulary.getUri()); transactional(() -> { @@ -374,7 +382,7 @@ void findAllFilteredReturnsRecordsOfDeletedTermFilteredByTermName() { Stream.of(firstChanges, termToRemoveChanges, List.of(deleteChangeRecord)) .flatMap(Collection::stream) - .forEach(r -> em.persist(r, vocabularyDescriptor)); + .forEach(r -> em.persist(r, 
changeContextDescriptor)); }); final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); @@ -383,7 +391,7 @@ void findAllFilteredReturnsRecordsOfDeletedTermFilteredByTermName() { final int recordsCount = termToRemoveChanges.size() + 1; // +1 for the delete record final Pageable pageable = Pageable.unpaged(); - final List contentChanges = sut.findAllFiltered(vocabulary.getUri(), filter, Optional.empty(), Optional.of(URI.create(SKOS.CONCEPT)), pageable); + final List contentChanges = sut.findAllRelatedToType(vocabulary, filter, SKOS_CONCEPT, pageable); assertEquals(recordsCount, contentChanges.size()); final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); @@ -413,6 +421,7 @@ void findAllFilteredReturnsRecordsOfExistingTermFilteredByChangedAttributeName() final URI anotherChangedAttribute = URI.create(RDFS.LABEL); final String changedAttributeName = "definition"; + final Descriptor changeContextDescriptor = persistDescriptor(contextResolver.resolveChangeTrackingContext(vocabulary)); final Descriptor vocabularyDescriptor = persistDescriptor(vocabulary.getUri()); Stream.of(firstChanges, secondChanges).flatMap(Collection::stream) @@ -435,7 +444,7 @@ void findAllFilteredReturnsRecordsOfExistingTermFilteredByChangedAttributeName() Stream.of(firstChanges, secondChanges) .flatMap(Collection::stream) - .forEach(r -> em.persist(r, vocabularyDescriptor)); + .forEach(r -> em.persist(r, changeContextDescriptor)); }); final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); @@ -443,7 +452,7 @@ void findAllFilteredReturnsRecordsOfExistingTermFilteredByChangedAttributeName() final Pageable pageable = Pageable.unpaged(); - final List contentChanges = sut.findAllFiltered(vocabulary.getUri(), filter, Optional.empty(), Optional.of(URI.create(SKOS.CONCEPT)), pageable); + final List contentChanges = sut.findAllRelatedToType(vocabulary, filter, SKOS_CONCEPT, pageable); assertEquals(recordCount.get(), contentChanges.size()); final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); @@ -477,6 +486,7 @@ void findAllFilteredReturnsRecordsOfExistingTermFilteredByAuthorName() { firstChanges.add(Generator.generateUpdateChange(firstTerm)); secondChanges.add(Generator.generateUpdateChange(secondTerm)); + final Descriptor changeContextDescriptor = persistDescriptor(contextResolver.resolveChangeTrackingContext(vocabulary)); final Descriptor vocabularyDescriptor = persistDescriptor(vocabulary.getUri()); transactional(() -> { @@ -487,7 +497,7 @@ void findAllFilteredReturnsRecordsOfExistingTermFilteredByAuthorName() { Stream.of(firstChanges, secondChanges) .flatMap(Collection::stream) - .forEach(r -> em.persist(r, vocabularyDescriptor)); + .forEach(r -> em.persist(r, changeContextDescriptor)); }); final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); @@ -496,7 +506,7 @@ void findAllFilteredReturnsRecordsOfExistingTermFilteredByAuthorName() { final Pageable pageable = Pageable.unpaged(); - final List contentChanges = sut.findAllFiltered(vocabulary.getUri(), filter, Optional.empty(), Optional.of(URI.create(SKOS.CONCEPT)), pageable); + final List contentChanges = sut.findAllRelatedToType(vocabulary, filter, SKOS_CONCEPT, pageable); assertEquals(recordCount, contentChanges.size()); final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); @@ -531,6 +541,7 @@ void findAllFilteredReturnsRecordsOfExistingTermFilteredByChangeType(Class { @@ -541,7 +552,7 @@ 
void findAllFilteredReturnsRecordsOfExistingTermFilteredByChangeType(Class em.persist(r, vocabularyDescriptor)); + .forEach(r -> em.persist(r, changeContextDescriptor)); }); final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); @@ -550,7 +561,7 @@ void findAllFilteredReturnsRecordsOfExistingTermFilteredByChangeType(Class contentChanges = sut.findAllFiltered(vocabulary.getUri(), filter, Optional.empty(), Optional.of(URI.create(SKOS.CONCEPT)), pageable); + final List contentChanges = sut.findAllRelatedToType(vocabulary, filter, SKOS_CONCEPT, pageable); assertEquals(recordCount, contentChanges.size()); assertTrue(contentChanges.stream().allMatch(typeClass::isInstance)); From 9c04678f3db5282ce8021e14ebaaa04f4fc32640 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Thu, 14 Nov 2024 22:23:57 +0100 Subject: [PATCH 28/49] [Enhancement kbss-cvut/termit-ui#520] ChangeRecordDaoTest cleanup --- .../persistence/dao/VocabularyDaoTest.java | 6 -- .../changetracking/ChangeRecordDaoTest.java | 87 ++++++++----------- 2 files changed, 34 insertions(+), 59 deletions(-) diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java index 1656f3b9d..46ad8b041 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java @@ -46,7 +46,6 @@ import cz.cvut.kbss.termit.model.util.EntityToOwlClassMapper; import cz.cvut.kbss.termit.persistence.context.DescriptorFactory; import cz.cvut.kbss.termit.persistence.dao.changetracking.ChangeRecordDao; -import cz.cvut.kbss.termit.persistence.dao.changetracking.ChangeTrackingContextResolver; import cz.cvut.kbss.termit.util.Constants; import org.eclipse.rdf4j.model.IRI; import org.eclipse.rdf4j.model.ValueFactory; @@ -118,9 +117,6 @@ class VocabularyDaoTest extends BaseDaoTestRunner { @SpyBean private ChangeRecordDao changeRecordDao; - @SpyBean - private ChangeTrackingContextResolver changeTrackingContextResolver; - private User author; @BeforeEach @@ -954,12 +950,10 @@ void getDetailedHistoryOfContentCallsChangeRecordDaoWithFilter() { final ChangeRecordFilterDto filterDto = new ChangeRecordFilterDto(); filterDto.setAuthorName("Name of the author"); - doReturn(vocabulary.getUri()).when(changeTrackingContextResolver).resolveChangeTrackingContext(vocabulary); doReturn(records).when(changeRecordDao).findAllRelatedToType(vocabulary, filterDto, skosConcept, unpaged); sut.getDetailedHistoryOfContent(vocabulary, filterDto, unpaged); - verify(changeTrackingContextResolver).resolveChangeTrackingContext(vocabulary); verify(changeRecordDao).findAllRelatedToType(vocabulary, filterDto, skosConcept, unpaged); } } diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java index 2ca822ed8..ee45f8e9d 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java @@ -288,52 +288,34 @@ void getAuthorsRetrievesUsersAssociatedWithPersistChangeRecordsOfSpecifiedAsset( } @Test - void voidFindAllReturnsChangeRecordsWithoutVocabularyChanges() { - - } - - @Test - void findAllFilteredReturnsRecordsOfExistingTermFilteredByTermName() { + void 
findAllRelatedToTypeReturnsChangeRecordsWithoutVocabularyChanges() { enableRdfsInference(em); - final String needle = "needle"; - final String haystack = "A label that contains needle somewhere"; - final String mud = "The n3edle is not here"; - - // Two terms with needle in the label, one term without needle in the label final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); - firstTerm.getLabel().set(Environment.LANGUAGE, haystack); final Term secondTerm = Generator.generateTermWithId(vocabulary.getUri()); - secondTerm.getLabel().set(mud + needle); - final Term thirdTerm = Generator.generateTermWithId(vocabulary.getUri()); - thirdTerm.getLabel().set(Environment.LANGUAGE, mud); final List firstChanges = Generator.generateChangeRecords(firstTerm, author); final List secondChanges = Generator.generateChangeRecords(secondTerm, author); - final List thirdChanges = Generator.generateChangeRecords(thirdTerm, author); + + final List vocabularyChanges = Generator.generateChangeRecords(vocabulary, author); final Descriptor changeContextDescriptor = persistDescriptor(contextResolver.resolveChangeTrackingContext(vocabulary)); final Descriptor vocabularyDescriptor = persistDescriptor(vocabulary.getUri()); transactional(() -> { - vocabulary.getGlossary().addRootTerm(firstTerm); em.persist(vocabulary, vocabularyDescriptor); - Environment.addRelation(vocabulary.getUri(), URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_glosar), vocabulary.getGlossary().getUri(), em); - em.persist(firstTerm, vocabularyDescriptor); em.persist(secondTerm, vocabularyDescriptor); - em.persist(thirdTerm, vocabularyDescriptor); - Stream.of(firstChanges, secondChanges, thirdChanges) + Stream.of(firstChanges, secondChanges, vocabularyChanges) .flatMap(Collection::stream) .forEach(r -> em.persist(r, changeContextDescriptor)); }); final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); - filter.setAssetLabel(needle); final int recordsCount = firstChanges.size() + secondChanges.size(); - final Pageable pageable = Pageable.ofSize(recordsCount * 2); + final Pageable pageable = Pageable.unpaged(); final List contentChanges = sut.findAllRelatedToType(vocabulary, filter, SKOS_CONCEPT, pageable); @@ -341,46 +323,46 @@ void findAllFilteredReturnsRecordsOfExistingTermFilteredByTermName() { final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); final long updatesCount = contentChanges.stream().filter(ch -> ch instanceof UpdateChangeRecord).count(); final long deleteCount = contentChanges.stream().filter(ch -> ch instanceof DeleteChangeRecord).count(); + // check that all changes are related to the first or the second term + assertTrue(contentChanges.stream() + .allMatch(ch -> firstTerm.getUri().equals(ch.getChangedEntity()) || + secondTerm.getUri().equals(ch.getChangedEntity()))); assertEquals(2, persistCount); assertEquals(recordsCount - 2, updatesCount); // -2 persist records assertEquals(0, deleteCount); } - @Test - void findAllFilteredReturnsRecordsOfDeletedTermFilteredByTermName() { + void findAllRelatedToTypeReturnsRecordsOfExistingTermFilteredByTermName() { enableRdfsInference(em); final String needle = "needle"; final String haystack = "A label that contains needle somewhere"; final String mud = "The n3edle is not here"; - + + // needle is inside the label of first and the second term final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); - // the needle is placed in the term which will be removed - 
firstTerm.getLabel().set(Environment.LANGUAGE, mud); - firstTerm.setVocabulary(vocabulary.getUri()); - final Term termToRemove = Generator.generateTermWithId(vocabulary.getUri()); - termToRemove.getLabel().set(Environment.LANGUAGE, haystack); - termToRemove.setVocabulary(vocabulary.getUri()); + firstTerm.getLabel().set(Environment.LANGUAGE, haystack); + final Term secondTerm = Generator.generateTermWithId(vocabulary.getUri()); + secondTerm.getLabel().set(mud + needle); + final Term thirdTerm = Generator.generateTermWithId(vocabulary.getUri()); + thirdTerm.getLabel().set(Environment.LANGUAGE, mud); final List firstChanges = Generator.generateChangeRecords(firstTerm, author); - final List termToRemoveChanges = Generator.generateChangeRecords(termToRemove, author); - final DeleteChangeRecord deleteChangeRecord = new DeleteChangeRecord(); - deleteChangeRecord.setChangedEntity(termToRemove.getUri()); - deleteChangeRecord.setTimestamp(Utils.timestamp()); - deleteChangeRecord.setAuthor(author); - deleteChangeRecord.setLabel(termToRemove.getLabel()); + final List secondChanges = Generator.generateChangeRecords(secondTerm, author); + final List thirdChanges = Generator.generateChangeRecords(thirdTerm, author); final Descriptor changeContextDescriptor = persistDescriptor(contextResolver.resolveChangeTrackingContext(vocabulary)); final Descriptor vocabularyDescriptor = persistDescriptor(vocabulary.getUri()); transactional(() -> { - em.persist(vocabulary); + em.persist(vocabulary, vocabularyDescriptor); em.persist(firstTerm, vocabularyDescriptor); - em.persist(termToRemove, vocabularyDescriptor); + em.persist(secondTerm, vocabularyDescriptor); + em.persist(thirdTerm, vocabularyDescriptor); - Stream.of(firstChanges, termToRemoveChanges, List.of(deleteChangeRecord)) + Stream.of(firstChanges, secondChanges, thirdChanges) .flatMap(Collection::stream) .forEach(r -> em.persist(r, changeContextDescriptor)); }); @@ -388,8 +370,9 @@ void findAllFilteredReturnsRecordsOfDeletedTermFilteredByTermName() { final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); filter.setAssetLabel(needle); - final int recordsCount = termToRemoveChanges.size() + 1; // +1 for the delete record - final Pageable pageable = Pageable.unpaged(); + // needle is inside the label of first and the second term + final int recordsCount = firstChanges.size() + secondChanges.size(); + final Pageable pageable = Pageable.ofSize(recordsCount * 2); final List contentChanges = sut.findAllRelatedToType(vocabulary, filter, SKOS_CONCEPT, pageable); @@ -397,24 +380,22 @@ void findAllFilteredReturnsRecordsOfDeletedTermFilteredByTermName() { final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); final long updatesCount = contentChanges.stream().filter(ch -> ch instanceof UpdateChangeRecord).count(); final long deleteCount = contentChanges.stream().filter(ch -> ch instanceof DeleteChangeRecord).count(); - assertEquals(1, persistCount); - assertEquals(recordsCount - 2, updatesCount); // -1 persist record -1 delete record - assertEquals(1, deleteCount); + assertEquals(2, persistCount); + assertEquals(recordsCount - 2, updatesCount); // -2 persist records + assertEquals(0, deleteCount); } @Test - void findAllFilteredReturnsRecordsOfExistingTermFilteredByChangedAttributeName() { + void findAllRelatedToTypeReturnsRecordsOfExistingTermFilteredByChangedAttributeName() { enableRdfsInference(em); - // Two terms with needle in the label, one term without needle in the label final Term firstTerm = 
Generator.generateTermWithId(vocabulary.getUri()); final Term secondTerm = Generator.generateTermWithId(vocabulary.getUri()); final List firstChanges = Generator.generateChangeRecords(firstTerm, author); final List secondChanges = Generator.generateChangeRecords(secondTerm, author); - // randomize changed attributes final Random random = new Random(); final AtomicInteger recordCount = new AtomicInteger(0); final URI changedAttribute = URI.create(SKOS.DEFINITION); @@ -424,10 +405,12 @@ void findAllFilteredReturnsRecordsOfExistingTermFilteredByChangedAttributeName() final Descriptor changeContextDescriptor = persistDescriptor(contextResolver.resolveChangeTrackingContext(vocabulary)); final Descriptor vocabularyDescriptor = persistDescriptor(vocabulary.getUri()); + // randomize changed attributes Stream.of(firstChanges, secondChanges).flatMap(Collection::stream) .filter(r -> r instanceof UpdateChangeRecord) .map(r -> (UpdateChangeRecord) r) .forEach(r -> { + // ensuring at least one has the "changedAttribute" if(random.nextBoolean() || recordCount.get() == 0) { r.setChangedAttribute(changedAttribute); recordCount.incrementAndGet(); @@ -464,10 +447,9 @@ void findAllFilteredReturnsRecordsOfExistingTermFilteredByChangedAttributeName() } @Test - void findAllFilteredReturnsRecordsOfExistingTermFilteredByAuthorName() { + void findAllRelatedToTypeReturnsRecordsOfExistingTermFilteredByAuthorName() { enableRdfsInference(em); - // Two terms with needle in the label, one term without needle in the label final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); final Term secondTerm = Generator.generateTermWithId(vocabulary.getUri()); @@ -523,11 +505,10 @@ void findAllFilteredReturnsRecordsOfExistingTermFilteredByAuthorName() { PersistChangeRecord.class, DeleteChangeRecord.class }) - void findAllFilteredReturnsRecordsOfExistingTermFilteredByChangeType(Class typeClass) { + void findAllRelatedToTypeReturnsRecordsOfExistingTermFilteredByChangeType(Class typeClass) { enableRdfsInference(em); final URI typeUri = URI.create(typeClass.getAnnotation(OWLClass.class).iri()); - // Two terms with needle in the label, one term without needle in the label final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); final Term secondTerm = Generator.generateTermWithId(vocabulary.getUri()); From de1e76a96c8e70e1c7a04a669286220caca521f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Fri, 15 Nov 2024 10:52:47 +0100 Subject: [PATCH 29/49] [Ref] Cleanup changes for PR --- .../dao/changetracking/ChangeRecordDao.java | 4 ++-- .../dao/changetracking/ChangeRecordDaoTest.java | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java index 450c57164..7179cbd8d 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java @@ -76,8 +76,8 @@ public void persist(AbstractChangeRecord record, Asset changedAsset) { /** * Finds all change records related to the specified asset. 
* - * @param asset the asset - * @return list of change records + * @param asset The changed asset + * @return List of change records ordered by timestamp (descending) */ public List findAll(Asset asset) { return findAll(asset, new ChangeRecordFilterDto()); diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java index ee45f8e9d..4fdee2b5f 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java @@ -153,15 +153,15 @@ private Descriptor persistDescriptor(URI context) { void findAllReturnsChangeRecordsOrderedByTimestampDescending() { enableRdfsInference(em); final Term asset = Generator.generateTermWithId(vocabulary.getUri()); - transactional(() -> { - em.persist(vocabulary); - em.persist(asset, persistDescriptor(vocabulary.getUri())); - }); final List records = IntStream.range(0, 5).mapToObj( i -> generateUpdateRecord(Instant.ofEpochMilli(System.currentTimeMillis() + i * 10000L), asset.getUri())).collect(Collectors.toList()); final URI changeContext = contextResolver.resolveChangeTrackingContext(vocabulary); - transactional(() -> records.forEach(r -> em.persist(r, persistDescriptor(changeContext)))); + transactional(() -> { + em.persist(vocabulary); + em.persist(asset, persistDescriptor(vocabulary.getUri())); + records.forEach(r -> em.persist(r, persistDescriptor(changeContext))); + }); final List result = sut.findAll(asset); records.sort(Comparator.comparing(AbstractChangeRecord::getTimestamp).reversed()); From f44f7b08f1f2ac4f6922b74797f547845c76a3ea Mon Sep 17 00:00:00 2001 From: Martin Ledvinka Date: Tue, 19 Nov 2024 10:03:45 +0100 Subject: [PATCH 30/49] [kbss-cvut/termit-ui#553] Check if text analysis service supports file language before annotation. --- ...upportedTextAnalysisLanguageException.java | 14 ++++ .../business/AccessControlListService.java | 2 +- .../service/business/ResourceService.java | 8 ++ .../service/document/TextAnalysisService.java | 77 +++++++++++++++++-- .../cvut/kbss/termit/util/Configuration.java | 13 ++++ .../handler/WebSocketExceptionHandler.java | 16 +++- src/main/resources/application.yml | 1 + .../service/business/ResourceServiceTest.java | 18 +++++ .../document/TextAnalysisServiceTest.java | 54 ++++++++++++- .../cvut/kbss/termit/util/VocabularyTest.java | 1 - src/test/resources/application.yml | 3 +- 11 files changed, 195 insertions(+), 12 deletions(-) create mode 100644 src/main/java/cz/cvut/kbss/termit/exception/UnsupportedTextAnalysisLanguageException.java diff --git a/src/main/java/cz/cvut/kbss/termit/exception/UnsupportedTextAnalysisLanguageException.java b/src/main/java/cz/cvut/kbss/termit/exception/UnsupportedTextAnalysisLanguageException.java new file mode 100644 index 000000000..3ddb95c60 --- /dev/null +++ b/src/main/java/cz/cvut/kbss/termit/exception/UnsupportedTextAnalysisLanguageException.java @@ -0,0 +1,14 @@ +package cz.cvut.kbss.termit.exception; + +import cz.cvut.kbss.termit.model.Asset; +import cz.cvut.kbss.termit.model.resource.File; + +/** + * Indicates that a language is not supported by the text analysis service. + */ +public class UnsupportedTextAnalysisLanguageException extends TermItException { + + public UnsupportedTextAnalysisLanguageException(String message, Asset asset) { + super(message, asset instanceof File ? 
"error.annotation.file.unsupportedLanguage" : "error.annotation.term.unsupportedLanguage"); + } +} diff --git a/src/main/java/cz/cvut/kbss/termit/service/business/AccessControlListService.java b/src/main/java/cz/cvut/kbss/termit/service/business/AccessControlListService.java index 4b6cdc889..c2b5772af 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/business/AccessControlListService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/business/AccessControlListService.java @@ -32,7 +32,7 @@ /** * Service for managing {@link AccessControlList}s (ACLs). *
<p>
- * Note that only management of ACLs is supported by this service. Access control itself is handled by TODO. + * Note that only management of ACLs is supported by this service. Access control itself is handled by {@link cz.cvut.kbss.termit.service.security.authorization.acl.AccessControlListBasedAuthorizationService}. */ public interface AccessControlListService { diff --git a/src/main/java/cz/cvut/kbss/termit/service/business/ResourceService.java b/src/main/java/cz/cvut/kbss/termit/service/business/ResourceService.java index 08aee833d..0156a8738 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/business/ResourceService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/business/ResourceService.java @@ -24,6 +24,7 @@ import cz.cvut.kbss.termit.exception.InvalidParameterException; import cz.cvut.kbss.termit.exception.NotFoundException; import cz.cvut.kbss.termit.exception.UnsupportedAssetOperationException; +import cz.cvut.kbss.termit.exception.UnsupportedTextAnalysisLanguageException; import cz.cvut.kbss.termit.model.TextAnalysisRecord; import cz.cvut.kbss.termit.model.Vocabulary; import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; @@ -300,6 +301,7 @@ public void runTextAnalysis(Resource resource, Set vocabularies) { verifyFileOperationPossible(resource, "Text analysis"); LOG.trace("Invoking text analysis on resource {}.", resource); final File file = (File) resource; + verifyLanguageSupported(file); if (vocabularies.isEmpty()) { if (file.getDocument() == null || file.getDocument().getVocabulary() == null) { throw new UnsupportedAssetOperationException( @@ -313,6 +315,12 @@ public void runTextAnalysis(Resource resource, Set vocabularies) { } } + private void verifyLanguageSupported(File file) { + if (!textAnalysisService.supportsLanguage(file)) { + throw new UnsupportedTextAnalysisLanguageException("Text analysis service does not support language " + file.getLanguage(), file); + } + } + private Set includeImportedVocabularies(Set providedVocabularies) { final Set result = new HashSet<>(providedVocabularies); providedVocabularies.forEach(uri -> { diff --git a/src/main/java/cz/cvut/kbss/termit/service/document/TextAnalysisService.java b/src/main/java/cz/cvut/kbss/termit/service/document/TextAnalysisService.java index 12bddd7e4..18da62044 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/document/TextAnalysisService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/document/TextAnalysisService.java @@ -20,11 +20,15 @@ import cz.cvut.kbss.termit.dto.TextAnalysisInput; import cz.cvut.kbss.termit.event.FileTextAnalysisFinishedEvent; import cz.cvut.kbss.termit.event.TermDefinitionTextAnalysisFinishedEvent; +import cz.cvut.kbss.termit.exception.TermItException; +import cz.cvut.kbss.termit.exception.UnsupportedTextAnalysisLanguageException; import cz.cvut.kbss.termit.exception.WebServiceIntegrationException; import cz.cvut.kbss.termit.model.AbstractTerm; +import cz.cvut.kbss.termit.model.Asset; import cz.cvut.kbss.termit.model.TextAnalysisRecord; import cz.cvut.kbss.termit.model.resource.File; import cz.cvut.kbss.termit.persistence.dao.TextAnalysisRecordDao; +import cz.cvut.kbss.termit.rest.handler.ErrorInfo; import cz.cvut.kbss.termit.util.Configuration; import cz.cvut.kbss.termit.util.Utils; import cz.cvut.kbss.termit.util.throttle.Throttle; @@ -32,20 +36,24 @@ import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationEventPublisher; +import 
org.springframework.core.ParameterizedTypeReference; import org.springframework.core.io.Resource; import org.springframework.http.HttpEntity; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpMethod; +import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; +import org.springframework.web.client.HttpClientErrorException; import org.springframework.web.client.RestTemplate; import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.util.HashSet; +import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.Set; @@ -67,6 +75,8 @@ public class TextAnalysisService { private final ApplicationEventPublisher eventPublisher; + private Set supportedLanguages; + @Autowired public TextAnalysisService(RestTemplate restClient, Configuration config, DocumentManager documentManager, AnnotationGenerator annotationGenerator, TextAnalysisRecordDao recordDao, @@ -126,6 +136,8 @@ private void invokeTextAnalysisOnFile(File file, TextAnalysisInput input) { storeTextAnalysisRecord(file, input); } catch (WebServiceIntegrationException e) { throw e; + } catch (HttpClientErrorException e) { + throw handleTextAnalysisInvocationClientException(e, file); } catch (RuntimeException e) { throw new WebServiceIntegrationException("Text analysis invocation failed.", e); } catch (IOException e) { @@ -140,11 +152,10 @@ private Optional invokeTextAnalysisService(TextAnalysisInput input) { return Optional.empty(); } final HttpHeaders headers = new HttpHeaders(); - headers.add(HttpHeaders.ACCEPT, MediaType.APPLICATION_XML_VALUE); - LOG.debug("Invoking text analysis service at '{}' on input: {}", config.getTextAnalysis().getUrl(), input); - final ResponseEntity resp = restClient - .exchange(config.getTextAnalysis().getUrl(), HttpMethod.POST, - new HttpEntity<>(input, headers), Resource.class); + headers.addAll(HttpHeaders.ACCEPT, List.of(MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE)); + LOG.debug("Invoking text analysis service at '{}' on input: {}", taUrl, input); + final ResponseEntity resp = restClient.exchange(taUrl, HttpMethod.POST, + new HttpEntity<>(input, headers), Resource.class); if (!resp.hasBody()) { throw new WebServiceIntegrationException("Text analysis service returned empty response."); } @@ -161,6 +172,16 @@ private void storeTextAnalysisRecord(File file, TextAnalysisInput config) { recordDao.persist(record); } + private TermItException handleTextAnalysisInvocationClientException(HttpClientErrorException ex, Asset asset) { + if (ex.getStatusCode() == HttpStatus.CONFLICT) { + final ErrorInfo errorInfo = ex.getResponseBodyAs(ErrorInfo.class); + if (errorInfo != null && errorInfo.getMessage().contains("language")) { + throw new UnsupportedTextAnalysisLanguageException(errorInfo.getMessage(),asset); + } + } + throw new WebServiceIntegrationException("Text analysis invocation failed.", ex); + } + /** * Gets the latest {@link TextAnalysisRecord} for the specified Resource. 
* @@ -205,10 +226,56 @@ private void invokeTextAnalysisOnTerm(AbstractTerm term, TextAnalysisInput input } } catch (WebServiceIntegrationException e) { throw e; + } catch (HttpClientErrorException e) { + throw handleTextAnalysisInvocationClientException(e, term); } catch (RuntimeException e) { throw new WebServiceIntegrationException("Text analysis invocation failed.", e); } catch (IOException e) { throw new WebServiceIntegrationException("Unable to read text analysis result from response.", e); } } + + /** + * Checks whether the text analysis service supports the language of the specified file. + *
<p>
+ * If the text analysis service does not provide endpoint for getting supported languages (or it is not configured), + * it is assumed that any language is supported. + *
<p>
+ * If the file does not have language set, it is assumed that it is supported as well. + * + * @param file File to be analyzed + * @return {@code true} if the file language is supported, {@code false} otherwise + */ + public boolean supportsLanguage(File file) { + Objects.requireNonNull(file); + return file.getLanguage() == null || getSupportedLanguages().isEmpty() || getSupportedLanguages().contains( + file.getLanguage()); + } + + private synchronized Set getSupportedLanguages() { + if (supportedLanguages != null) { + return supportedLanguages; + } + final String languagesEndpointUrl = config.getTextAnalysis().getLanguagesUrl(); + if (languagesEndpointUrl == null || languagesEndpointUrl.isBlank()) { + LOG.warn( + "Text analysis service languages endpoint URL not configured. Assuming any language is supported."); + this.supportedLanguages = Set.of(); + } else { + try { + LOG.debug("Getting list of supported languages from text analysis service at '{}'.", + languagesEndpointUrl); + ResponseEntity> response = restClient.exchange(languagesEndpointUrl, HttpMethod.GET, null, + new ParameterizedTypeReference<>() { + }); + this.supportedLanguages = response.getBody(); + LOG.trace("Text analysis supported languages: {}", supportedLanguages); + } catch (RuntimeException e) { + LOG.error("Unable to get list of supported languages from text analysis service at '{}'.", + languagesEndpointUrl, e); + this.supportedLanguages = Set.of(); + } + } + return supportedLanguages; + } } diff --git a/src/main/java/cz/cvut/kbss/termit/util/Configuration.java b/src/main/java/cz/cvut/kbss/termit/util/Configuration.java index 8a655df59..4785f9eb6 100644 --- a/src/main/java/cz/cvut/kbss/termit/util/Configuration.java +++ b/src/main/java/cz/cvut/kbss/termit/util/Configuration.java @@ -673,6 +673,11 @@ public static class TextAnalysis { */ private String url; + /** + * URL of the endpoint providing list of languages supported by the text analysis service. + */ + private String languagesUrl; + /** * Score threshold for a term occurrence for it to be saved into the repository. 
*/ @@ -693,6 +698,14 @@ public void setUrl(String url) { this.url = url; } + public String getLanguagesUrl() { + return languagesUrl; + } + + public void setLanguagesUrl(String languagesUrl) { + this.languagesUrl = languagesUrl; + } + public String getTermOccurrenceMinScore() { return termOccurrenceMinScore; } diff --git a/src/main/java/cz/cvut/kbss/termit/websocket/handler/WebSocketExceptionHandler.java b/src/main/java/cz/cvut/kbss/termit/websocket/handler/WebSocketExceptionHandler.java index c5869701b..c6042bb9a 100644 --- a/src/main/java/cz/cvut/kbss/termit/websocket/handler/WebSocketExceptionHandler.java +++ b/src/main/java/cz/cvut/kbss/termit/websocket/handler/WebSocketExceptionHandler.java @@ -19,6 +19,7 @@ import cz.cvut.kbss.termit.exception.TermItException; import cz.cvut.kbss.termit.exception.UnsupportedOperationException; import cz.cvut.kbss.termit.exception.UnsupportedSearchFacetException; +import cz.cvut.kbss.termit.exception.UnsupportedTextAnalysisLanguageException; import cz.cvut.kbss.termit.exception.ValidationException; import cz.cvut.kbss.termit.exception.WebServiceIntegrationException; import cz.cvut.kbss.termit.exception.importing.UnsupportedImportMediaTypeException; @@ -87,7 +88,8 @@ private static ErrorInfo errorInfo(Message message, Throwable e) { } private static ErrorInfo errorInfo(Message message, TermItException e) { - return ErrorInfo.createParametrizedWithMessage(e.getMessage(), e.getMessageId(), destination(message), e.getParameters()); + return ErrorInfo.createParametrizedWithMessage(e.getMessage(), e.getMessageId(), destination(message), + e.getParameters()); } @MessageExceptionHandler @@ -95,7 +97,7 @@ public void messageDeliveryException(Message message, MessageDeliveryExceptio // messages without destination will be logged only on trace (hasDestination(message) ? 
LOG.atError() : LOG.atTrace()) .setMessage("Failed to send message with destination {}: {}") - .addArgument(()-> destination(message)) + .addArgument(() -> destination(message)) .addArgument(e.getMessage()) .setCause(e.getCause()) .log(); @@ -226,7 +228,8 @@ public ErrorInfo invalidParameter(Message message, InvalidParameterException @MessageExceptionHandler public ErrorInfo maxUploadSizeExceededException(Message message, MaxUploadSizeExceededException e) { logException(e, message); - return ErrorInfo.createWithMessageAndMessageId(e.getMessage(), "error.file.maxUploadSizeExceeded", destination(message)); + return ErrorInfo.createWithMessageAndMessageId(e.getMessage(), "error.file.maxUploadSizeExceeded", + destination(message)); } @MessageExceptionHandler @@ -271,4 +274,11 @@ public ErrorInfo uriSyntaxException(Message message, URISyntaxException e) { logException(e, message); return errorInfo(message, e); } + + @MessageExceptionHandler + public ErrorInfo unsupportedTextAnalysisLanguageException(Message message, + UnsupportedTextAnalysisLanguageException e) { + logException(e, message); + return errorInfo(message, e); + } } diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml index 8d9cae801..b6fa55209 100644 --- a/src/main/resources/application.yml +++ b/src/main/resources/application.yml @@ -68,6 +68,7 @@ termit: storage: /tmp/termit textAnalysis: url: http://localhost:8081/annotace/annotate + languagesUrl: http://localhost:8081/annotace/languages changetracking: context: extension: /zmeny diff --git a/src/test/java/cz/cvut/kbss/termit/service/business/ResourceServiceTest.java b/src/test/java/cz/cvut/kbss/termit/service/business/ResourceServiceTest.java index 2777f42fe..cae57e7e8 100644 --- a/src/test/java/cz/cvut/kbss/termit/service/business/ResourceServiceTest.java +++ b/src/test/java/cz/cvut/kbss/termit/service/business/ResourceServiceTest.java @@ -24,6 +24,7 @@ import cz.cvut.kbss.termit.exception.NotFoundException; import cz.cvut.kbss.termit.exception.TermItException; import cz.cvut.kbss.termit.exception.UnsupportedAssetOperationException; +import cz.cvut.kbss.termit.exception.UnsupportedTextAnalysisLanguageException; import cz.cvut.kbss.termit.model.TextAnalysisRecord; import cz.cvut.kbss.termit.model.Vocabulary; import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; @@ -203,6 +204,7 @@ void runTextAnalysisInvokesTextAnalysisWithVocabularyRelatedToFilesDocument() { file.setDocument(Generator.generateDocumentWithId()); final Vocabulary vocabulary = Generator.generateVocabularyWithId(); file.getDocument().setVocabulary(vocabulary.getUri()); + when(textAnalysisService.supportsLanguage(file)).thenReturn(true); sut.runTextAnalysis(file, Collections.emptySet()); verify(textAnalysisService).analyzeFile(file, Collections.singleton(vocabulary.getUri())); } @@ -218,6 +220,7 @@ void runTextAnalysisThrowsUnsupportedAssetOperationWhenResourceIsNotFile() { @Test void runTextAnalysisThrowsUnsupportedAssetOperationWhenFileHasNoVocabularyAndNoVocabulariesAreSpecifiedEither() { final File file = Generator.generateFileWithId("test.html"); + when(textAnalysisService.supportsLanguage(file)).thenReturn(true); assertThrows(UnsupportedAssetOperationException.class, () -> sut.runTextAnalysis(file, Collections.emptySet())); verify(textAnalysisService, never()).analyzeFile(any(), anySet()); @@ -227,6 +230,7 @@ void runTextAnalysisThrowsUnsupportedAssetOperationWhenFileHasNoVocabularyAndNoV void runTextAnalysisInvokesAnalysisWithCustomVocabulariesWhenSpecified() { 
final File file = Generator.generateFileWithId("test.html"); final Set vocabularies = new HashSet<>(Arrays.asList(Generator.generateUri(), Generator.generateUri())); + when(textAnalysisService.supportsLanguage(file)).thenReturn(true); sut.runTextAnalysis(file, vocabularies); verify(textAnalysisService).analyzeFile(file, vocabularies); } @@ -240,6 +244,7 @@ void runTextAnalysisInvokesAnalysisAlsoWithImportedVocabulariesOfVocabularyRElat final Set imported = new HashSet<>(Arrays.asList(Generator.generateUri(), Generator.generateUri())); when(vocabularyService.getReference(vocabulary.getUri())).thenReturn(vocabulary); when(vocabularyService.getTransitivelyImportedVocabularies(vocabulary)).thenReturn(imported); + when(textAnalysisService.supportsLanguage(file)).thenReturn(true); sut.runTextAnalysis(file, Collections.emptySet()); final Set expected = new HashSet<>(imported); @@ -259,6 +264,7 @@ void runTextAnalysisInvokesAnalysisWithProvidedVocabulariesAndTheirImports() { when(vocabularyService.getTransitivelyImportedVocabularies(vOne)).thenReturn(vOneImports); when(vocabularyService.getReference(vTwo.getUri())).thenReturn(vTwo); when(vocabularyService.getTransitivelyImportedVocabularies(vTwo)).thenReturn(vTwoImports); + when(textAnalysisService.supportsLanguage(file)).thenReturn(true); sut.runTextAnalysis(file, new HashSet<>(Arrays.asList(vOne.getUri(), vTwo.getUri()))); final Set expected = new HashSet<>(vOneImports); @@ -554,4 +560,16 @@ void addFileToDocumentDoesNotModifyLanguageWhenItIsAlreadySet() { verify(resourceRepositoryService).persist(file, vocabulary); assertEquals("cs", file.getLanguage()); } + + @Test + void runTextAnalysisThrowsUnsupportedTextAnalysisExceptionWhenTextAnalysisServiceDoesNotSupportFileLanguage() { + final File file = Generator.generateFileWithId("test.html"); + file.setDocument(Generator.generateDocumentWithId()); + final Vocabulary vocabulary = Generator.generateVocabularyWithId(); + file.getDocument().setVocabulary(vocabulary.getUri()); + file.setLanguage("sk"); + when(textAnalysisService.supportsLanguage(file)).thenReturn(false); + assertThrows(UnsupportedTextAnalysisLanguageException.class, () -> sut.runTextAnalysis(file, Set.of(vocabulary.getUri()))); + verify(textAnalysisService).supportsLanguage(file); + } } diff --git a/src/test/java/cz/cvut/kbss/termit/service/document/TextAnalysisServiceTest.java b/src/test/java/cz/cvut/kbss/termit/service/document/TextAnalysisServiceTest.java index 6bdc27284..794753204 100644 --- a/src/test/java/cz/cvut/kbss/termit/service/document/TextAnalysisServiceTest.java +++ b/src/test/java/cz/cvut/kbss/termit/service/document/TextAnalysisServiceTest.java @@ -27,14 +27,17 @@ import cz.cvut.kbss.termit.event.FileTextAnalysisFinishedEvent; import cz.cvut.kbss.termit.event.TermDefinitionTextAnalysisFinishedEvent; import cz.cvut.kbss.termit.exception.NotFoundException; +import cz.cvut.kbss.termit.exception.UnsupportedTextAnalysisLanguageException; import cz.cvut.kbss.termit.exception.WebServiceIntegrationException; import cz.cvut.kbss.termit.model.Term; import cz.cvut.kbss.termit.model.TextAnalysisRecord; import cz.cvut.kbss.termit.model.Vocabulary; import cz.cvut.kbss.termit.model.resource.File; import cz.cvut.kbss.termit.persistence.dao.TextAnalysisRecordDao; +import cz.cvut.kbss.termit.rest.handler.ErrorInfo; import cz.cvut.kbss.termit.service.BaseServiceTestRunner; import cz.cvut.kbss.termit.util.Configuration; +import cz.cvut.kbss.termit.util.Constants; import cz.cvut.kbss.termit.util.Utils; import 
org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -70,6 +73,7 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -87,6 +91,7 @@ import static org.springframework.test.web.client.match.MockRestRequestMatchers.jsonPath; import static org.springframework.test.web.client.match.MockRestRequestMatchers.method; import static org.springframework.test.web.client.match.MockRestRequestMatchers.requestTo; +import static org.springframework.test.web.client.response.MockRestResponseCreators.withRequestConflict; import static org.springframework.test.web.client.response.MockRestResponseCreators.withServerError; import static org.springframework.test.web.client.response.MockRestResponseCreators.withSuccess; @@ -203,7 +208,7 @@ void analyzeFilePassesContentTypeAndAcceptHeadersToService() throws Exception { .andExpect(method(HttpMethod.POST)) .andExpect(content().string(objectMapper.writeValueAsString(input))) .andExpect(header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE)) - .andExpect(header(HttpHeaders.ACCEPT, MediaType.APPLICATION_XML_VALUE)) + .andExpect(header(HttpHeaders.ACCEPT,MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE)) .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); sut.analyzeFile(file, Collections.singleton(vocabulary.getUri())); mockServer.verify(); @@ -478,4 +483,51 @@ void analyzeFileUsesConfiguredPersistenceLanguageInTextAnalysisInvocationInputWh sut.analyzeFile(file, Collections.singleton(vocabulary.getUri())); mockServer.verify(); } + + @Test + void analyzeFileThrowsUnsupportedLanguageExceptionWhenTextAnalysisInvocationReturnsConflictWithUnsupportedLanguageError() + throws Exception { + file.setLanguage("de"); + final ErrorInfo respBody = ErrorInfo.createWithMessage("No taggers for language 'de' available.", + "/annotace/annotate"); + mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) + .andExpect(method(HttpMethod.POST)) + .andRespond(withRequestConflict().body(objectMapper.writeValueAsString(respBody)) + .contentType(MediaType.APPLICATION_JSON)); + + final UnsupportedTextAnalysisLanguageException ex = assertThrows(UnsupportedTextAnalysisLanguageException.class, + () -> sut.analyzeFile(file, + Collections.singleton( + vocabulary.getUri()))); + assertEquals("error.annotation.file.unsupportedLanguage", ex.getMessageId()); + } + + @Test + void supportsLanguageGetsListOfSupportedLanguagesFromTextAnalysisServiceAndChecksIfFileLanguageIsAmongThem() { + file.setLanguage("cs"); + mockServer.expect(requestTo(config.getTextAnalysis().getLanguagesUrl())) + .andExpect(method(HttpMethod.GET)) + .andRespond(withSuccess("[\"cs\", \"en\"]", MediaType.APPLICATION_JSON)); + assertTrue(sut.supportsLanguage(file)); + mockServer.verify(); + + file.setLanguage("de"); + assertFalse(sut.supportsLanguage(file)); + } + + @Test + void supportsLanguageReturnsTrueWhenTextAnalysisServiceLanguagesEndpointUrlIsNotConfigured() { + String endpointUrl = config.getTextAnalysis().getLanguagesUrl(); + file.setLanguage(Constants.DEFAULT_LANGUAGE); + config.getTextAnalysis().setLanguagesUrl(null); + assertTrue(sut.supportsLanguage(file)); + // Reset configuration state + 
config.getTextAnalysis().setLanguagesUrl(endpointUrl); + } + + @Test + void supportsLanguageReturnsTrueWhenFileHasNoLanguageSet() { + file.setLanguage(null); + assertTrue(sut.supportsLanguage(file)); + } } diff --git a/src/test/java/cz/cvut/kbss/termit/util/VocabularyTest.java b/src/test/java/cz/cvut/kbss/termit/util/VocabularyTest.java index a35fd6534..7c4d6aac9 100644 --- a/src/test/java/cz/cvut/kbss/termit/util/VocabularyTest.java +++ b/src/test/java/cz/cvut/kbss/termit/util/VocabularyTest.java @@ -23,7 +23,6 @@ public class VocabularyTest { @Test - // @todo until https://github.com/kbss-cvut/jopa/issues/85 is resolved public void ensureContentHasCorrectUrl() { Assert.equals("http://rdfs.org/sioc/ns#content", Vocabulary.s_p_sioc_content); } diff --git a/src/test/resources/application.yml b/src/test/resources/application.yml index 9365e2b7f..258bfa622 100644 --- a/src/test/resources/application.yml +++ b/src/test/resources/application.yml @@ -30,7 +30,8 @@ termit: file: storage: /tmp/termit textAnalysis: - url: http://localhost/annotace + url: http://localhost/annotace/annotate + languagesUrl: http://localhost/annotace/languages termOccurrenceMinScore: 0.49 comments: context: http://onto.fel.cvut.cz/ontologies/komentare From 58c10e23e3d3399df67cd0f10e9dd5583852f3ff Mon Sep 17 00:00:00 2001 From: Martin Ledvinka Date: Tue, 19 Nov 2024 10:10:38 +0100 Subject: [PATCH 31/49] [kbss-cvut/termit-ui#553] Handle unsupported text analysis language exception - return 409 status. --- .../termit/rest/handler/RestExceptionHandler.java | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/src/main/java/cz/cvut/kbss/termit/rest/handler/RestExceptionHandler.java b/src/main/java/cz/cvut/kbss/termit/rest/handler/RestExceptionHandler.java index 0ea71c47c..53ba971a6 100644 --- a/src/main/java/cz/cvut/kbss/termit/rest/handler/RestExceptionHandler.java +++ b/src/main/java/cz/cvut/kbss/termit/rest/handler/RestExceptionHandler.java @@ -36,6 +36,7 @@ import cz.cvut.kbss.termit.exception.TermItException; import cz.cvut.kbss.termit.exception.UnsupportedOperationException; import cz.cvut.kbss.termit.exception.UnsupportedSearchFacetException; +import cz.cvut.kbss.termit.exception.UnsupportedTextAnalysisLanguageException; import cz.cvut.kbss.termit.exception.ValidationException; import cz.cvut.kbss.termit.exception.WebServiceIntegrationException; import cz.cvut.kbss.termit.exception.importing.UnsupportedImportMediaTypeException; @@ -99,7 +100,8 @@ private static ErrorInfo errorInfo(HttpServletRequest request, Throwable e) { } private static ErrorInfo errorInfo(HttpServletRequest request, TermItException e) { - return ErrorInfo.createParametrizedWithMessage(e.getMessage(), e.getMessageId(), request.getRequestURI(), e.getParameters()); + return ErrorInfo.createParametrizedWithMessage(e.getMessage(), e.getMessageId(), request.getRequestURI(), + e.getParameters()); } @ExceptionHandler(PersistenceException.class) @@ -290,4 +292,11 @@ public ResponseEntity uriSyntaxException(HttpServletRequest request, .addParameter("char", Character.toString(e.getInput().charAt(e.getIndex()))); return new ResponseEntity<>(errorInfo(request, exception), HttpStatus.CONFLICT); } + + @ExceptionHandler + public ResponseEntity unsupportedTextAnalysisLanguageException(HttpServletRequest request, + UnsupportedTextAnalysisLanguageException e) { + logException(e, request); + return new ResponseEntity<>(errorInfo(request, e), HttpStatus.CONFLICT); + } } From bf2c16dadb73a8266e70161f169ebc22f915b300 Mon Sep 17 00:00:00 2001 
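To make the language-support rule introduced above easier to follow, here is a minimal, self-contained sketch (illustrative only, not taken from the patches; the class and method names are hypothetical). It restates the behaviour shown in TextAnalysisService#supportsLanguage and getSupportedLanguages: an unconfigured or unreachable languages endpoint yields an empty set, which is treated as "any language is supported", and a file with no language set is always considered supported.

import java.util.Set;

public class LanguageSupportSketch {

    // Mirrors the rule in TextAnalysisService#supportsLanguage: a null file language or an empty
    // supported-language set (languages endpoint not configured or unavailable) means "supported".
    static boolean supportsLanguage(String fileLanguage, Set<String> supportedLanguages) {
        return fileLanguage == null
                || supportedLanguages.isEmpty()
                || supportedLanguages.contains(fileLanguage);
    }

    public static void main(String[] args) {
        // e.g. response body of GET termit.textAnalysis.languagesUrl (such as /annotace/languages)
        final Set<String> fromService = Set.of("cs", "en");

        System.out.println(supportsLanguage("cs", fromService)); // true
        System.out.println(supportsLanguage("de", fromService)); // false - analysis request is rejected
        System.out.println(supportsLanguage(null, fromService)); // true - file has no language set
        System.out.println(supportsLanguage("de", Set.of()));    // true - languages endpoint not configured
    }
}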
From: Martin Ledvinka Date: Wed, 20 Nov 2024 09:58:28 +0100 Subject: [PATCH 32/49] [kbss-cvut/termit-ui#553] Add language to TextAnalysisRecord. --- .../kbss/termit/model/TextAnalysisRecord.java | 22 ++++++++++++++++--- .../service/document/TextAnalysisService.java | 2 +- .../TextAnalysisRecordDaoTest.java | 5 +++-- .../termit/rest/ResourceControllerTest.java | 2 +- .../service/business/ResourceServiceTest.java | 7 +++--- .../document/TextAnalysisServiceTest.java | 7 +++--- 6 files changed, 31 insertions(+), 14 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/model/TextAnalysisRecord.java b/src/main/java/cz/cvut/kbss/termit/model/TextAnalysisRecord.java index 837e55280..fe8dfe13d 100644 --- a/src/main/java/cz/cvut/kbss/termit/model/TextAnalysisRecord.java +++ b/src/main/java/cz/cvut/kbss/termit/model/TextAnalysisRecord.java @@ -17,10 +17,12 @@ */ package cz.cvut.kbss.termit.model; +import cz.cvut.kbss.jopa.model.annotations.OWLAnnotationProperty; import cz.cvut.kbss.jopa.model.annotations.OWLClass; import cz.cvut.kbss.jopa.model.annotations.OWLDataProperty; import cz.cvut.kbss.jopa.model.annotations.OWLObjectProperty; import cz.cvut.kbss.jopa.model.annotations.ParticipationConstraints; +import cz.cvut.kbss.jopa.vocabulary.DC; import cz.cvut.kbss.termit.model.resource.Resource; import cz.cvut.kbss.termit.util.Vocabulary; @@ -44,12 +46,16 @@ public class TextAnalysisRecord extends AbstractEntity { @OWLObjectProperty(iri = Vocabulary.s_p_ma_slovnik_pro_analyzu) private Set vocabularies; + @OWLAnnotationProperty(iri = DC.Terms.LANGUAGE, simpleLiteral = true) + private String language; + public TextAnalysisRecord() { } - public TextAnalysisRecord(Instant date, Resource analyzedResource) { + public TextAnalysisRecord(Instant date, Resource analyzedResource, String language) { this.date = date; this.analyzedResource = analyzedResource; + this.language = language; } public Instant getDate() { @@ -76,6 +82,14 @@ public void setVocabularies(Set vocabularies) { this.vocabularies = vocabularies; } + public String getLanguage() { + return language; + } + + public void setLanguage(String language) { + this.language = language; + } + @Override public boolean equals(Object o) { if (this == o) { @@ -86,12 +100,13 @@ public boolean equals(Object o) { } return Objects.equals(date, that.date) && Objects.equals(analyzedResource, that.analyzedResource) && - Objects.equals(vocabularies, that.vocabularies); + Objects.equals(vocabularies, that.vocabularies) && + Objects.equals(language, that.language); } @Override public int hashCode() { - return Objects.hash(date, analyzedResource, vocabularies); + return Objects.hash(date, analyzedResource, vocabularies, language); } @Override @@ -100,6 +115,7 @@ public String toString() { "date=" + date + ",analyzedResource=" + analyzedResource + ",vocabularies=" + vocabularies + + ", language=" + language + "}"; } } diff --git a/src/main/java/cz/cvut/kbss/termit/service/document/TextAnalysisService.java b/src/main/java/cz/cvut/kbss/termit/service/document/TextAnalysisService.java index 18da62044..6ef927e72 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/document/TextAnalysisService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/document/TextAnalysisService.java @@ -167,7 +167,7 @@ private void storeTextAnalysisRecord(File file, TextAnalysisInput config) { LOG.trace("Creating record of text analysis event for file {}.", file); assert config.getVocabularyContexts() != null; - final TextAnalysisRecord record = new TextAnalysisRecord(Utils.timestamp(), 
file); + final TextAnalysisRecord record = new TextAnalysisRecord(Utils.timestamp(), file, config.getLanguage()); record.setVocabularies(new HashSet<>(config.getVocabularyContexts())); recordDao.persist(record); } diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/TextAnalysisRecordDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/TextAnalysisRecordDaoTest.java index 7eb5a23e8..cc0c320d3 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/TextAnalysisRecordDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/TextAnalysisRecordDaoTest.java @@ -63,9 +63,10 @@ void setUp() { @Test void findLatestGetsLatestTextAnalysisRecordForResource() { final URI vocabulary = Generator.generateUri(); - final TextAnalysisRecord old = new TextAnalysisRecord(Instant.ofEpochMilli(System.currentTimeMillis() - 10000), resource); + final TextAnalysisRecord old = new TextAnalysisRecord(Instant.ofEpochMilli(System.currentTimeMillis() - 10000), + resource, Environment.LANGUAGE); old.setVocabularies(Collections.singleton(vocabulary)); - final TextAnalysisRecord latest = new TextAnalysisRecord(Utils.timestamp(), resource); + final TextAnalysisRecord latest = new TextAnalysisRecord(Utils.timestamp(), resource, Environment.LANGUAGE); latest.setVocabularies(Collections.singleton(vocabulary)); transactional(() -> { sut.persist(old); diff --git a/src/test/java/cz/cvut/kbss/termit/rest/ResourceControllerTest.java b/src/test/java/cz/cvut/kbss/termit/rest/ResourceControllerTest.java index bd50b7258..2d136b8e1 100644 --- a/src/test/java/cz/cvut/kbss/termit/rest/ResourceControllerTest.java +++ b/src/test/java/cz/cvut/kbss/termit/rest/ResourceControllerTest.java @@ -326,7 +326,7 @@ void getLatestTextAnalysisRecordRetrievesAnalysisRecordFromService() throws Exce final File file = generateFile(); when(identifierResolverMock.resolveIdentifier(RESOURCE_NAMESPACE, FILE_NAME)).thenReturn(file.getUri()); when(resourceServiceMock.findRequired(file.getUri())).thenReturn(file); - final TextAnalysisRecord record = new TextAnalysisRecord(Utils.timestamp(), file); + final TextAnalysisRecord record = new TextAnalysisRecord(Utils.timestamp(), file, Environment.LANGUAGE); record.setVocabularies(Collections.singleton(Generator.generateUri())); when(resourceServiceMock.findLatestTextAnalysisRecord(file)).thenReturn(record); final MvcResult mvcResult = mockMvc.perform(get(PATH + "/" + FILE_NAME + "/text-analysis/records/latest") diff --git a/src/test/java/cz/cvut/kbss/termit/service/business/ResourceServiceTest.java b/src/test/java/cz/cvut/kbss/termit/service/business/ResourceServiceTest.java index cae57e7e8..ab93c17c7 100644 --- a/src/test/java/cz/cvut/kbss/termit/service/business/ResourceServiceTest.java +++ b/src/test/java/cz/cvut/kbss/termit/service/business/ResourceServiceTest.java @@ -37,7 +37,6 @@ import cz.cvut.kbss.termit.service.repository.ChangeRecordService; import cz.cvut.kbss.termit.service.repository.ResourceRepositoryService; import cz.cvut.kbss.termit.util.Configuration; -import cz.cvut.kbss.termit.util.Constants; import cz.cvut.kbss.termit.util.TypeAwareByteArrayResource; import cz.cvut.kbss.termit.util.TypeAwareResource; import cz.cvut.kbss.termit.util.Utils; @@ -388,7 +387,7 @@ void removeFileThrowsTermItExceptionWhenFileIsNotLinkedToDocument() { @Test void findLatestTextAnalysisRecordRetrievesLatestTextAnalysisRecordForResource() { final File file = Generator.generateFileWithId("test.html"); - final TextAnalysisRecord record = new TextAnalysisRecord(Utils.timestamp(), file); + final 
TextAnalysisRecord record = new TextAnalysisRecord(Utils.timestamp(), file, Environment.LANGUAGE); when(textAnalysisService.findLatestAnalysisRecord(file)).thenReturn(Optional.of(record)); final TextAnalysisRecord result = sut.findLatestTextAnalysisRecord(file); @@ -530,7 +529,7 @@ void getContentWithoutUnconfirmedOccurrencesRemovesUnconfirmedOccurrencesFromFil @Test void addFileToDocumentSetsFileLanguageToDefaultConfiguredWhenNotProvided() { - config.getPersistence().setLanguage(Constants.DEFAULT_LANGUAGE); + config.getPersistence().setLanguage(Environment.LANGUAGE); final Vocabulary vocabulary = Generator.generateVocabularyWithId(); final Document document = Generator.generateDocumentWithId(); document.setVocabulary(vocabulary.getUri()); @@ -546,7 +545,7 @@ void addFileToDocumentSetsFileLanguageToDefaultConfiguredWhenNotProvided() { @Test void addFileToDocumentDoesNotModifyLanguageWhenItIsAlreadySet() { - config.getPersistence().setLanguage(Constants.DEFAULT_LANGUAGE); + config.getPersistence().setLanguage(Environment.LANGUAGE); final Vocabulary vocabulary = Generator.generateVocabularyWithId(); final Document document = Generator.generateDocumentWithId(); document.setVocabulary(vocabulary.getUri()); diff --git a/src/test/java/cz/cvut/kbss/termit/service/document/TextAnalysisServiceTest.java b/src/test/java/cz/cvut/kbss/termit/service/document/TextAnalysisServiceTest.java index 794753204..9a049a40a 100644 --- a/src/test/java/cz/cvut/kbss/termit/service/document/TextAnalysisServiceTest.java +++ b/src/test/java/cz/cvut/kbss/termit/service/document/TextAnalysisServiceTest.java @@ -37,7 +37,6 @@ import cz.cvut.kbss.termit.rest.handler.ErrorInfo; import cz.cvut.kbss.termit.service.BaseServiceTestRunner; import cz.cvut.kbss.termit.util.Configuration; -import cz.cvut.kbss.termit.util.Constants; import cz.cvut.kbss.termit.util.Utils; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -326,6 +325,7 @@ void analyzeFileBacksUpFileContentBeforeSavingNewAnalyzedContent() throws Except @Test void analyzeFileCreatesTextAnalysisRecord() { + file.setLanguage("cs"); mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) .andExpect(method(HttpMethod.POST)).andExpect(content().string(containsString(CONTENT))) .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); @@ -334,11 +334,12 @@ void analyzeFileCreatesTextAnalysisRecord() { verify(textAnalysisRecordDao).persist(captor.capture()); assertEquals(file, captor.getValue().getAnalyzedResource()); assertEquals(Collections.singleton(vocabulary.getUri()), captor.getValue().getVocabularies()); + assertEquals(file.getLanguage(), captor.getValue().getLanguage()); } @Test void findLatestAnalysisRecordFindsLatestTextAnalysisRecordForResource() { - final TextAnalysisRecord record = new TextAnalysisRecord(Utils.timestamp(), file); + final TextAnalysisRecord record = new TextAnalysisRecord(Utils.timestamp(), file, Environment.LANGUAGE); record.setVocabularies(Collections.singleton(vocabulary.getUri())); when(textAnalysisRecordDao.findLatest(file)).thenReturn(Optional.of(record)); @@ -518,7 +519,7 @@ void supportsLanguageGetsListOfSupportedLanguagesFromTextAnalysisServiceAndCheck @Test void supportsLanguageReturnsTrueWhenTextAnalysisServiceLanguagesEndpointUrlIsNotConfigured() { String endpointUrl = config.getTextAnalysis().getLanguagesUrl(); - file.setLanguage(Constants.DEFAULT_LANGUAGE); + file.setLanguage(Environment.LANGUAGE); config.getTextAnalysis().setLanguagesUrl(null); assertTrue(sut.supportsLanguage(file)); // Reset 
configuration state From 851326211a408bcb2670be53fa5845b713fe782c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Tue, 19 Nov 2024 17:41:52 +0100 Subject: [PATCH 33/49] [Bug #314] Fix ThrottledFuture throwing on "then action" when completed exceptionally --- .../kbss/termit/util/throttle/ThrottledFuture.java | 10 +++++++++- .../termit/util/throttle/ThrottledFutureTest.java | 12 ++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/src/main/java/cz/cvut/kbss/termit/util/throttle/ThrottledFuture.java b/src/main/java/cz/cvut/kbss/termit/util/throttle/ThrottledFuture.java index e32f8ef40..e6e492474 100644 --- a/src/main/java/cz/cvut/kbss/termit/util/throttle/ThrottledFuture.java +++ b/src/main/java/cz/cvut/kbss/termit/util/throttle/ThrottledFuture.java @@ -245,7 +245,7 @@ public boolean isRunning() { public ThrottledFuture then(Consumer action) { try { callbackLock.lock(); - if (future.isDone() && !future.isCancelled()) { + if (future.isDone() && !future.isCancelled() && !future.isCompletedExceptionally()) { try { action.accept(future.get()); } catch (InterruptedException e) { @@ -262,4 +262,12 @@ public ThrottledFuture then(Consumer action) { } return this; } + + /** + * @return {@code true} if this future completed + * exceptionally + */ + public boolean isCompletedExceptionally() { + return future.isCompletedExceptionally(); + } } diff --git a/src/test/java/cz/cvut/kbss/termit/util/throttle/ThrottledFutureTest.java b/src/test/java/cz/cvut/kbss/termit/util/throttle/ThrottledFutureTest.java index bf8f4f4e0..039a2e3d8 100644 --- a/src/test/java/cz/cvut/kbss/termit/util/throttle/ThrottledFutureTest.java +++ b/src/test/java/cz/cvut/kbss/termit/util/throttle/ThrottledFutureTest.java @@ -138,6 +138,18 @@ void thenActionIsNotExecutedOnceFutureIsCancelled() { assertFalse(completed.get()); } + @Test + void thenActionIsNotExecutedWhenFutureCompletedExceptionally() { + final AtomicBoolean completed = new AtomicBoolean(false); + final ThrottledFuture future = ThrottledFuture.of(() -> { + throw new RuntimeException(); + }); + future.run(null); + assertFalse(completed.get()); + future.then(futureResult -> completed.set(true)); + assertFalse(completed.get()); + } + @Test void callingRunWillExecuteFutureOnlyOnce() { AtomicInteger count = new AtomicInteger(0); From 1462c058b789f41cced1e9619784d62131a4593e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Wed, 20 Nov 2024 13:38:17 +0100 Subject: [PATCH 34/49] [Bug #314] Change ThrottledFuture#then to accept the completed future --- .../termit/persistence/dao/VocabularyDao.java | 4 +- .../service/business/VocabularyService.java | 4 +- .../VocabularyRepositoryService.java | 4 +- .../termit/util/throttle/CacheableFuture.java | 2 +- .../termit/util/throttle/ChainableFuture.java | 13 +++--- .../termit/util/throttle/ThrottledFuture.java | 41 ++++++++++--------- .../websocket/VocabularySocketController.java | 9 ++-- .../util/throttle/ThrottledFutureTest.java | 14 +++---- 8 files changed, 48 insertions(+), 43 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java index 9a9a7d734..2b215ed33 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java @@ -48,7 +48,7 @@ import cz.cvut.kbss.termit.service.snapshot.SnapshotProvider; import cz.cvut.kbss.termit.util.Configuration; import 
cz.cvut.kbss.termit.util.Utils; -import cz.cvut.kbss.termit.util.throttle.CacheableFuture; +import cz.cvut.kbss.termit.util.throttle.ThrottledFuture; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -366,7 +366,7 @@ public void refreshLastModified(RefreshLastModifiedEvent event) { } @Transactional - public CacheableFuture> validateContents(URI vocabulary) { + public ThrottledFuture> validateContents(URI vocabulary) { final VocabularyContentValidator validator = context.getBean(VocabularyContentValidator.class); final Collection importClosure = getTransitivelyImportedVocabularies(vocabulary); importClosure.add(vocabulary); diff --git a/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java b/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java index 6f265656c..5ebc83804 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java @@ -46,8 +46,8 @@ import cz.cvut.kbss.termit.util.TypeAwareClasspathResource; import cz.cvut.kbss.termit.util.TypeAwareFileSystemResource; import cz.cvut.kbss.termit.util.TypeAwareResource; -import cz.cvut.kbss.termit.util.throttle.CacheableFuture; import cz.cvut.kbss.termit.util.throttle.Throttle; +import cz.cvut.kbss.termit.util.throttle.ThrottledFuture; import jakarta.annotation.Nonnull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -384,7 +384,7 @@ public void remove(Vocabulary asset) { * * @param vocabulary Vocabulary to validate */ - public CacheableFuture> validateContents(URI vocabulary) { + public ThrottledFuture> validateContents(URI vocabulary) { return repositoryService.validateContents(vocabulary); } diff --git a/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java b/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java index 6cffad957..8f9090820 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java @@ -42,7 +42,7 @@ import cz.cvut.kbss.termit.util.Configuration; import cz.cvut.kbss.termit.util.Constants; import cz.cvut.kbss.termit.util.Utils; -import cz.cvut.kbss.termit.util.throttle.CacheableFuture; +import cz.cvut.kbss.termit.util.throttle.ThrottledFuture; import cz.cvut.kbss.termit.workspace.EditableVocabularies; import jakarta.annotation.Nonnull; import jakarta.validation.Validator; @@ -334,7 +334,7 @@ private void ensureNoTermRelationsExists(Vocabulary vocabulary) throws AssetRemo } } - public CacheableFuture> validateContents(URI vocabulary) { + public ThrottledFuture> validateContents(URI vocabulary) { return vocabularyDao.validateContents(vocabulary); } diff --git a/src/main/java/cz/cvut/kbss/termit/util/throttle/CacheableFuture.java b/src/main/java/cz/cvut/kbss/termit/util/throttle/CacheableFuture.java index f1dd254a5..b6afe3872 100644 --- a/src/main/java/cz/cvut/kbss/termit/util/throttle/CacheableFuture.java +++ b/src/main/java/cz/cvut/kbss/termit/util/throttle/CacheableFuture.java @@ -11,7 +11,7 @@ * A future which can provide a cached result before its completion. 
* @see Future */ -public interface CacheableFuture extends ChainableFuture { +public interface CacheableFuture extends Future { /** * @return the cached result when available diff --git a/src/main/java/cz/cvut/kbss/termit/util/throttle/ChainableFuture.java b/src/main/java/cz/cvut/kbss/termit/util/throttle/ChainableFuture.java index 0d8b63d6c..12d2b915e 100644 --- a/src/main/java/cz/cvut/kbss/termit/util/throttle/ChainableFuture.java +++ b/src/main/java/cz/cvut/kbss/termit/util/throttle/ChainableFuture.java @@ -3,14 +3,15 @@ import java.util.concurrent.Future; import java.util.function.Consumer; -public interface ChainableFuture extends Future { +public interface ChainableFuture> extends Future { /** - * Executes this action once the future is completed normally. - * Action is not executed on exceptional completion. + * Executes this action once the future is completed. + * Action is executed no matter if the future is completed successfully, exceptionally or cancelled. *

- * If the future is already completed, action is executed synchronously. - * @param action action to be executed + * If the future is already completed, it is executed synchronously. + * @param action action receiving this future after completion + * @return this future */ - ChainableFuture then(Consumer action); + ChainableFuture then(Consumer action); } diff --git a/src/main/java/cz/cvut/kbss/termit/util/throttle/ThrottledFuture.java b/src/main/java/cz/cvut/kbss/termit/util/throttle/ThrottledFuture.java index e6e492474..b32e6e095 100644 --- a/src/main/java/cz/cvut/kbss/termit/util/throttle/ThrottledFuture.java +++ b/src/main/java/cz/cvut/kbss/termit/util/throttle/ThrottledFuture.java @@ -1,6 +1,5 @@ package cz.cvut.kbss.termit.util.throttle; -import cz.cvut.kbss.termit.exception.TermItException; import cz.cvut.kbss.termit.util.Utils; import cz.cvut.kbss.termit.util.longrunning.LongRunningTask; import jakarta.annotation.Nonnull; @@ -20,7 +19,7 @@ import java.util.function.Consumer; import java.util.function.Supplier; -public class ThrottledFuture implements CacheableFuture, LongRunningTask { +public class ThrottledFuture implements CacheableFuture, ChainableFuture>, LongRunningTask { private final ReentrantLock lock = new ReentrantLock(); private final ReentrantLock callbackLock = new ReentrantLock(); @@ -33,7 +32,7 @@ public class ThrottledFuture implements CacheableFuture, LongRunningTask { private @Nullable Supplier task; - private final List> onCompletion = new ArrayList<>(); + private final List>> onCompletion = new ArrayList<>(); private final AtomicReference startedAt = new AtomicReference<>(null); @@ -90,7 +89,16 @@ public ThrottledFuture setCachedResult(@Nullable final T cachedResult) { @Override public boolean cancel(boolean mayInterruptIfRunning) { - return future.cancel(mayInterruptIfRunning); + if(!future.cancel(mayInterruptIfRunning)) { + return false; + } + + if (task != null) { + callbackLock.lock(); + onCompletion.forEach(c -> c.accept(this)); + callbackLock.unlock(); + } + return true; } @Override @@ -124,7 +132,7 @@ public T get(long timeout, @Nonnull TimeUnit unit) * @return If the current task is already running, was canceled or already completed, returns a new future for the given task. * Otherwise, replaces the current task and returns self. 
*/ - protected ThrottledFuture update(Supplier task, @Nonnull List> onCompletion) { + protected ThrottledFuture update(Supplier task, @Nonnull List>> onCompletion) { boolean locked = false; try { locked = lock.tryLock(); @@ -201,14 +209,16 @@ protected void run(@Nullable Consumer> startedCallback) { T result = null; if (task != null) { result = task.get(); - final T finalResult = result; - callbackLock.lock(); - onCompletion.forEach(c -> c.accept(finalResult)); - callbackLock.unlock(); } future.complete(result); } catch (Exception e) { future.completeExceptionally(e); + } finally { + if (task != null) { + callbackLock.lock(); + onCompletion.forEach(c -> c.accept(this)); + callbackLock.unlock(); + } } } finally { if (locked) { @@ -242,18 +252,11 @@ public boolean isRunning() { } @Override - public ThrottledFuture then(Consumer action) { + public ThrottledFuture then(Consumer> action) { try { callbackLock.lock(); - if (future.isDone() && !future.isCancelled() && !future.isCompletedExceptionally()) { - try { - action.accept(future.get()); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw new TermItException(e); - } catch (ExecutionException e) { - throw new TermItException(e); - } + if (future.isDone()) { + action.accept(this); } else { onCompletion.add(action); } diff --git a/src/main/java/cz/cvut/kbss/termit/websocket/VocabularySocketController.java b/src/main/java/cz/cvut/kbss/termit/websocket/VocabularySocketController.java index 00c2e8b83..f244358ed 100644 --- a/src/main/java/cz/cvut/kbss/termit/websocket/VocabularySocketController.java +++ b/src/main/java/cz/cvut/kbss/termit/websocket/VocabularySocketController.java @@ -11,7 +11,7 @@ import cz.cvut.kbss.termit.service.business.VocabularyService; import cz.cvut.kbss.termit.util.Configuration; import cz.cvut.kbss.termit.util.Constants; -import cz.cvut.kbss.termit.util.throttle.CacheableFuture; +import cz.cvut.kbss.termit.util.throttle.ThrottledFuture; import jakarta.annotation.Nonnull; import org.springframework.context.event.EventListener; import org.springframework.messaging.MessageHeaders; @@ -53,7 +53,7 @@ public void validateVocabulary(@DestinationVariable String localName, final URI identifier = resolveIdentifier(namespace.orElse(config.getNamespace().getVocabulary()), localName); final Vocabulary vocabulary = vocabularyService.getReference(identifier); - final CacheableFuture> future = vocabularyService.validateContents(vocabulary.getUri()); + final ThrottledFuture> future = vocabularyService.validateContents(vocabulary.getUri()); future.getNow().ifPresentOrElse(validationResults -> // if there is a result present (returned from cache), send it @@ -66,14 +66,15 @@ public void validateVocabulary(@DestinationVariable String localName, messageHeaders ), () -> // otherwise reply will be sent once the future is resolved - future.then(results -> + future.then(completedFuture -> + completedFuture.getNow().ifPresent(results -> sendToSession( WebSocketDestinations.VOCABULARIES_VALIDATION, results, getHeaders(identifier, Map.of("cached", false)), messageHeaders - )) + ))) ); } diff --git a/src/test/java/cz/cvut/kbss/termit/util/throttle/ThrottledFutureTest.java b/src/test/java/cz/cvut/kbss/termit/util/throttle/ThrottledFutureTest.java index 039a2e3d8..adf349a03 100644 --- a/src/test/java/cz/cvut/kbss/termit/util/throttle/ThrottledFutureTest.java +++ b/src/test/java/cz/cvut/kbss/termit/util/throttle/ThrottledFutureTest.java @@ -299,8 +299,8 @@ void transferUpdatesSecondFutureWithTask() { @Test void 
transferUpdatesSecondFutureWithCallbacks() { - final Consumer firstCallback = (result) -> {}; - final Consumer secondCallback = (result) -> {}; + final Consumer> firstCallback = (result) -> {}; + final Consumer> secondCallback = (result) -> {}; final ThrottledFuture firstFuture = ThrottledFuture.of(()->"").then(firstCallback); final ThrottledFuture secondFuture = ThrottledFuture.of(()->"").then(secondCallback); final ThrottledFuture mocked = mock(ThrottledFuture.class); @@ -323,14 +323,14 @@ void transferUpdatesSecondFutureWithCallbacks() { @Test void callbacksAreClearedAfterTransferring() { - final Consumer firstCallback = (result) -> {}; - final Consumer secondCallback = (result) -> {}; + final Consumer> firstCallback = (result) -> {}; + final Consumer> secondCallback = (result) -> {}; final ThrottledFuture future = ThrottledFuture.of(()->"").then(firstCallback).then(secondCallback); final ThrottledFuture mocked = mock(ThrottledFuture.class); future.transfer(mocked); - final ArgumentCaptor>> captor = ArgumentCaptor.forClass(List.class); + final ArgumentCaptor>>> captor = ArgumentCaptor.forClass(List.class); verify(mocked).update(notNull(), captor.capture()); // captor takes the original list from the future @@ -386,8 +386,8 @@ void updateSetsTask() { @Test void updateAddsCallbacksToTheCurrentOnes() { - final Consumer callback = result -> {}; - final Consumer originalCallback = result -> {}; + final Consumer> callback = result -> {}; + final Consumer> originalCallback = result -> {}; final ThrottledFuture future = ThrottledFuture.of(() -> "").then(originalCallback); future.update(()->"", List.of(callback)); From 3f938ecd48b26a01ec7cbeb1153fe3a341f049a3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Wed, 20 Nov 2024 14:01:23 +0100 Subject: [PATCH 35/49] [Bug #314] Update tests for ThrottledFuture#then accepting the completed future --- .../util/throttle/ThrottledFutureTest.java | 22 +++++++++---------- 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/src/test/java/cz/cvut/kbss/termit/util/throttle/ThrottledFutureTest.java b/src/test/java/cz/cvut/kbss/termit/util/throttle/ThrottledFutureTest.java index adf349a03..e3fc38c53 100644 --- a/src/test/java/cz/cvut/kbss/termit/util/throttle/ThrottledFutureTest.java +++ b/src/test/java/cz/cvut/kbss/termit/util/throttle/ThrottledFutureTest.java @@ -85,8 +85,7 @@ void getNowReturnsEmptyWhenCacheIsNull() { @Test void thenActionIsExecutedSynchronouslyWhenFutureIsAlreadyDoneAndNotCanceled() { - final Object result = new Object(); - final ThrottledFuture future = ThrottledFuture.of(() -> result); + final ThrottledFuture future = ThrottledFuture.of(() -> null); final AtomicBoolean completed = new AtomicBoolean(false); final AtomicReference futureResult = new AtomicReference<>(null); future.run(null); @@ -97,25 +96,24 @@ void thenActionIsExecutedSynchronouslyWhenFutureIsAlreadyDoneAndNotCanceled() { futureResult.set(fResult); }); assertTrue(completed.get()); - assertEquals(result, futureResult.get()); + assertEquals(future, futureResult.get()); } @Test - void thenActionIsNotExecutedWhenFutureIsAlreadyCancelled() { + void thenActionIsExecutedWhenFutureIsAlreadyCancelled() { final ThrottledFuture future = ThrottledFuture.of(Object::new); final AtomicBoolean completed = new AtomicBoolean(false); future.cancel(false); assertTrue(future.isCancelled()); future.then(result -> completed.set(true)); - assertFalse(completed.get()); + assertTrue(completed.get()); } @Test void thenActionIsExecutedOnceFutureIsRun() { - final 
Object result = new Object(); final AtomicBoolean completed = new AtomicBoolean(false); final AtomicReference fResult = new AtomicReference<>(null); - final ThrottledFuture future = ThrottledFuture.of(() -> result); + final ThrottledFuture future = ThrottledFuture.of(() -> null); future.then(futureResult -> { completed.set(true); fResult.set(futureResult); @@ -124,22 +122,22 @@ void thenActionIsExecutedOnceFutureIsRun() { assertFalse(completed.get()); // action was not executed yet future.run(null); assertTrue(completed.get()); - assertEquals(result, fResult.get()); + assertEquals(future, fResult.get()); } @Test - void thenActionIsNotExecutedOnceFutureIsCancelled() { + void thenActionIsExecutedOnceFutureIsCancelled() { final Object result = new Object(); final AtomicBoolean completed = new AtomicBoolean(false); final ThrottledFuture future = ThrottledFuture.of(() -> result); future.then(futureResult -> completed.set(true)); assertFalse(completed.get()); // action was not executed yet future.cancel(false); - assertFalse(completed.get()); + assertTrue(completed.get()); } @Test - void thenActionIsNotExecutedWhenFutureCompletedExceptionally() { + void thenActionIsExecutedWhenFutureCompletedExceptionally() { final AtomicBoolean completed = new AtomicBoolean(false); final ThrottledFuture future = ThrottledFuture.of(() -> { throw new RuntimeException(); @@ -147,7 +145,7 @@ void thenActionIsNotExecutedWhenFutureCompletedExceptionally() { future.run(null); assertFalse(completed.get()); future.then(futureResult -> completed.set(true)); - assertFalse(completed.get()); + assertTrue(completed.get()); } @Test From 8315b90bb986da00d01d3c68fb87acfacc31cb4d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Wed, 20 Nov 2024 16:47:45 +0100 Subject: [PATCH 36/49] [Bug #314] Add tests ensuring proper ThrottledFuture#then callbacks execution when future is cancelled. --- .../termit/util/throttle/ChainableFuture.java | 2 + .../termit/util/throttle/ThrottledFuture.java | 5 +- .../util/throttle/ThrottledFutureTest.java | 54 ++++++++++++++++++- 3 files changed, 58 insertions(+), 3 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/util/throttle/ChainableFuture.java b/src/main/java/cz/cvut/kbss/termit/util/throttle/ChainableFuture.java index 12d2b915e..831f00d52 100644 --- a/src/main/java/cz/cvut/kbss/termit/util/throttle/ChainableFuture.java +++ b/src/main/java/cz/cvut/kbss/termit/util/throttle/ChainableFuture.java @@ -10,6 +10,8 @@ public interface ChainableFuture> extends Fut * Action is executed no matter if the future is completed successfully, exceptionally or cancelled. *

* If the future is already completed, the action is executed synchronously. + *

+ * Note that you must use the future passed as the parameter and not the original future object. * @param action action receiving this future after completion * @return this future */ diff --git a/src/main/java/cz/cvut/kbss/termit/util/throttle/ThrottledFuture.java b/src/main/java/cz/cvut/kbss/termit/util/throttle/ThrottledFuture.java index b32e6e095..045d06cdf 100644 --- a/src/main/java/cz/cvut/kbss/termit/util/throttle/ThrottledFuture.java +++ b/src/main/java/cz/cvut/kbss/termit/util/throttle/ThrottledFuture.java @@ -89,11 +89,12 @@ public ThrottledFuture setCachedResult(@Nullable final T cachedResult) { @Override public boolean cancel(boolean mayInterruptIfRunning) { + final boolean wasCanceled = isCancelled(); if(!future.cancel(mayInterruptIfRunning)) { return false; } - if (task != null) { + if (!wasCanceled && task != null) { callbackLock.lock(); onCompletion.forEach(c -> c.accept(this)); callbackLock.unlock(); @@ -268,7 +269,7 @@ public ThrottledFuture then(Consumer> action) { /** * @return {@code true} if this future completed - * exceptionally + * exceptionally or was cancelled. */ public boolean isCompletedExceptionally() { return future.isCompletedExceptionally(); diff --git a/src/test/java/cz/cvut/kbss/termit/util/throttle/ThrottledFutureTest.java b/src/test/java/cz/cvut/kbss/termit/util/throttle/ThrottledFutureTest.java index e3fc38c53..b051471ab 100644 --- a/src/test/java/cz/cvut/kbss/termit/util/throttle/ThrottledFutureTest.java +++ b/src/test/java/cz/cvut/kbss/termit/util/throttle/ThrottledFutureTest.java @@ -137,7 +137,59 @@ void thenActionIsExecutedOnceFutureIsCancelled() { } @Test - void thenActionIsExecutedWhenFutureCompletedExceptionally() { + void thenActionIsExecutedOnlyOnceWhenFutureIsCancelled() { + final AtomicInteger executionCount = new AtomicInteger(0); + final ThrottledFuture future = ThrottledFuture.of(() -> null); + future.then(f -> executionCount.incrementAndGet()); + assertEquals(0, executionCount.get()); + future.cancel(false); + assertEquals(1, executionCount.get()); + future.cancel(false); + future.cancel(true); + assertEquals(1, executionCount.get()); + } + + @Test + void thenActionIsExecutedWhenFutureCompletesExceptionally() { + final AtomicBoolean completed = new AtomicBoolean(false); + final ThrottledFuture future = ThrottledFuture.of(() -> { + throw new RuntimeException(); + }); + future.then(futureResult -> completed.set(true)); + assertFalse(completed.get()); + future.run(null); + assertTrue(completed.get()); + } + + @Test + void isCompletedExceptionallyReturnsTrueWhenFutureCompletesExceptionally() { + final ThrottledFuture future = ThrottledFuture.of(() -> { + throw new RuntimeException(); + }); + future.run(null); + assertTrue(future.isCompletedExceptionally()); + } + + @Test + void isCompletedExceptionallyReturnsFalseWhenFutureCompletesNormally() { + final ThrottledFuture future = ThrottledFuture.of(() -> null); + future.run(null); + assertFalse(future.isCompletedExceptionally()); + assertFalse(future.isCancelled()); + assertTrue(future.isDone()); + } + + @Test + void isCompletedExceptionallyReturnsTrueWhenFutureIsCancelled() { + final ThrottledFuture future = ThrottledFuture.of(() -> null); + future.cancel(false); + assertTrue(future.isCompletedExceptionally()); + assertTrue(future.isCancelled()); + assertTrue(future.isDone()); + } + + @Test + void thenActionIsExecutedWhenFutureIsAlreadyCompletedExceptionally() { final AtomicBoolean completed = new AtomicBoolean(false); final ThrottledFuture future = ThrottledFuture.of(() -> { throw new 
RuntimeException(); From 58302d9a686d997a9931c64ecf16f35d837dd8fa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Sun, 24 Nov 2024 11:22:43 +0100 Subject: [PATCH 37/49] [Enhancement kbss-cvut/termit-ui#520] Use constants for api description in TermController & add constructors for ChangeRecordFilterDto --- .../dto/filter/ChangeRecordFilterDto.java | 16 +++++++++++++++ .../cvut/kbss/termit/rest/TermController.java | 20 +++++++------------ .../termit/rest/VocabularyController.java | 11 ++-------- 3 files changed, 25 insertions(+), 22 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/dto/filter/ChangeRecordFilterDto.java b/src/main/java/cz/cvut/kbss/termit/dto/filter/ChangeRecordFilterDto.java index e3b00750f..f646587b4 100644 --- a/src/main/java/cz/cvut/kbss/termit/dto/filter/ChangeRecordFilterDto.java +++ b/src/main/java/cz/cvut/kbss/termit/dto/filter/ChangeRecordFilterDto.java @@ -15,6 +15,22 @@ public class ChangeRecordFilterDto { private String authorName = ""; private URI changeType = null; + public ChangeRecordFilterDto() { + } + + public ChangeRecordFilterDto(String changedAttributeName, String authorName, URI changeType) { + this.changedAttributeName = changedAttributeName; + this.authorName = authorName; + this.changeType = changeType; + } + + public ChangeRecordFilterDto(String assetLabel, String changedAttributeName, String authorName, URI changeType) { + this.assetLabel = assetLabel; + this.changedAttributeName = changedAttributeName; + this.authorName = authorName; + this.changeType = changeType; + } + public String getAssetLabel() { return assetLabel; } diff --git a/src/main/java/cz/cvut/kbss/termit/rest/TermController.java b/src/main/java/cz/cvut/kbss/termit/rest/TermController.java index 250c6c0be..50bf8bcf6 100644 --- a/src/main/java/cz/cvut/kbss/termit/rest/TermController.java +++ b/src/main/java/cz/cvut/kbss/termit/rest/TermController.java @@ -700,16 +700,13 @@ public List getHistory( @Parameter(description = ApiDoc.ID_NAMESPACE_DESCRIPTION, example = ApiDoc.ID_NAMESPACE_EXAMPLE) @RequestParam(name = QueryParams.NAMESPACE, required = false) Optional namespace, @Parameter(description = ChangeRecordFilterDto.ApiDoc.CHANGE_TYPE_DESCRIPTION) - @RequestParam(name = "type", required = false) URI changeType, + @RequestParam(name = "changeType", required = false) URI changeType, @Parameter(description = ChangeRecordFilterDto.ApiDoc.AUTHOR_NAME_DESCRIPTION) @RequestParam(name = "author", required = false, defaultValue = "") String authorName, @Parameter(description = ChangeRecordFilterDto.ApiDoc.CHANGED_ATTRIBUTE_DESCRIPTION) @RequestParam(name = "attribute", required = false, defaultValue = "") String changedAttributeName) { final URI termUri = getTermUri(localName, termLocalName, namespace); - final ChangeRecordFilterDto filterDto = new ChangeRecordFilterDto(); - filterDto.setChangeType(changeType); - filterDto.setAuthorName(authorName); - filterDto.setChangedAttributeName(changedAttributeName); + final ChangeRecordFilterDto filterDto = new ChangeRecordFilterDto(changedAttributeName, authorName, changeType); return termService.getChanges(termService.findRequired(termUri), filterDto); } @@ -734,19 +731,16 @@ public List getHistory(@Parameter(description = ApiDoc.ID_ @Parameter(description = ApiDoc.ID_STANDALONE_NAMESPACE_DESCRIPTION, example = ApiDoc.ID_STANDALONE_NAMESPACE_EXAMPLE) @RequestParam(name = QueryParams.NAMESPACE) String namespace, - @Parameter(description = "Change type used for filtering.") - @RequestParam(name = "type", 
required = false) URI changeType, - @Parameter(description = "Author name used for filtering.") + @Parameter(description = ChangeRecordFilterDto.ApiDoc.CHANGE_TYPE_DESCRIPTION) + @RequestParam(name = "changeType", required = false) URI changeType, + @Parameter(description = ChangeRecordFilterDto.ApiDoc.AUTHOR_NAME_DESCRIPTION) @RequestParam(name = "author", required = false, defaultValue = "") String authorName, - @Parameter(description = "Changed attribute name used for filtering.") + @Parameter(description = ChangeRecordFilterDto.ApiDoc.CHANGED_ATTRIBUTE_DESCRIPTION) @RequestParam(name = "attribute", required = false, defaultValue = "") String changedAttributeName) { final URI termUri = idResolver.resolveIdentifier(namespace, localName); - final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); - filter.setChangeType(changeType); - filter.setAuthorName(authorName); - filter.setChangedAttributeName(changedAttributeName); + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(changedAttributeName, authorName, changeType); return termService.getChanges(termService.findRequired(termUri), filter); } diff --git a/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java b/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java index f59ad8384..d66524486 100644 --- a/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java +++ b/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java @@ -272,10 +272,7 @@ public List getHistory( @RequestParam(name = "attribute", required = false, defaultValue = "") String changedAttributeName) { final Vocabulary vocabulary = vocabularyService.getReference( resolveVocabularyUri(localName, namespace)); - final ChangeRecordFilterDto filterDto = new ChangeRecordFilterDto(); - filterDto.setChangeType(changeType); - filterDto.setAuthorName(authorName); - filterDto.setChangedAttributeName(changedAttributeName); + final ChangeRecordFilterDto filterDto = new ChangeRecordFilterDto(changedAttributeName, authorName, changeType); return vocabularyService.getChanges(vocabulary, filterDto); } @@ -330,11 +327,7 @@ public List getDetailedHistoryOfContent( name = Constants.QueryParams.PAGE, required = false, defaultValue = DEFAULT_PAGE) Integer pageNo) { final Pageable pageReq = createPageRequest(pageSize, pageNo); final Vocabulary vocabulary = vocabularyService.getReference(resolveVocabularyUri(localName, namespace)); - final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); - filter.setAssetLabel(termName); - filter.setChangeType(changeType); - filter.setAuthorName(authorName); - filter.setChangedAttributeName(changedAttributeName); + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(termName, changedAttributeName, authorName, changeType); return vocabularyService.getDetailedHistoryOfContent(vocabulary, filter, pageReq); } From ab074859fdc3e9e0f6c246b065c2fb145938542d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Ka=C5=88ka?= Date: Sun, 24 Nov 2024 11:33:46 +0100 Subject: [PATCH 38/49] [Enhancement kbss-cvut/termit-ui#520] Replace System#currentTimeMillis with Utils#timestamp --- .../persistence/dao/changetracking/ChangeRecordDaoTest.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java index 4fdee2b5f..2aaffee88 100644 --- 
a/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java @@ -132,7 +132,7 @@ void findAllRetrievesChangeRecordsRelatedToSpecifiedAsset() { em.persist(asset, persistDescriptor(vocabulary.getUri())); }); final List records = IntStream.range(0, 5).mapToObj( - i -> generateUpdateRecord(Instant.ofEpochMilli(System.currentTimeMillis() - i * 10000L), + i -> generateUpdateRecord(Utils.timestamp().minusSeconds(i * 10L), asset.getUri())).collect(Collectors.toList()); final URI changeContext = contextResolver.resolveChangeTrackingContext(vocabulary); transactional(() -> records.forEach(r -> em.persist(r, persistDescriptor(changeContext)))); @@ -154,7 +154,7 @@ void findAllReturnsChangeRecordsOrderedByTimestampDescending() { enableRdfsInference(em); final Term asset = Generator.generateTermWithId(vocabulary.getUri()); final List records = IntStream.range(0, 5).mapToObj( - i -> generateUpdateRecord(Instant.ofEpochMilli(System.currentTimeMillis() + i * 10000L), + i -> generateUpdateRecord(Utils.timestamp().plusSeconds(i * 10L), asset.getUri())).collect(Collectors.toList()); final URI changeContext = contextResolver.resolveChangeTrackingContext(vocabulary); transactional(() -> { From 2c9568af1bd90272c8f809e1a2c0f71856859960 Mon Sep 17 00:00:00 2001 From: Martin Ledvinka Date: Mon, 25 Nov 2024 10:26:14 +0100 Subject: [PATCH 39/49] [kbss-cvut/termit-ui#571] Authorize TermOccurrence modifications (approval, removal, etc.). --- .../kbss/termit/persistence/dao/TermDao.java | 21 +++-- .../termit/rest/TermOccurrenceController.java | 3 - .../TermOccurrenceRepositoryService.java | 5 ++ .../repository/TermRepositoryService.java | 11 +++ .../ResourceAuthorizationService.java | 7 +- .../TermOccurrenceAuthorizationService.java | 61 +++++++++++++ ...ermOccurrenceAuthorizationServiceTest.java | 86 +++++++++++++++++++ 7 files changed, 181 insertions(+), 13 deletions(-) create mode 100644 src/main/java/cz/cvut/kbss/termit/service/security/authorization/TermOccurrenceAuthorizationService.java create mode 100644 src/test/java/cz/cvut/kbss/termit/service/security/authorization/TermOccurrenceAuthorizationServiceTest.java diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/TermDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/TermDao.java index 47180aa51..999c2d4c4 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/TermDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/TermDao.java @@ -91,8 +91,10 @@ protected URI labelProperty() { @Override public Optional find(URI id) { try { - final Optional result = resolveVocabularyId(id).map(vocabulary -> - em.find(Term.class, id, descriptorFactory.termDescriptor(vocabulary))); + final Optional result = findTermVocabulary(id).map(vocabulary -> + em.find(Term.class, id, + descriptorFactory.termDescriptor( + vocabulary))); result.ifPresent(this::postLoad); return result; } catch (RuntimeException e) { @@ -100,12 +102,19 @@ public Optional find(URI id) { } } - private Optional resolveVocabularyId(URI termId) { + /** + * Finds vocabulary to which a term with the specified id belongs. + * + * @param termId Term identifier + * @return Vocabulary identifier wrapped in {@code Optional} + */ + public Optional findTermVocabulary(URI termId) { + Objects.requireNonNull(termId); try { return Optional.of(em.createNativeQuery("SELECT DISTINCT ?v WHERE { ?t ?inVocabulary ?v . 
}", URI.class) - .setParameter("t", termId) - .setParameter("inVocabulary", TERM_FROM_VOCABULARY) - .getSingleResult()); + .setParameter("t", termId) + .setParameter("inVocabulary", TERM_FROM_VOCABULARY) + .getSingleResult()); } catch (NoResultException | NoUniqueResultException e) { return Optional.empty(); } diff --git a/src/main/java/cz/cvut/kbss/termit/rest/TermOccurrenceController.java b/src/main/java/cz/cvut/kbss/termit/rest/TermOccurrenceController.java index ddfa5057a..7fe2bc420 100644 --- a/src/main/java/cz/cvut/kbss/termit/rest/TermOccurrenceController.java +++ b/src/main/java/cz/cvut/kbss/termit/rest/TermOccurrenceController.java @@ -76,7 +76,6 @@ public TermOccurrenceController(IdentifierResolver idResolver, Configuration con }) @PutMapping(consumes = {JsonLd.MEDIA_TYPE, MediaType.APPLICATION_JSON_VALUE}) @ResponseStatus(HttpStatus.NO_CONTENT) - @PreAuthorize("hasRole('" + SecurityConstants.ROLE_FULL_USER + "')") public void saveOccurrence(@Parameter(description = "Term occurrence to save") @RequestBody TermOccurrence occurrence) { occurrenceService.persistOrUpdate(occurrence); @@ -91,7 +90,6 @@ public void saveOccurrence(@Parameter(description = "Term occurrence to save") }) @PutMapping(value = "/{localName}") @ResponseStatus(HttpStatus.ACCEPTED) - @PreAuthorize("hasRole('" + SecurityConstants.ROLE_FULL_USER + "')") public void approveOccurrence( @Parameter(description = TermOccurrenceControllerDoc.ID_LOCAL_NAME_DESCRIPTION, example = TermOccurrenceControllerDoc.ID_LOCAL_NAME_EXAMPLE) @@ -113,7 +111,6 @@ public void approveOccurrence( }) @DeleteMapping(value = "/{localName}") @ResponseStatus(HttpStatus.NO_CONTENT) - @PreAuthorize("hasRole('" + SecurityConstants.ROLE_FULL_USER + "')") public void removeOccurrence(@Parameter(description = TermOccurrenceControllerDoc.ID_LOCAL_NAME_DESCRIPTION, example = TermOccurrenceControllerDoc.ID_LOCAL_NAME_EXAMPLE) @PathVariable String localName, diff --git a/src/main/java/cz/cvut/kbss/termit/service/repository/TermOccurrenceRepositoryService.java b/src/main/java/cz/cvut/kbss/termit/service/repository/TermOccurrenceRepositoryService.java index 88940766e..104cac82a 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/repository/TermOccurrenceRepositoryService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/repository/TermOccurrenceRepositoryService.java @@ -30,6 +30,7 @@ import org.springframework.retry.annotation.Retryable; import org.springframework.scheduling.annotation.Async; import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -58,6 +59,7 @@ public TermOccurrenceRepositoryService(TermOccurrenceDao termOccurrenceDao, Term this.resourceService = resourceService; } + @PreAuthorize("@termOccurrenceAuthorizationService.canModify(#occurrence)") @Transactional @Override public void persist(TermOccurrence occurrence) { @@ -78,6 +80,7 @@ private void checkTermExists(TermOccurrence occurrence) { } } + @PreAuthorize("@termOccurrenceAuthorizationService.canModify(#occurrence)") @Transactional @Override public void persistOrUpdate(TermOccurrence occurrence) { @@ -95,6 +98,7 @@ public void persistOrUpdate(TermOccurrence occurrence) { } } + @PreAuthorize("@termOccurrenceAuthorizationService.canModify(#occurrenceId)") @Async // Retry in case the occurrence has not been persisted, yet (see AsynchronousTermOccurrenceSaver) @Retryable(retryFor = 
NotFoundException.class, maxAttempts = 3, backoff = @Backoff(delay = 30000L)) @@ -108,6 +112,7 @@ public void approve(URI occurrenceId) { toApprove.markApproved(); } + @PreAuthorize("@termOccurrenceAuthorizationService.canModify(#occurrenceId)") @Transactional @Override public void remove(URI occurrenceId) { diff --git a/src/main/java/cz/cvut/kbss/termit/service/repository/TermRepositoryService.java b/src/main/java/cz/cvut/kbss/termit/service/repository/TermRepositoryService.java index 15b11b1f8..488b3edb3 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/repository/TermRepositoryService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/repository/TermRepositoryService.java @@ -392,6 +392,17 @@ public List getDefinitionallyRelatedOf(Term instance) { return termOccurrenceDao.findAllDefinitionalOf(instance); } + /** + * Gets the identifier of a vocabulary to which a term with the specified id belongs. + * + * @param termId Term identifier + * @return Vocabulary identifier wrapped in {@code Optional} + */ + @Transactional(readOnly = true) + public Optional findTermVocabulary(URI termId) { + return termDao.findTermVocabulary(termId); + } + /** * Checks that a term can be removed. *

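The @PreAuthorize guards added to TermOccurrenceRepositoryService in this patch rely on Spring Security's bean-reference SpEL syntax: "@termOccurrenceAuthorizationService" is resolved to the Spring bean of that name and the expression is evaluated before the annotated method runs, with "#occurrence"/"#occurrenceId" bound to the method arguments. A minimal sketch of that pattern follows; the class, bean, and method names here are invented for illustration only and are not part of the patch.

import java.net.URI;

import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Service;

// Bean referenced from the SpEL expression by its name.
@Service("occurrenceAuthorizationSketch")
class OccurrenceAuthorizationSketch {
    public boolean canModify(URI occurrenceId) {
        // A real implementation resolves the occurrence's vocabulary or file and checks the user's access;
        // here we only illustrate the contract: returning true allows the guarded call to proceed.
        return occurrenceId != null;
    }
}

@Service
class GuardedOccurrenceServiceSketch {
    // Spring Security evaluates the expression against the named bean before invoking the method.
    @PreAuthorize("@occurrenceAuthorizationSketch.canModify(#occurrenceId)")
    public void remove(URI occurrenceId) {
        // executes only when canModify returned true
    }
}

This is also why the controller changes in this patch can drop the role-based @PreAuthorize annotations: the authorization decision is delegated to the dedicated authorization service at the repository-service layer.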
diff --git a/src/main/java/cz/cvut/kbss/termit/service/security/authorization/ResourceAuthorizationService.java b/src/main/java/cz/cvut/kbss/termit/service/security/authorization/ResourceAuthorizationService.java index 631790307..f0152280a 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/security/authorization/ResourceAuthorizationService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/security/authorization/ResourceAuthorizationService.java @@ -49,11 +49,10 @@ public boolean canModify(Resource asset) { } private Optional resolveVocabulary(Resource resource) { - if (resource instanceof Document) { - final URI vocIri = ((Document) resource).getVocabulary(); + if (resource instanceof Document document) { + final URI vocIri = document.getVocabulary(); return vocIri != null ? Optional.of(new Vocabulary(vocIri)) : Optional.empty(); - } else if (resource instanceof File) { - final File f = (File) resource; + } else if (resource instanceof File f) { return f.getDocument() != null ? getDocumentVocabulary(f.getDocument()) : Optional.empty(); } return Optional.empty(); diff --git a/src/main/java/cz/cvut/kbss/termit/service/security/authorization/TermOccurrenceAuthorizationService.java b/src/main/java/cz/cvut/kbss/termit/service/security/authorization/TermOccurrenceAuthorizationService.java new file mode 100644 index 000000000..f3c063cd0 --- /dev/null +++ b/src/main/java/cz/cvut/kbss/termit/service/security/authorization/TermOccurrenceAuthorizationService.java @@ -0,0 +1,61 @@ +package cz.cvut.kbss.termit.service.security.authorization; + +import cz.cvut.kbss.termit.model.Vocabulary; +import cz.cvut.kbss.termit.model.assignment.TermDefinitionalOccurrence; +import cz.cvut.kbss.termit.model.assignment.TermFileOccurrence; +import cz.cvut.kbss.termit.model.assignment.TermOccurrence; +import cz.cvut.kbss.termit.model.resource.Resource; +import cz.cvut.kbss.termit.persistence.dao.TermOccurrenceDao; +import cz.cvut.kbss.termit.service.repository.ResourceRepositoryService; +import cz.cvut.kbss.termit.service.repository.TermRepositoryService; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import java.net.URI; +import java.util.Objects; +import java.util.Optional; + +@Service +public class TermOccurrenceAuthorizationService { + + private final TermOccurrenceDao dao; + + private final TermRepositoryService termService; + + private final ResourceRepositoryService resourceService; + + private final VocabularyAuthorizationService vocabularyAuthorizationService; + + private final ResourceAuthorizationService resourceAuthorizationService; + + public TermOccurrenceAuthorizationService(TermOccurrenceDao dao, TermRepositoryService termService, + ResourceRepositoryService resourceService, + VocabularyAuthorizationService vocabularyAuthorizationService, + ResourceAuthorizationService resourceAuthorizationService) { + this.dao = dao; + this.termService = termService; + this.resourceService = resourceService; + this.vocabularyAuthorizationService = vocabularyAuthorizationService; + this.resourceAuthorizationService = resourceAuthorizationService; + } + + @Transactional(readOnly = true) + public boolean canModify(TermOccurrence occurrence) { + Objects.requireNonNull(occurrence); + if (occurrence instanceof TermDefinitionalOccurrence definitionalOccurrence) { + final Optional vocabularyUri = termService.findTermVocabulary( + definitionalOccurrence.getTarget().getSource()); + return vocabularyUri.map(vUri -> 
vocabularyAuthorizationService.canModify(new Vocabulary(vUri))) + .orElse(false); + } else { + final TermFileOccurrence fo = (TermFileOccurrence) occurrence; + final Optional file = resourceService.find(fo.getTarget().getSource()); + return file.map(resourceAuthorizationService::canModify).orElse(false); + } + } + + @Transactional(readOnly = true) + public boolean canModify(URI occurrenceId) { + return dao.find(occurrenceId).map(this::canModify).orElse(false); + } +} diff --git a/src/test/java/cz/cvut/kbss/termit/service/security/authorization/TermOccurrenceAuthorizationServiceTest.java b/src/test/java/cz/cvut/kbss/termit/service/security/authorization/TermOccurrenceAuthorizationServiceTest.java new file mode 100644 index 000000000..360f73ff2 --- /dev/null +++ b/src/test/java/cz/cvut/kbss/termit/service/security/authorization/TermOccurrenceAuthorizationServiceTest.java @@ -0,0 +1,86 @@ +package cz.cvut.kbss.termit.service.security.authorization; + +import cz.cvut.kbss.termit.environment.Generator; +import cz.cvut.kbss.termit.model.Vocabulary; +import cz.cvut.kbss.termit.model.assignment.DefinitionalOccurrenceTarget; +import cz.cvut.kbss.termit.model.assignment.FileOccurrenceTarget; +import cz.cvut.kbss.termit.model.assignment.TermDefinitionalOccurrence; +import cz.cvut.kbss.termit.model.assignment.TermFileOccurrence; +import cz.cvut.kbss.termit.model.assignment.TermOccurrence; +import cz.cvut.kbss.termit.model.resource.File; +import cz.cvut.kbss.termit.persistence.dao.TermOccurrenceDao; +import cz.cvut.kbss.termit.service.repository.ResourceRepositoryService; +import cz.cvut.kbss.termit.service.repository.TermRepositoryService; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import java.net.URI; +import java.util.Optional; + +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +@ExtendWith(MockitoExtension.class) +class TermOccurrenceAuthorizationServiceTest { + + @Mock + private TermOccurrenceDao toDao; + + @Mock + private TermRepositoryService termService; + + @Mock + private ResourceRepositoryService resourceService; + + @Mock + private VocabularyAuthorizationService vocabularyAuthorizationService; + + @Mock + private ResourceAuthorizationService resourceAuthorizationService; + + @InjectMocks + private TermOccurrenceAuthorizationService sut; + + @Test + void canModifyResolvesTermVocabularyAndChecksIfUserCanModifyItWhenTermOccurrenceIsDefinitional() { + final URI vocabularyUri = Generator.generateUri(); + final TermOccurrence to = new TermDefinitionalOccurrence(Generator.generateUri(), + new DefinitionalOccurrenceTarget( + Generator.generateTermWithId(vocabularyUri))); + to.setUri(Generator.generateUri()); + when(termService.findTermVocabulary(to.getTarget().getSource())).thenReturn(Optional.of(vocabularyUri)); + when(vocabularyAuthorizationService.canModify(new Vocabulary(vocabularyUri))).thenReturn(true); + when(toDao.find(to.getUri())).thenReturn(Optional.of(to)); + + assertTrue(sut.canModify(to.getUri())); + verify(vocabularyAuthorizationService).canModify(new Vocabulary(vocabularyUri)); + } + + @Test + void canModifyResolvesResourceVocabularyAndChecksIfUserCanModifyItWhenTermOccurrenceIsFileOccurrence() { + final URI vocabularyUri = 
Generator.generateUri(); + final File file = Generator.generateFileWithId("test.html"); + file.setDocument(Generator.generateDocumentWithId()); + file.getDocument().setVocabulary(vocabularyUri); + final TermOccurrence to = new TermFileOccurrence(Generator.generateUri(), new FileOccurrenceTarget(file)); + to.setUri(Generator.generateUri()); + when(resourceService.find(file.getUri())).thenReturn(Optional.of(file)); + when(resourceAuthorizationService.canModify(file)).thenReturn(true); + when(toDao.find(to.getUri())).thenReturn(Optional.of(to)); + + assertTrue(sut.canModify(to.getUri())); + verify(resourceAuthorizationService).canModify(file); + } + + @Test + void canModifyReturnsFalseWhenTermOccurrenceDoesNotExist() { + when(toDao.find(any())).thenReturn(Optional.empty()); + assertFalse(sut.canModify(Generator.generateUri())); + } +} From 991574e16ca38d12006834905544b8e38ca56fd7 Mon Sep 17 00:00:00 2001 From: Martin Ledvinka Date: Fri, 29 Nov 2024 13:05:43 +0100 Subject: [PATCH 40/49] [kbss-cvut/termit-ui#581] Modify REST API to support term translations import. --- .../termit/rest/VocabularyController.java | 36 +++++++++++------ .../service/business/VocabularyService.java | 13 +++++++ .../document/TermOccurrenceResolver.java | 3 +- .../VocabularyAuthorizationService.java | 6 +++ .../termit/rest/VocabularyControllerTest.java | 39 ++++++++++++++++--- 5 files changed, 77 insertions(+), 20 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java b/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java index f3416c040..9f8475ad0 100644 --- a/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java +++ b/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java @@ -235,12 +235,22 @@ public ResponseEntity createVocabulary( example = ApiDoc.ID_NAMESPACE_EXAMPLE) @RequestParam(name = QueryParams.NAMESPACE, required = false) Optional namespace, - @Parameter(description = "File containing a SKOS glossary in RDF.") - @RequestParam(name = "file") MultipartFile file) { + @Parameter( + description = "File containing a SKOS glossary in RDF or an Excel file with supported structure.") + @RequestParam(name = "file") MultipartFile file, + @Parameter(description = "Whether to import only translations of existing terms from the vocabulary.") + @RequestParam(name = "translationsOnly", required = false, + defaultValue = "false") boolean translationsOnly) { final URI vocabularyIri = resolveVocabularyUri(localName, namespace); - final Vocabulary vocabulary = vocabularyService.importVocabulary(vocabularyIri, file); - LOG.debug("Vocabulary {} re-imported.", vocabulary); - return ResponseEntity.created(locationWithout(generateLocation(vocabulary.getUri()), "/import/" + localName)) + final Vocabulary result; + if (translationsOnly) { + result = vocabularyService.importTermTranslations(vocabularyIri, file); + LOG.debug("Translations of terms in vocabulary {} imported.", result); + } else { + result = vocabularyService.importVocabulary(vocabularyIri, file); + LOG.debug("Vocabulary {} re-imported.", result); + } + return ResponseEntity.created(locationWithout(generateLocation(result.getUri()), "/import/" + localName)) .build(); } @@ -310,13 +320,14 @@ public List getDetailedHistoryOfContent( example = ApiDoc.ID_NAMESPACE_EXAMPLE) @RequestParam(name = QueryParams.NAMESPACE, required = false) Optional namespace, @Parameter(description = ChangeRecordFilterDto.ApiDoc.TERM_NAME_DESCRIPTION) @RequestParam(name = "term", - required = false, - defaultValue = "") String 
termName, + required = false, + defaultValue = "") String termName, @Parameter(description = ChangeRecordFilterDto.ApiDoc.CHANGE_TYPE_DESCRIPTION) @RequestParam(name = "type", - required = false) URI changeType, - @Parameter(description = ChangeRecordFilterDto.ApiDoc.AUTHOR_NAME_DESCRIPTION) @RequestParam(name = "author", - required = false, - defaultValue = "") String authorName, + required = false) URI changeType, + @Parameter(description = ChangeRecordFilterDto.ApiDoc.AUTHOR_NAME_DESCRIPTION) @RequestParam( + name = "author", + required = false, + defaultValue = "") String authorName, @Parameter(description = ChangeRecordFilterDto.ApiDoc.CHANGED_ATTRIBUTE_DESCRIPTION) @RequestParam( name = "attribute", required = false, defaultValue = "") String changedAttributeName, @@ -327,7 +338,8 @@ public List getDetailedHistoryOfContent( name = Constants.QueryParams.PAGE, required = false, defaultValue = DEFAULT_PAGE) Integer pageNo) { final Pageable pageReq = createPageRequest(pageSize, pageNo); final Vocabulary vocabulary = vocabularyService.getReference(resolveVocabularyUri(localName, namespace)); - final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(termName, changedAttributeName, authorName, changeType); + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(termName, changedAttributeName, authorName, + changeType); return vocabularyService.getDetailedHistoryOfContent(vocabulary, filter, pageReq); } diff --git a/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java b/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java index ca2e75465..e697b9498 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java @@ -281,6 +281,19 @@ public Vocabulary importVocabulary(URI vocabularyIri, MultipartFile file) { return repositoryService.importVocabulary(vocabularyIri, file); } + /** + * Imports translations of terms in the specified vocabulary from the specified file. + * @param vocabularyIri IRI of vocabulary for whose terms to import translations + * @param file File from which to import the translations + * @return The imported vocabulary metadata + * @throws cz.cvut.kbss.termit.exception.importing.VocabularyImportException If the import fails + */ + @PreAuthorize("@vocabularyAuthorizationService.canModify(#vocabularyIri)") + public Vocabulary importTermTranslations(URI vocabularyIri, MultipartFile file) { + // TODO + return null; + } + /** * Gets an Excel template file that can be used to import terms into TermIt. * diff --git a/src/main/java/cz/cvut/kbss/termit/service/document/TermOccurrenceResolver.java b/src/main/java/cz/cvut/kbss/termit/service/document/TermOccurrenceResolver.java index 616c0707d..6cb6d66ec 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/document/TermOccurrenceResolver.java +++ b/src/main/java/cz/cvut/kbss/termit/service/document/TermOccurrenceResolver.java @@ -31,7 +31,6 @@ import java.net.URI; import java.util.Collections; import java.util.List; -import java.util.function.Consumer; /** * Base class for resolving term occurrences in an annotated document. @@ -50,7 +49,7 @@ protected TermOccurrenceResolver(TermRepositoryService termService) { * Parses the specified input into some abstract representation from which new terms and term occurrences can be * extracted. *

- * Note that this method has to be called before calling {@link #findTermOccurrences(Consumer)}. + * Note that this method has to be called before calling {@link #findTermOccurrences(OccurrenceConsumer)}. * * @param input The input to parse * @param source Original source of the input. Used for term occurrence generation diff --git a/src/main/java/cz/cvut/kbss/termit/service/security/authorization/VocabularyAuthorizationService.java b/src/main/java/cz/cvut/kbss/termit/service/security/authorization/VocabularyAuthorizationService.java index 37f99ff3d..777f7413c 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/security/authorization/VocabularyAuthorizationService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/security/authorization/VocabularyAuthorizationService.java @@ -165,6 +165,12 @@ public boolean canRead(VocabularyDto dto) { return canRead(new Vocabulary(dto.getUri())); } + public boolean canModify(URI vocabularyIri) { + Objects.requireNonNull(vocabularyIri); + final Vocabulary vocabulary = new Vocabulary(vocabularyIri); + return canModify(vocabulary); + } + @Override public boolean canModify(Vocabulary asset) { Objects.requireNonNull(asset); diff --git a/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java b/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java index 119681497..0a5a6b9e1 100644 --- a/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java +++ b/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java @@ -442,7 +442,7 @@ void getHistoryReturnsListOfChangeRecordsForSpecifiedVocabulary() throws Excepti }); assertNotNull(result); assertEquals(records, result); - verify(serviceMock).getChanges(vocabulary,emptyFilter); + verify(serviceMock).getChanges(vocabulary, emptyFilter); } @Test @@ -653,15 +653,18 @@ void getDetailedHistoryOfContentReturnsListOfChangeRecordsWhenNoFilterIsSpecifie final int pageSize = Integer.parseInt(VocabularyController.DEFAULT_PAGE_SIZE); final Vocabulary vocabulary = generateVocabularyAndInitReferenceResolution(); final Term term = Generator.generateTermWithId(); - final List changeRecords = IntStream.range(0, 5).mapToObj(i -> Generator.generateChangeRecords(term, user)).flatMap(List::stream).toList(); + final List changeRecords = IntStream.range(0, 5).mapToObj( + i -> Generator.generateChangeRecords(term, user)).flatMap(List::stream).toList(); final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); final Pageable pageable = Pageable.ofSize(pageSize); doReturn(changeRecords).when(serviceMock).getDetailedHistoryOfContent(vocabulary, filter, pageable); - final MvcResult mvcResult = mockMvc.perform(get(PATH + "/" + FRAGMENT + "/history-of-content/detail")).andExpect(status().isOk()).andReturn(); + final MvcResult mvcResult = mockMvc.perform(get(PATH + "/" + FRAGMENT + "/history-of-content/detail")) + .andExpect(status().isOk()).andReturn(); final List result = - readValue(mvcResult, new TypeReference>() {}); + readValue(mvcResult, new TypeReference<>() { + }); assertNotNull(result); assertEquals(changeRecords, result); verify(serviceMock).getDetailedHistoryOfContent(vocabulary, filter, pageable); @@ -673,9 +676,33 @@ void getLanguagesRetrievesAndReturnsListOfLanguagesUsedInVocabulary() throws Exc final List languages = List.of(Environment.LANGUAGE, "cs", "de"); when(serviceMock.getLanguages(VOCABULARY_URI)).thenReturn(languages); - final MvcResult mvcResult = mockMvc.perform(get(PATH + "/" + FRAGMENT + "/languages").queryParam(QueryParams.NAMESPACE, NAMESPACE)).andReturn(); 
- final List result = readValue(mvcResult, new TypeReference>() {}); + final MvcResult mvcResult = mockMvc.perform( + get(PATH + "/" + FRAGMENT + "/languages").queryParam(QueryParams.NAMESPACE, NAMESPACE)).andReturn(); + final List result = readValue(mvcResult, new TypeReference<>() { + }); assertEquals(languages, result); verify(serviceMock).getLanguages(VOCABULARY_URI); } + + @Test + void reImportVocabularyRunsTermTranslationsImportForUploadedFileWhenTranslationsOnlyIsSpecified() throws Exception { + when(configMock.getNamespace().getVocabulary()).thenReturn(NAMESPACE); + final Vocabulary vocabulary = Generator.generateVocabulary(); + vocabulary.setUri(URI.create(NAMESPACE + FRAGMENT)); + when(idResolverMock.resolveIdentifier(NAMESPACE, FRAGMENT)).thenReturn(vocabulary.getUri()); + when(serviceMock.importTermTranslations(any(URI.class), any())).thenReturn(vocabulary); + final MockMultipartFile upload = new MockMultipartFile("file", "vocabulary.xlsx", + Constants.MediaType.EXCEL, + Environment.loadFile("data/import-simple-en-cs.xlsx")); + final MvcResult mvcResult = mockMvc.perform(multipart(PATH + "/" + FRAGMENT + "/import").file(upload) + .queryParam( + "translationsOnly", + "true")) + .andExpect(status().isCreated()) + .andReturn(); + verifyLocationEquals(PATH + "/" + FRAGMENT, mvcResult); + assertThat(mvcResult.getResponse().getHeader(HttpHeaders.LOCATION), + containsString(QueryParams.NAMESPACE + "=" + NAMESPACE)); + verify(serviceMock).importTermTranslations(vocabulary.getUri(), upload); + } } From c2132175459f5cc72ad7c3983f1fb22210113fc2 Mon Sep 17 00:00:00 2001 From: Martin Ledvinka Date: Fri, 29 Nov 2024 13:46:02 +0100 Subject: [PATCH 41/49] [kbss-cvut/termit-ui#581] Implement invocation of term translations import in services. --- .../persistence/dao/skos/SKOSImporter.java | 13 +++++++++-- .../service/business/VocabularyService.java | 9 ++++---- .../service/importer/VocabularyImporter.java | 22 +++++++++++++++---- .../service/importer/VocabularyImporters.java | 22 ++++++++++++++----- .../service/importer/excel/ExcelImporter.java | 8 ++++++- .../VocabularyRepositoryService.java | 19 ++++++++++++++-- ...VocabularyRepositoryServiceImportTest.java | 18 +++++++++++++++ 7 files changed, 92 insertions(+), 19 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/skos/SKOSImporter.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/skos/SKOSImporter.java index 6d94fd9e9..7da506ad4 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/skos/SKOSImporter.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/skos/SKOSImporter.java @@ -19,6 +19,7 @@ import cz.cvut.kbss.jopa.model.EntityManager; import cz.cvut.kbss.jopa.model.MultilingualString; +import cz.cvut.kbss.termit.exception.UnsupportedOperationException; import cz.cvut.kbss.termit.exception.importing.UnsupportedImportMediaTypeException; import cz.cvut.kbss.termit.exception.importing.VocabularyExistsException; import cz.cvut.kbss.termit.exception.importing.VocabularyImportException; @@ -28,6 +29,7 @@ import cz.cvut.kbss.termit.service.importer.VocabularyImporter; import cz.cvut.kbss.termit.util.Configuration; import cz.cvut.kbss.termit.util.Utils; +import jakarta.annotation.Nonnull; import jakarta.validation.constraints.NotNull; import org.eclipse.rdf4j.model.IRI; import org.eclipse.rdf4j.model.Literal; @@ -105,10 +107,11 @@ public SKOSImporter(Configuration config, VocabularyDao vocabularyDao, EntityMan } @Override - public Vocabulary importVocabulary(ImportConfiguration config, ImportInput 
data) { + public Vocabulary importVocabulary(@Nonnull ImportConfiguration config, @Nonnull ImportInput data) { Objects.requireNonNull(config); Objects.requireNonNull(data); - return importVocabulary(config.allowReIdentify(), config.vocabularyIri(), data.mediaType(), config.prePersist(), data.data()); + return importVocabulary(config.allowReIdentify(), config.vocabularyIri(), data.mediaType(), config.prePersist(), + data.data()); } private Vocabulary importVocabulary(final boolean rename, @@ -363,6 +366,12 @@ private void setVocabularyDescriptionFromGlossary(final Vocabulary vocabulary) { handleGlossaryStringProperty(DCTERMS.DESCRIPTION, vocabulary::setDescription); } + @Override + public Vocabulary importTermTranslations(@Nonnull URI vocabularyIri, @Nonnull ImportInput data) { + throw new UnsupportedOperationException( + "Importing term translations from SKOS file is currently not supported."); + } + /** * Checks whether this importer supports the specified media type. * diff --git a/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java b/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java index e697b9498..4861464e2 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java @@ -283,15 +283,15 @@ public Vocabulary importVocabulary(URI vocabularyIri, MultipartFile file) { /** * Imports translations of terms in the specified vocabulary from the specified file. + * * @param vocabularyIri IRI of vocabulary for whose terms to import translations - * @param file File from which to import the translations + * @param file File from which to import the translations * @return The imported vocabulary metadata * @throws cz.cvut.kbss.termit.exception.importing.VocabularyImportException If the import fails */ @PreAuthorize("@vocabularyAuthorizationService.canModify(#vocabularyIri)") public Vocabulary importTermTranslations(URI vocabularyIri, MultipartFile file) { - // TODO - return null; + return repositoryService.importTermTranslations(vocabularyIri, file); } /** @@ -333,7 +333,8 @@ public List getChangesOfContent(Vocabulary vocabulary) { * @param pageReq Specification of the size and number of the page to return * @return List of change records, ordered by date in descending order */ - public List getDetailedHistoryOfContent(Vocabulary vocabulary, ChangeRecordFilterDto filter, Pageable pageReq) { + public List getDetailedHistoryOfContent(Vocabulary vocabulary, ChangeRecordFilterDto filter, + Pageable pageReq) { return repositoryService.getDetailedHistoryOfContent(vocabulary, filter, pageReq); } diff --git a/src/main/java/cz/cvut/kbss/termit/service/importer/VocabularyImporter.java b/src/main/java/cz/cvut/kbss/termit/service/importer/VocabularyImporter.java index b82b49a0d..bd71b8375 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/importer/VocabularyImporter.java +++ b/src/main/java/cz/cvut/kbss/termit/service/importer/VocabularyImporter.java @@ -2,7 +2,7 @@ import cz.cvut.kbss.termit.exception.importing.VocabularyExistsException; import cz.cvut.kbss.termit.model.Vocabulary; -import jakarta.validation.constraints.NotNull; +import jakarta.annotation.Nonnull; import java.io.InputStream; import java.net.URI; @@ -26,7 +26,21 @@ public interface VocabularyImporter { * @throws IllegalArgumentException Indicates invalid input data, e.g., no input streams, missing language tags * etc. 
*/ - Vocabulary importVocabulary(@NotNull ImportConfiguration config, @NotNull ImportInput data); + Vocabulary importVocabulary(@Nonnull ImportConfiguration config, @Nonnull ImportInput data); + + /** + * Imports term translations from the specified data into the specified vocabulary. + *

+ * Only translations of existing terms are imported, no new terms are created. Only translations of multilingual + * attributes are imported. If a value in the specified language exists in the repository, it is preserved. + * + * @param vocabularyIri Vocabulary identifier + * @param data Data to import + * @return Vocabulary whose content was changed + * @throws IllegalArgumentException Indicates invalid input data, e.g., no input streams, missing language tags + * etc. + */ + Vocabulary importTermTranslations(@Nonnull URI vocabularyIri, @Nonnull ImportInput data); /** * Vocabulary import configuration. @@ -38,7 +52,7 @@ public interface VocabularyImporter { * @param prePersist Procedure to call before persisting the resulting vocabulary */ record ImportConfiguration(boolean allowReIdentify, URI vocabularyIri, - @NotNull Consumer prePersist) { + @Nonnull Consumer prePersist) { } /** @@ -47,6 +61,6 @@ record ImportConfiguration(boolean allowReIdentify, URI vocabularyIri, * @param mediaType Media type of the imported data * @param data Streams containing the data */ - record ImportInput(@NotNull String mediaType, InputStream... data) { + record ImportInput(@Nonnull String mediaType, InputStream... data) { } } diff --git a/src/main/java/cz/cvut/kbss/termit/service/importer/VocabularyImporters.java b/src/main/java/cz/cvut/kbss/termit/service/importer/VocabularyImporters.java index 5eb792580..1859ba7bb 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/importer/VocabularyImporters.java +++ b/src/main/java/cz/cvut/kbss/termit/service/importer/VocabularyImporters.java @@ -8,6 +8,8 @@ import org.springframework.context.ApplicationContext; import org.springframework.stereotype.Component; +import java.net.URI; + /** * Ensures correct importer is invoked for provided media types. 
*/ @@ -22,14 +24,22 @@ public VocabularyImporters(ApplicationContext appContext) { @Override public Vocabulary importVocabulary(@Nonnull ImportConfiguration config, @Nonnull ImportInput data) { - if (SKOSImporter.supportsMediaType(data.mediaType())) { - return getSkosImporter().importVocabulary(config, data); - } - if (ExcelImporter.supportsMediaType(data.mediaType())) { - return getExcelImporter().importVocabulary(config, data); + return resolveImporter(data.mediaType()).importVocabulary(config, data); + } + + private VocabularyImporter resolveImporter(String mediaType) { + if (SKOSImporter.supportsMediaType(mediaType)) { + return getSkosImporter(); + } else if (ExcelImporter.supportsMediaType(mediaType)) { + return getExcelImporter(); } throw new UnsupportedImportMediaTypeException( - "Unsupported media type '" + data.mediaType() + "' for vocabulary import."); + "Unsupported media type '" + mediaType + "' for vocabulary import."); + } + + @Override + public Vocabulary importTermTranslations(@Nonnull URI vocabularyIri, @Nonnull ImportInput data) { + return resolveImporter(data.mediaType()).importTermTranslations(vocabularyIri, data); } private VocabularyImporter getSkosImporter() { diff --git a/src/main/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporter.java b/src/main/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporter.java index 1a8ed5f68..a86d0a6c6 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporter.java +++ b/src/main/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporter.java @@ -92,7 +92,7 @@ public ExcelImporter(VocabularyDao vocabularyDao, TermRepositoryService termServ } @Override - public Vocabulary importVocabulary(ImportConfiguration config, ImportInput data) { + public Vocabulary importVocabulary(@Nonnull ImportConfiguration config, @Nonnull ImportInput data) { Objects.requireNonNull(config); Objects.requireNonNull(data); if (config.vocabularyIri() == null || !vocabularyDao.exists(config.vocabularyIri())) { @@ -215,6 +215,12 @@ private URI resolveTermIdentifier(Vocabulary vocabulary, Term term) { return term.getUri(); } + @Override + public Vocabulary importTermTranslations(@Nonnull URI vocabularyIri, @Nonnull ImportInput data) { + // TODO + return null; + } + /** * Checks whether this importer supports the specified media type. * diff --git a/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java b/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java index 96e3cb94d..55efa3c65 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java @@ -245,7 +245,7 @@ public Vocabulary importVocabulary(boolean rename, MultipartFile file) { } catch (VocabularyImportException e) { throw e; } catch (Exception e) { - throw new VocabularyImportException("Unable to import vocabulary, because of: " + e.getMessage()); + throw new VocabularyImportException("Unable to import vocabulary. 
Cause: " + e.getMessage()); } } @@ -259,6 +259,7 @@ private static String resolveContentType(MultipartFile file) throws IOException @CacheEvict(allEntries = true) @Transactional public Vocabulary importVocabulary(URI vocabularyIri, MultipartFile file) { + Objects.requireNonNull(vocabularyIri); Objects.requireNonNull(file); try { String contentType = resolveContentType(file); @@ -268,7 +269,21 @@ public Vocabulary importVocabulary(URI vocabularyIri, MultipartFile file) { } catch (VocabularyImportException e) { throw e; } catch (Exception e) { - throw new VocabularyImportException("Unable to import vocabulary, because of: " + e.getMessage(), e); + throw new VocabularyImportException("Unable to import vocabulary. Cause: " + e.getMessage(), e); + } + } + + @Transactional + public Vocabulary importTermTranslations(URI vocabularyIri, MultipartFile file) { + Objects.requireNonNull(vocabularyIri); + Objects.requireNonNull(file); + try { + String contentType = resolveContentType(file); + return importers.importTermTranslations(vocabularyIri, new VocabularyImporter.ImportInput(contentType, file.getInputStream())); + } catch (VocabularyImportException e) { + throw e; + } catch (Exception e) { + throw new VocabularyImportException("Unable to import vocabulary. Cause: " + e.getMessage(), e); } } diff --git a/src/test/java/cz/cvut/kbss/termit/service/VocabularyRepositoryServiceImportTest.java b/src/test/java/cz/cvut/kbss/termit/service/VocabularyRepositoryServiceImportTest.java index 276a146a9..3abc0872b 100644 --- a/src/test/java/cz/cvut/kbss/termit/service/VocabularyRepositoryServiceImportTest.java +++ b/src/test/java/cz/cvut/kbss/termit/service/VocabularyRepositoryServiceImportTest.java @@ -34,10 +34,12 @@ import org.springframework.web.multipart.MultipartFile; import java.io.IOException; +import java.net.URI; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -65,4 +67,20 @@ void passesInputStreamFromProvidedInputFileToImporter() throws IOException { assertNotNull(captor.getValue()); assertEquals(vocabulary, result); } + + @Test + void importTermTranslationsInvokesImporterWithProvidedData() throws IOException { + final MultipartFile input = new MockMultipartFile("vocabulary.xlsx", "vocabulary.xlsx", + Constants.MediaType.EXCEL, + Environment.loadFile("data/import-simple-en-cs.xlsx")); + final Vocabulary vocabulary = Generator.generateVocabularyWithId(); + when(importer.importTermTranslations(any(URI.class), any(VocabularyImporter.ImportInput.class))).thenReturn( + vocabulary); + final Vocabulary result = sut.importTermTranslations(vocabulary.getUri(), input); + final ArgumentCaptor captor = ArgumentCaptor.forClass( + VocabularyImporter.ImportInput.class); + verify(importer).importTermTranslations(eq(vocabulary.getUri()), captor.capture()); + assertNotNull(captor.getValue()); + assertEquals(vocabulary, result); + } } From 71bb2aa2fbfd97c813f62568e749622928454a7c Mon Sep 17 00:00:00 2001 From: Martin Ledvinka Date: Fri, 29 Nov 2024 17:46:45 +0100 Subject: [PATCH 42/49] [kbss-cvut/termit-ui#581] Implement importing of term translations from Excel. 
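The translation import implemented in this patch merges multilingual values so that anything already stored for a term is preserved and only missing language tags are filled in from the Excel sheet. A rough, self-contained sketch of that merge rule using JOPA's MultilingualString (the values are made up; only the MultilingualString calls that appear elsewhere in this series are assumed):

import cz.cvut.kbss.jopa.model.MultilingualString;

public class TranslationMergeSketch {
    public static void main(String[] args) {
        // Value already stored in the repository for the term (primary language).
        final MultilingualString existing = MultilingualString.create("Building", "en");
        // Value read from the imported Excel sheet.
        final MultilingualString imported = MultilingualString.create("Budova", "cs");
        imported.getValue().forEach((lang, value) -> {
            // Existing translations win; only languages missing so far are added.
            if (!existing.contains(lang)) {
                existing.set(lang, value);
            }
        });
        // existing now contains both the English and the Czech label.
        System.out.println(existing.getValue());
    }
}

The plural attributes (alternative labels, hidden labels, examples) are handled analogously in the patch below, except that imported values in languages already present on the term are discarded before the remainder is added.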
--- .../service/importer/excel/ExcelImporter.java | 223 +++++++++++++----- .../excel/LocalizedSheetImporter.java | 2 +- .../importer/excel/ExcelImporterTest.java | 132 +++++++++-- 3 files changed, 280 insertions(+), 77 deletions(-) diff --git a/src/main/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporter.java b/src/main/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporter.java index a86d0a6c6..f55e2ef75 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporter.java +++ b/src/main/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporter.java @@ -1,6 +1,7 @@ package cz.cvut.kbss.termit.service.importer.excel; import cz.cvut.kbss.jopa.model.EntityManager; +import cz.cvut.kbss.jopa.model.MultilingualString; import cz.cvut.kbss.termit.exception.NotFoundException; import cz.cvut.kbss.termit.exception.importing.VocabularyDoesNotExistException; import cz.cvut.kbss.termit.exception.importing.VocabularyImportException; @@ -21,6 +22,7 @@ import org.apache.poi.ss.usermodel.Sheet; import org.apache.poi.ss.usermodel.Workbook; import org.apache.poi.xssf.usermodel.XSSFWorkbook; +import org.jetbrains.annotations.NotNull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.config.ConfigurableBeanFactory; @@ -119,43 +121,8 @@ public Vocabulary importVocabulary(@Nonnull ImportConfiguration config, @Nonnull terms = sheetImporter.resolveTermsFromSheet(sheet); rawDataToInsert.addAll(sheetImporter.getRawDataToInsert()); } - terms.stream().peek(t -> t.setUri(resolveTermIdentifier(targetVocabulary, t))) - .peek(t -> t.getLabel().getValue().forEach((lang, value) -> { - final Optional existingUri = termService.findIdentifierByLabel(value, - targetVocabulary, - lang); - if (existingUri.isPresent() && !existingUri.get().equals(t.getUri())) { - throw new VocabularyImportException( - "Vocabulary already contains a term with label '" + value + "' with a different identifier than the imported one.", - "error.vocabulary.import.excel.labelWithDifferentIdentifierExists") - .addParameter("label", value) - .addParameter("existingUri", Utils.uriToString(existingUri.get())); - } - })) - .filter(t -> termService.exists(t.getUri())).forEach(t -> { - LOG.trace("Term {} already exists. 
Removing old version.", t); - termService.forceRemove(termService.findRequired(t.getUri())); - // Flush changes to prevent EntityExistsExceptions when term is already managed in PC as different type (Term vs TermInfo) - em.flush(); - }); - // Ensure all parents are saved before we start adding children - terms.stream().filter(t -> Utils.emptyIfNull(t.getParentTerms()).isEmpty()) - .forEach(root -> { - LOG.trace("Persisting root term {}.", root); - termService.addRootTermToVocabulary(root, targetVocabulary); - root.setVocabulary(targetVocabulary.getUri()); - }); - terms.stream().filter(t -> !Utils.emptyIfNull(t.getParentTerms()).isEmpty()) - .forEach(t -> { - t.setVocabulary(targetVocabulary.getUri()); - LOG.trace("Persisting child term {}.", t); - termService.addChildTerm(t, t.getParentTerms().iterator().next()); - }); - // Insert term relationships as raw data because of possible object conflicts in the persistence context - - // the same term being as multiple types (Term, TermInfo) in the same persistence context - dataDao.insertRawData(rawDataToInsert.stream().map(tr -> new Quad(tr.subject().getUri(), tr.property(), - tr.object().getUri(), - targetVocabulary.getUri())).toList()); + prepareTermsForPersist(terms, targetVocabulary); + persistNewTerms(terms, targetVocabulary, rawDataToInsert); } } catch (IOException e) { throw new VocabularyImportException("Unable to read input as Excel.", e); @@ -174,30 +141,17 @@ private PrefixMap resolvePrefixMap(Workbook excel) { } /** - * Resolves namespace for identifiers of terms in the specified vocabulary. - *

- * It uses the vocabulary identifier and the configured term namespace separator. - * - * @param vocabulary Vocabulary whose term identifier namespace to resolve - * @return Resolved namespace - */ - private String resolveVocabularyTermNamespace(Vocabulary vocabulary) { - return idResolver.buildNamespace(vocabulary.getUri().toString(), - config.getNamespace().getTerm().getSeparator()); - } - - /** - * Resolves term identifier. + * Resolves term identifier w.r.t. the target vocabulary. *

* If the term does not have an identifier, it is generated so that existing instance can be removed before * inserting the imported term. If the term has an identifier, but it does not match the expected vocabulary-based * namespace, it is adjusted so that it does. Otherwise, the identifier is used. * - * @param vocabulary Vocabulary into which the term will be added * @param term The imported term + * @param vocabulary Vocabulary into which the term will be added * @return Term identifier */ - private URI resolveTermIdentifier(Vocabulary vocabulary, Term term) { + private URI resolveTermIdentifierWrtVocabulary(Term term, Vocabulary vocabulary) { final String termNamespace = resolveVocabularyTermNamespace(vocabulary); if (term.getUri() == null) { return idResolver.generateDerivedIdentifier(vocabulary.getUri(), @@ -215,10 +169,169 @@ private URI resolveTermIdentifier(Vocabulary vocabulary, Term term) { return term.getUri(); } + /** + * Resolves namespace for identifiers of terms in the specified vocabulary. + *

+ * It uses the vocabulary identifier and the configured term namespace separator. + * + * @param vocabulary Vocabulary whose term identifier namespace to resolve + * @return Resolved namespace + */ + private String resolveVocabularyTermNamespace(Vocabulary vocabulary) { + return idResolver.buildNamespace(vocabulary.getUri().toString(), + config.getNamespace().getTerm().getSeparator()); + } + + /** + * Prepares terms for persist by: + *

+ * • Resolving their identifiers and harmonizing them with vocabulary namespace
+ * • Removing possibly pre-existing terms
+ * + * @param terms Terms to process + * @param targetVocabulary Target vocabulary + */ + private void prepareTermsForPersist(List terms, Vocabulary targetVocabulary) { + terms.stream().peek(t -> t.setUri(resolveTermIdentifierWrtVocabulary(t, targetVocabulary))) + .peek(t -> t.getLabel().getValue().forEach((lang, value) -> { + final Optional existingUri = termService.findIdentifierByLabel(value, + targetVocabulary, + lang); + if (existingUri.isPresent() && !existingUri.get().equals(t.getUri())) { + throw new VocabularyImportException( + "Vocabulary already contains a term with label '" + value + "' with a different identifier than the imported one.", + "error.vocabulary.import.excel.labelWithDifferentIdentifierExists") + .addParameter("label", value) + .addParameter("existingUri", Utils.uriToString(existingUri.get())); + } + })) + .filter(t -> termService.exists(t.getUri())).forEach(t -> { + LOG.trace("Term {} already exists. Removing old version.", t); + termService.forceRemove(termService.findRequired(t.getUri())); + // Flush changes to prevent EntityExistsExceptions when term is already managed in PC as different type (Term vs TermInfo) + em.flush(); + }); + } + + private void persistNewTerms(List terms, Vocabulary targetVocabulary, Set rawDataToInsert) { + // Ensure all parents are saved before we start adding children + terms.stream().filter(t -> Utils.emptyIfNull(t.getParentTerms()).isEmpty()) + .forEach(root -> { + LOG.trace("Persisting root term {}.", root); + termService.addRootTermToVocabulary(root, targetVocabulary); + root.setVocabulary(targetVocabulary.getUri()); + }); + terms.stream().filter(t -> !Utils.emptyIfNull(t.getParentTerms()).isEmpty()) + .forEach(t -> { + t.setVocabulary(targetVocabulary.getUri()); + LOG.trace("Persisting child term {}.", t); + termService.addChildTerm(t, t.getParentTerms().iterator().next()); + }); + // Insert term relationships as raw data because of possible object conflicts in the persistence context - + // the same term being as multiple types (Term, TermInfo) in the same persistence context + dataDao.insertRawData(rawDataToInsert.stream().map(tr -> new Quad(tr.subject().getUri(), tr.property(), + tr.object().getUri(), + targetVocabulary.getUri())).toList()); + } + @Override public Vocabulary importTermTranslations(@Nonnull URI vocabularyIri, @Nonnull ImportInput data) { - // TODO - return null; + Objects.requireNonNull(vocabularyIri); + Objects.requireNonNull(data); + final Vocabulary targetVocabulary = vocabularyDao.find(vocabularyIri).orElseThrow( + () -> NotFoundException.create(Vocabulary.class, vocabularyIri)); + LOG.debug("Importing translations for terms in vocabulary {}.", vocabularyIri); + try { + final List terms = readTermsFromSheet(data); + terms.forEach(t -> { + identifyTermByLabelIfNecessary(t, targetVocabulary); + final Optional existingTerm = termService.find(t.getUri()); + if (existingTerm.isEmpty() || !existingTerm.get().getVocabulary().equals(vocabularyIri)) { + LOG.warn( + "Term with identifier '{}' not found in vocabulary '{}'. 
Skipping record resolved from Excel file.", + t.getUri(), vocabularyIri); + return; + } + mergeTranslations(t, existingTerm.get()); + termService.update(existingTerm.get()); + }); + } catch (IOException e) { + throw new VocabularyImportException("Unable to read input as Excel.", e); + } + return targetVocabulary; + } + + private void identifyTermByLabelIfNecessary(Term t, Vocabulary targetVocabulary) { + if (t.getUri() == null) { + final String termLabel = t.getLabel().get(config.getPersistence().getLanguage()); + if (termLabel == null) { + throw new VocabularyImportException( + "Unable to identify terms in Excel - it contains neither term identifiers nor labels in primary language.", + "error.vocabulary.import.excel.missingIdentifierOrLabel"); + } + t.setUri(idResolver.generateDerivedIdentifier(targetVocabulary.getUri(), + config.getNamespace().getTerm().getSeparator(), + termLabel)); + } + } + + private List readTermsFromSheet(@NotNull ImportInput data) throws IOException { + List terms = Collections.emptyList(); + for (InputStream input : data.data()) { + final Workbook workbook = new XSSFWorkbook(input); + assert workbook.getNumberOfSheets() > 0; + PrefixMap prefixMap = resolvePrefixMap(workbook); + for (int i = 0; i < workbook.getNumberOfSheets(); i++) { + final Sheet sheet = workbook.getSheetAt(i); + if (ExcelVocabularyExporter.PREFIX_SHEET_NAME.equals(sheet.getSheetName())) { + // Skip already processed prefix sheet + continue; + } + final LocalizedSheetImporter sheetImporter = new LocalizedSheetImporter( + new LocalizedSheetImporter.Services(termService, languageService), + prefixMap, terms); + terms = sheetImporter.resolveTermsFromSheet(sheet); + } + } + return terms; + } + + private void mergeTranslations(Term source, Term target) { + target.setLabel(mergeSingularTranslations(source.getLabel(), target.getLabel())); + target.setDefinition(mergeSingularTranslations(source.getDefinition(), target.getDefinition())); + target.setDescription(mergeSingularTranslations(source.getDescription(), target.getDescription())); + assert target.getAltLabels() != null; + mergePluralTranslations(source.getAltLabels(), target.getAltLabels()); + assert target.getHiddenLabels() != null; + mergePluralTranslations(source.getHiddenLabels(), target.getHiddenLabels()); + assert target.getExamples() != null; + mergePluralTranslations(source.getExamples(), target.getExamples()); + } + + private MultilingualString mergeSingularTranslations(MultilingualString source, MultilingualString target) { + if (target == null) { + return source; + } + if (source == null) { + return target; + } + source.getValue().forEach((lang, value) -> { + if (!target.contains(lang)) { + target.set(lang, value); + } + }); + return target; + } + + private void mergePluralTranslations(Set source, Set target) { + if (Utils.emptyIfNull(source).isEmpty()) { + return; + } + // Remove just the existing language values + target.forEach(t -> t.getLanguages().forEach(lang -> source.forEach(mls -> mls.remove(lang)))); + // Add the remainder + target.addAll(source.stream().filter(mls -> !mls.isEmpty()).toList()); } /** diff --git a/src/main/java/cz/cvut/kbss/termit/service/importer/excel/LocalizedSheetImporter.java b/src/main/java/cz/cvut/kbss/termit/service/importer/excel/LocalizedSheetImporter.java index 67187fc3e..17ba9dc02 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/importer/excel/LocalizedSheetImporter.java +++ b/src/main/java/cz/cvut/kbss/termit/service/importer/excel/LocalizedSheetImporter.java @@ -84,7 +84,7 @@ class 
LocalizedSheetImporter { * @return Terms resolved from the sheet */ List resolveTermsFromSheet(Sheet sheet) { - LOG.debug("Importing terms from sheet '{}'.", sheet.getSheetName()); + LOG.debug("Reading terms from sheet '{}'.", sheet.getSheetName()); this.rawDataToInsert = new ArrayList<>(); final Optional lang = resolveLanguage(sheet); if (lang.isEmpty()) { diff --git a/src/test/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporterTest.java b/src/test/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporterTest.java index 5804ca6e8..eb682d4ae 100644 --- a/src/test/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporterTest.java +++ b/src/test/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporterTest.java @@ -38,6 +38,7 @@ import java.io.ByteArrayOutputStream; import java.net.URI; import java.util.Collection; +import java.util.HashSet; import java.util.List; import java.util.Objects; import java.util.Optional; @@ -86,7 +87,7 @@ class ExcelImporterTest { @SuppressWarnings("unused") @Spy - private IdentifierResolver idResolver = new IdentifierResolver(new Configuration()); + private IdentifierResolver idResolver = new IdentifierResolver(config); @InjectMocks private ExcelImporter sut; @@ -97,6 +98,7 @@ class ExcelImporterTest { void setUp() { this.vocabulary = Generator.generateVocabularyWithId(); config.getNamespace().getTerm().setSeparator("/terms"); + config.getPersistence().setLanguage(Environment.LANGUAGE); } @ParameterizedTest @@ -350,9 +352,7 @@ void importFallsBackToEnglishColumnLabelsForUnknownLanguages() { @Test void importSupportsTermIdentifiers() { - vocabulary.setUri(URI.create("http://example.com")); - when(vocabularyDao.exists(vocabulary.getUri())).thenReturn(true); - when(vocabularyDao.find(vocabulary.getUri())).thenReturn(Optional.of(vocabulary)); + initVocabularyResolution(); final Vocabulary result = sut.importVocabulary( new VocabularyImporter.ImportConfiguration(false, vocabulary.getUri(), prePersist), @@ -378,11 +378,15 @@ void importSupportsTermIdentifiers() { building.get().getUri(), vocabulary.getUri())), quadsCaptor.getValue()); } - @Test - void importSupportsPrefixedTermIdentifiers() { + private void initVocabularyResolution() { vocabulary.setUri(URI.create("http://example.com")); when(vocabularyDao.exists(vocabulary.getUri())).thenReturn(true); when(vocabularyDao.find(vocabulary.getUri())).thenReturn(Optional.of(vocabulary)); + } + + @Test + void importSupportsPrefixedTermIdentifiers() { + initVocabularyResolution(); final Vocabulary result = sut.importVocabulary( new VocabularyImporter.ImportConfiguration(false, vocabulary.getUri(), prePersist), @@ -431,9 +435,7 @@ void importAdjustsTermIdentifiersToUseExistingVocabularyIdentifierAndSeparatorAs @Test void importRemovesExistingInstanceWhenImportedTermAlreadyExists() { - vocabulary.setUri(URI.create("http://example.com")); - when(vocabularyDao.exists(vocabulary.getUri())).thenReturn(true); - when(vocabularyDao.find(vocabulary.getUri())).thenReturn(Optional.of(vocabulary)); + initVocabularyResolution(); final Term existingBuilding = Generator.generateTermWithId(); existingBuilding.setUri(URI.create("http://example.com/terms/building")); final Term existingConstruction = Generator.generateTermWithId(); @@ -457,9 +459,7 @@ void importRemovesExistingInstanceWhenImportedTermAlreadyExists() { @Test void importSupportsReferencesToOtherVocabulariesViaTermIdentifiersWhenReferencedTermsExist() { - vocabulary.setUri(URI.create("http://example.com")); - 
when(vocabularyDao.exists(vocabulary.getUri())).thenReturn(true); - when(vocabularyDao.find(vocabulary.getUri())).thenReturn(Optional.of(vocabulary)); + initVocabularyResolution(); when(termService.exists(any())).thenReturn(false); when(termService.exists(URI.create("http://example.com/another-vocabulary/terms/relatedMatch"))).thenReturn( true); @@ -568,9 +568,7 @@ void importThrowsVocabularyImportExceptionWhenSheetContainsDuplicateLabels() thr @Test void importThrowsVocabularyImportExceptionWhenSheetContainsDuplicateIdentifiers() throws Exception { - vocabulary.setUri(URI.create("http://example.com")); - when(vocabularyDao.exists(vocabulary.getUri())).thenReturn(true); - when(vocabularyDao.find(vocabulary.getUri())).thenReturn(Optional.of(vocabulary)); + initVocabularyResolution(); final Workbook input = new XSSFWorkbook(Environment.loadFile("template/termit-import.xlsx")); final Sheet sheet = input.getSheet("English"); sheet.shiftColumns(0, 12, 1); @@ -597,9 +595,7 @@ void importThrowsVocabularyImportExceptionWhenSheetContainsDuplicateIdentifiers( @Test void importSupportsSpecifyingStateAndTypeOnlyInOneSheet() throws Exception { - vocabulary.setUri(URI.create("http://example.com")); - when(vocabularyDao.exists(vocabulary.getUri())).thenReturn(true); - when(vocabularyDao.find(vocabulary.getUri())).thenReturn(Optional.of(vocabulary)); + initVocabularyResolution(); final Workbook input = new XSSFWorkbook(Environment.loadFile("template/termit-import.xlsx")); final Sheet englishSheet = input.getSheet("English"); englishSheet.getRow(1).createCell(0).setCellValue("Construction"); @@ -651,9 +647,7 @@ void importThrowsVocabularyImportExceptionWhenVocabularyAlreadyContainsTermWithS @Test void importSupportsMultipleTypesDeclaredForTerm() throws Exception { - vocabulary.setUri(URI.create("http://example.com")); - when(vocabularyDao.exists(vocabulary.getUri())).thenReturn(true); - when(vocabularyDao.find(vocabulary.getUri())).thenReturn(Optional.of(vocabulary)); + initVocabularyResolution(); final Workbook input = new XSSFWorkbook(Environment.loadFile("template/termit-import.xlsx")); final Sheet englishSheet = input.getSheet("English"); englishSheet.getRow(1).createCell(0).setCellValue("Construction"); @@ -678,4 +672,100 @@ void importSupportsMultipleTypesDeclaredForTerm() throws Exception { assertThat(captor.getValue().getTypes(), hasItems(objectType.getUri().toString(), eventType.getUri().toString())); } + + @Test + void importTermTranslationsFromExcelWithIdentifiersUpdatesExistingTerms() { + vocabulary.setUri(URI.create("http://example.com")); + when(vocabularyDao.find(vocabulary.getUri())).thenReturn(Optional.of(vocabulary)); + final Term building = initTermBuilding(); + final Term construction = initTermConstruction(); + + final Vocabulary result = sut.importTermTranslations(vocabulary.getUri(), new VocabularyImporter.ImportInput( + Constants.MediaType.EXCEL, + Environment.loadFile("data/import-with-identifiers-en-cs.xlsx"))); + assertEquals(vocabulary, result); + assertEquals("Budova", building.getLabel().get("cs")); + List.of("Barák", "Dům").forEach(t -> assertTrue( + building.getAltLabels().stream().anyMatch(mls -> mls.contains("cs") && mls.get("cs").equals(t)))); + assertEquals("Definice pojmu budova", building.getDefinition().get("cs")); + assertEquals("Doplňující poznámka pojmu budova", building.getDescription().get("cs")); + assertEquals("Stavba", construction.getLabel().get("cs")); + assertEquals("Proces výstavby budovy", construction.getDefinition().get("cs")); + 
assertTrue(construction.getAltLabels().stream() + .anyMatch(mls -> mls.contains("cs") && mls.get("cs").equals("Staveniště"))); + verify(termService).update(building); + verify(termService).update(construction); + } + + private Term initTermBuilding() { + final Term building = new Term(URI.create("http://example.com/terms/budova")); + building.setLabel(MultilingualString.create("Building", "en")); + building.setAltLabels(new HashSet<>(Set.of(MultilingualString.create("Complex", "en")))); + building.setDefinition(MultilingualString.create("Definition of term Building", "en")); + building.setDescription(MultilingualString.create("Building scope note", "en")); + building.setHiddenLabels(new HashSet<>()); + building.setExamples(new HashSet<>()); + building.setVocabulary(vocabulary.getUri()); + when(termService.find(building.getUri())).thenReturn(Optional.of(building)); + return building; + } + + private Term initTermConstruction() { + final Term construction = new Term(URI.create("http://example.com/terms/stavba")); + construction.setLabel(MultilingualString.create("Construction", "en")); + construction.setAltLabels(new HashSet<>(Set.of(MultilingualString.create("Construction site", "en")))); + construction.setDefinition(MultilingualString.create("The process of building a building", "en")); + construction.setHiddenLabels(new HashSet<>()); + construction.setExamples(new HashSet<>()); + construction.setVocabulary(vocabulary.getUri()); + when(termService.find(construction.getUri())).thenReturn(Optional.of(construction)); + return construction; + } + + @Test + void importTermTranslationsPreservesExistingValues() { + vocabulary.setUri(URI.create("http://example.com")); + when(vocabularyDao.find(vocabulary.getUri())).thenReturn(Optional.of(vocabulary)); + final Term building = initTermBuilding(); + + final Vocabulary result = sut.importTermTranslations(vocabulary.getUri(), new VocabularyImporter.ImportInput( + Constants.MediaType.EXCEL, + Environment.loadFile("data/import-with-identifiers-en-cs.xlsx"))); + assertEquals(vocabulary, result); + assertEquals("Building", building.getLabel().get("en")); + assertEquals("Definition of term Building", building.getDefinition().get("en")); + assertTrue(building.getAltLabels().stream() + .anyMatch(mls -> mls.contains("en") && mls.get("en").equals("Complex"))); + } + + @Test + void importTermTranslationsUsesTermLabelToResolveIdentifierWhenExcelDoesNotContainIdentifiers() { + vocabulary.setUri(URI.create("http://example.com")); + when(vocabularyDao.find(vocabulary.getUri())).thenReturn(Optional.of(vocabulary)); + config.getPersistence().setLanguage("cs"); + final Term building = initTermBuilding(); + + sut.importTermTranslations(vocabulary.getUri(), new VocabularyImporter.ImportInput( + Constants.MediaType.EXCEL, + Environment.loadFile("data/import-simple-en-cs.xlsx"))); + verify(termService).find(building.getUri()); + assertEquals("Budova", building.getLabel().get("cs")); + verify(termService).update(any(Term.class)); + } + + @Test + void importTermTranslationsThrowsVocabularyImportExceptionWhenExcelDoesNotContainIdentifierAndSheetWithLabelsInPrimaryLanguage() { + vocabulary.setUri(URI.create("http://example.com")); + when(vocabularyDao.find(vocabulary.getUri())).thenReturn(Optional.of(vocabulary)); + + VocabularyImportException ex = assertThrows(VocabularyImportException.class, + () -> sut.importTermTranslations(vocabulary.getUri(), + new VocabularyImporter.ImportInput( + Constants.MediaType.EXCEL, + Environment.loadFile( + "data/import-simple-de.xlsx")) + )); 
+ assertEquals("error.vocabulary.import.excel.missingIdentifierOrLabel", ex.getMessageId()); + verify(termService, never()).update(any()); + } } From 76109d5fcbb856ec74722d1708ed7bbcbbd63196 Mon Sep 17 00:00:00 2001 From: Martin Ledvinka Date: Fri, 29 Nov 2024 17:50:30 +0100 Subject: [PATCH 43/49] [kbss-cvut/termit-ui#581] Add a bit of logging to ExcelImporter. --- .../cvut/kbss/termit/service/importer/excel/ExcelImporter.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporter.java b/src/main/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporter.java index f55e2ef75..f588798d1 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporter.java +++ b/src/main/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporter.java @@ -102,6 +102,7 @@ public Vocabulary importVocabulary(@Nonnull ImportConfiguration config, @Nonnull } final Vocabulary targetVocabulary = vocabularyDao.find(config.vocabularyIri()).orElseThrow( () -> NotFoundException.create(Vocabulary.class, config.vocabularyIri())); + LOG.debug("Importing terms from Excel into vocabulary {}.", targetVocabulary); try { List terms = Collections.emptyList(); Set rawDataToInsert = new HashSet<>(); From 28e6b953f6976012a3d02f5699713485f3e58144 Mon Sep 17 00:00:00 2001 From: Martin Ledvinka Date: Fri, 29 Nov 2024 17:51:51 +0100 Subject: [PATCH 44/49] [kbss-cvut/termit-ui#581] Add missing test sample file. --- .../data/import-with-identifiers-en-cs.xlsx | Bin 0 -> 65700 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 src/test/resources/data/import-with-identifiers-en-cs.xlsx diff --git a/src/test/resources/data/import-with-identifiers-en-cs.xlsx b/src/test/resources/data/import-with-identifiers-en-cs.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..c5309e834d6f01aba44ba6ef6b6311ebf4601870 GIT binary patch literal 65700 zcmd^|3tUWF|NnD$3Zc?*9l0xv;})VpE)jA|qMAcRp@vFH&1??IeMlLkh9g20l|q+| zkQxagj2fvaU1p~1+-L9qcXH}Xotg9cJ^mig^PKU#&T)Fa?V0ucthLwv%y+F>?`1X} zIvVP9?b=mmLJ-wKN9B(m{5RR#ZL1G)obul{LLV&69N%rfAn=f>Xma7Kt`{z+F1AiJ znKbDBFt7f%4_U37N;X(-3UKrJ}`NV~WO)uyH)qXLzivR0 z8}%-WBM9r!x7&E@CA}CXW&Edo@>7nPD8g73r%xN)yIbbHZ9_xn+_c{N#&7ZsTr!ku zy_nB58aR=>=8U~ zI(qclSc!~xjY>3JJ>iB;&&!>b-@p2RzvF@Fl0Nn4w(s2f)3l^(eD{N%3&LU^Ii)2l z9H#xW#D9m8i?6kJ&g8)_=M1tOPBxkR^nmx$b4U8s#1!4slXRcc@5rMG?pwRjyY-MV z(7S6NZMpg`(Qtp=l54}tO@6s%X`ea2yA95LKWXo-^9T7yvPO=W5YY9fxG%ojE#H+G zuomyp8=yZT^SNL5t?N!7Uo7{SHNfhmY~wPMz5%CCO}un;?1tr|CcN+P*6PUqOMwrb z@~_LLKkVTwLWOrmOWNA5m!e&rMT_yX%p=uL~}}l z$nDGxL*9yJVZ!t0|ttF{u*^4tY*eO`$RF%qHEav8S_?c8}LxqY-hjppTgosWsDjB_Wt;7 zMi1vy)uoqTjlSRQYKP?;yf#iS>HBKqC?DJK-WOLn^t@Q~P82i9YG3B4PkMbO-k3F{ zk7NJ7o?Y*-%3pVkG3Yv=@UHdIFx%5E2S+vhet%2X-v=86q=eqgSnv`^>uF)O2zYX1Gw|~6Vs`c;oTOL}Mp*z3b<1h#%mrZ#IIKMZ{x z`MEuzGR*nQkN$n7mt{uOWe#lEIZ*?;yXNH@YOwj6Vlz1 zi^EW%M^dcq(dSDp-Ybq<89Qa?v<*-8p6>FfIxV^LFMChVel~bx&&0#0vx_Yim(Lv$ zF3NR{e|FgJVR;o5s_|CBR=>Jqwt;S*w_EGKmrwp?n%&KGTskVksrf?)kPPaobM zbPyqPl6_^x42yo#J(-Syk0XWsR+qnUkGSly+|r5>kUJvUC+VGq59Q+hLG1lChiBf7 z8JJh5H)nFOS7~9Fb9zr+m?FNfI}EVws>4wb#=3v#uq5TTJ}Wjvtr}Z)C*+J=H$S#9 zF2tf}f9m}SzX;9;pKBP*f3ef^a^LItgWigc%KJhxbU^B#-9P7w<=SJk zD2E!sE3?u0b7=-YSFRXxxvRKe((o<6Ba`076CV4I_vH`H`^|5A)Oy|CXA=TL!}M?) 
zUmxb_zJ3qMVFOO-lV;ws$9~0U0-lNdalLg?p+Ryblt@1zzxnj z8r5z5JeLh0dp}tmm0^GSE^4*A;*rRC-<5(+8_usV+`m^qs;;w%eOcdb;N$0&uXg2n zyz5%)y129XB)bHUpQ-&_mo8;J6!lyqDt%Pez&o|8l9@at=C{6|itohczVhmOWk+ns zD>WxKIJJx488)I~%%PLcBd+AwZ87grEXW2@73*NacN%R1fSv8Mm^a_?!Lw4?U#V!tQzLoz--4k3sw#pn!I=6 z$>o!#=qgZ?{3))xhPzH0badr9L&o*gQOD;8jClOuba8?J) z9TB{%`HpMPmo$brT1$uAnP1(aUFM-xvx$0>lo)rv+}kv8K0;aHQ>@o-$6@K z$sdHVv4ifgk5Vlo_HF;5_{2@>)$h^0%4=JFCdCaqv~juNlM;^sAqKO|f?pIk&E|aa z@17LC|LJ7v9<$9ouJu@Z+EeaCa3{$kIHGQk7Jc}#W`Njbo>(_&&xA#~uij<7en{E( zy7zMTHU0M=iXjfbuV3MFdJmcr*_oQUTe!n)bG_-x%k_1MD?Uy(Jmhs{$KC0Gqo|>9pd3IC-RQ-imOX6?5fu-;Rs`HoY~agMO3!ibMnubFHENUkOp<$n};oo z!1a`qgW;aw9R={f>;n!}voGuSb#nOXU{&2&lbcOH+2UrO`kzhiuXMA6qn)yL?jE%x zVzX!O_wABX$UVDG_qaB7v4ul&36A$bt~IV7@Tf~~QPJp82?IJ#FNrAHpAL3|4))91jlUFnvik=K%cygWiEFHnWwo0&dUNG?Q!zg=x8{UYnXsMx=eN7khrs4AJP$-Fn{HJ!bmsb%wX1 zb#6PIpVhzPt&>(1Gw)$>bNLfwnFn_L=3J*Uf9Z7L;L@MY>E((l$Bdpfed^;;BM-hd zC&_Z3rCfZ;vO8fF{AAIMCodC=9Xej=9l2;SVF86gBPk}guiCe0UYyafi~T$syz4Tb zuI=J_t$J$CIWzL3TT3R4**CDVw~=B({^g#>2IbC3zBK#9hi)&80_5`QYQ~+OE9M<{ zkN-GTDv9Zu7AlzOTa9DpZ^KFB!Xv8Sq@lNPfn?W^e15ehR zj}JD9dvIZWLS1K{;_-?edX>h%pQQNT?tQT8#&L&VMxI`k@v0;D)WP1PbIz5EyFWkv z^ODnj1^e}6T{GtzZ(p!%M&s4Ej;t(-bJ_ay4xjV}w!d+#dgF`4xNX~3U7z`L^>!+@ zWQFzBYoqC5dNHzv*qVF@SFL#g$`r2D-- z%Z@I!EP6Sa%x-TOzSW$M8N`o^oD)2y!Z3+n|JmrE@8wsc^`wkt&NY2*tvr8j)CJGq z&Md9i_`=RS=&lz!W{dPQo9JEKv(JXBvtue0G3QRF;ax?9h@%d>ruLPK8?qA<>x)jl zzuo)p`+e`%TKDVrValAoAM!%5Z6+1zm9Gt!hK*vgi(M{G{X!bVOw4>yI_olZ)ra5N zOA>P@T}qF;s`u8$Y0A11t5Pr0{un{JU%CC}D4U2z{TKdv*LuudFFW{gWuSGZ;JnBgyr`8u zk4@oR^>_D=={~6L<`%ZX^4AM*2U#AtIh!`N{llMnlxMa}pZd=;nKHmL#~XRFv-w1H~nU9QcHwc_!=yH@GXsWtlf+n-9^2;_asN zN1iL3C=UlKKSH?gq?~wr`e@SR_kEZf6XxcgO8@1~>f|{;oqvOnK5RCh$l*&qIr4*p zXsdSk>QuR2)SDan)1g`Q^R1nX+!LK2iAH%xc|53`Z?$LXrx$5yth*P|Zq;n=-{Gyz zw&h)49rIp!K2o%QL$$bvq0ho?Uj}>ik3FsbV)eW6Sgz^)3fBplx-$%(P5jvD$;QQV z%dH<|Stq9KAPk!4ZBWR6euye0=1&i%vH<`VC(;OO4 zov55bo>5*Fsy(uP+csr~L>^y4d8n+f$c*eB-CnpZ`QfsDNgf}5^04rAPcgM|eEK?Q z)WTkJS*I7fCtALz-N+K!Fal?~TGY&T$UooPcvOd>*H>-KKOZlg$-FpbuGcWa^*x;j z4n99T@I13
3LPkTSu*V)s>S8vX5?|v(5H}d_kODWFvrx&3n$%63R=4SJ|j|jas zYIA#8*ZIFaE9`XO`J>0Cgcmn7|bR%4jAZL-7YiTqSSK;awu``lTKC~>D=*-59cR)vpMD**?>x8jm{h+4$Dy4LWVbs!X7HA2F@~Ejy?g$tJ7b#5xS{hX zE26fqm~`ydo@^!3L5cLDM9)m6H`VJzO~gaS_EN(DNc~6WdrhAd27W z>}Ei#YvG^2&ug6(#mm>t%a^<=V4ItdlQOSdyLGmigAHlQn{;xp=ZGh#{Q_(1uGpN* z>s*}lINWA;E{~%ZIqvY=%7ppUA>4C<`8_5@#JNRYzd!wg-Ld&Q5A2xtizx|viuG`UpwOh{*d}Y|aY}|kpqXD6PZ)2x&Ud$Tg()+i-=lPeD z;<67XA0Bzb_>Sz|y4SwTyLNU=ywt5{^^s0FPMtov9gQ%uF@0?=eNNfDjT&nnO|mZ1 z8Q^hhdEN*=c^B!=xB(#sDRr^HWIhrb`v-Qf+oPf>Y5wW9rI!8-`fjF!r{m7iV+VMB zp6Nd~W<<|DjlauGs|`CezPxIZK^-8z@L_93+NlLBiihQ4{Ryw+w@1V)yKDOEJ`=rQ z2$pvKyYABP!E=JL&(aj@=wr**mwOcD#q{*$oT@wLaI^5V_oG0|_16MBpSCzPmk}H~ z_oy|Si#4R*9Uf>X$edL9(8zp<#IQg#<*Dt53#i z={V1rgBQ~|wEC)ePD3gO`&`RFgA|Vi8IoK9g2gNl+ahviz!3q4K~rNXbOEiAj>?20 zlDuLzCgxF4Nii!Q#|X6g5CPu!63dFmV+IKd5P2CBm0!W7LD=WfgAi#BIBNJ-Po{+Gz0-iaF3lC#8S%`chhQhC)GJZ-0D_va2YGjbbI8DaA ztT2<;!rKutW@HO_kS;}El4*`NqCqk?1!>GoK{1vf0vBge&;fO1jO&j#g8b|baw!|K zAt16$x@|FQJ4aDVRxmSWlt8BnkhF`aqV70HQJggt+OMy06Qtx&^YV%5}QUubHfu0;W%O4jvCd(zUBrHH255LJlq6q{MgT!xW zj1kxNfN@5^tK||0xqK5R=p`Z*+UAu7iMiy5%nZ>c4vuFWt1B_aJ(CCoDG|rWSwE2x z&x~qCd0;KqQjWkVB4EX=iGlLi6h?|DB#VKhE+=nX$ia5zqqIgAfsa?Q2H>X1piil! z!U`b|ZUHv20HMx7Jjgx7`TF9+ED3ZJnO`X)AOx0{FzG?^ zgF^|gKt zX$P?e!6yn$Tt>mAbdI7v4-+>Uf3od@SBQ9==h9kZQAJ6-v-8eA$UQkn!o{Q=#gYv9 zJ5rRW1Rarq@2X}{Fzhgv&#}yqHxjXmtQc{*4OY(_7a(WZIL9;Og|?1$e(9y+gEmF+ zjB}*z*;$RDpN89_Y0kZo&!Hhz!z)IP3rKSgg&$(7hI8SE4CfW_1CO7~yyKdhsu(%$ zJ)aMkC#PC8{omt=0T|9HBnL^16e*niETml2P}>+^gl8hQ1l00&47zi(KHF28hukZb z5SfZ8a*pEBaCS`=>RfjiQQVLp6uhSyJhXL8P>gt_SfAe`ASA)6@B;6J|IZfB2EHZV zcPoxbHKuJS&hpH_<)zdjY^nm1NKix#u$4U|)y`xy<8LCiA5D12{YP269{8aLHRM+2 z$MrL*3FY-@VmSk^r{eli^&}F-wp!pf)o-eQSBpmjU$uFk`jP$$Zdodpz-b(KY~b^O z4|{A6NwO-sP`%9G-J)UOlO8@Hjdj^hNV+5r<@-}ZTta+8Zd(22UDIja1st!8DvHdB zQ~a*@qe*p7e`|}Pgd|&W)>n`BUqfMjY!pQZq%vB8zozEMpgASboLhg>oZ4(B zycQi&xQmUv|InA3sORu<*I9x)fs|#R^FAcXcUJ-#)j66b;m+SOF>C zOO0f@t4YxoQp|@G=c-FldOecK_bMN$t{3y67k!}@U#jWFD9bJ7-<+ZZQoO7#Mda=C zJV?(Ea)6>e+%@5n$B^C&SAj# z3syJ&>KrW4kA=^P>QZDtid^WN3u-ziJ?QOoVhVbfA>eb_ffCF);{iiZ!(A678AQ&= zW(exI>>!CHIpZZm(7;_sh0#icuGCZ4m0ak`yQz_6b)%ICUCDy3oTIKQCD$YWR*+pF zh`Kq)8Rnb<5F}ikAW{$n=A3wSf)s!tjUdP+b%Ma06AvkxsY|gDQcQ;w)s5D8NU@4X z3FBF~$yUG5uurC(;-$LD{N87flPRZp78_-!-e+t|rkv%aZj>dzSN=8#vPhjE3J?S) zVS9BWqzXpJ-wBcef*e&Rh!6yU8H%D#kQ5Mv2ZCHwCkXU81qA7@P7odllA0RHQP<}b zNRbUG&R3VB?0O^zZUS?4DY79&KUv69$<>v#>M*7Sw~MVLdnK(Qj5(7VVk@awNt1^$ zN!%{WBtNa9b$p}zZ5Scy=CdNWW&TzWm=y%-1QCHC3J@eloggru2|y4Fb%H2B5EKMa zH$ns;hztZ-s!k9b1c5&HS0{)JQlvqO`D$i`BDe`e(C3lr`W%Bk|JD=qR>BVhP_5JFlh}sW9<fVdwoWkvj#z$16y0z0U4W2i8p@Zp)@UyQk znJn>ea$m>gzr%KY&d|L(Hgd)B-@~HE7+qbor1I7A%Sq#sZg~3#Av3p=gXO1|j<#OD zXvL{Rt1gY$;bFaUt?`G|cP*ZvQFr3!9J7frj2<<7w9UM0D^A{<6g~Qeo@eaGf#qb| zkM<^cmsaYo2n!9nG{o-eLZ^PO!rlfa+wqSz#*gI39ey3^=I3v*VSxb`}FTz8K} zRdd*;{#mjD1CiZRfAwi%O=+h6HO(F2A#67GAUQ;>K3W&5GNq@aC@y;H4^$_z)`be} zqP!@wk?cX!qty&ZtqaA)&fpXY^O9%!TC2NxwJqdiA8i}b$w(saf|a@lsJ4Z^+{=;iJaI3!RmfkT!pS*W7upd{2X~RF z*?4GIh-FGg(^7e8)J~^)Y9F?IFPYN)y%4H1CjKc3rTcRaRO|lOP3|u#UER1V^u(1W z_k**#dz7DyD^0E^QQdsz&u%ig$qaRKzcLdrxoov2w@|1wxfgh9O|IrN3+x7gt0|9^-3gA(Wej>!cxawnZf&}>GuD+Fd_Czuh5`-Guw$%dMOWWe-Vf_*^K}#MlwH@o@4*%Xv14#Dqj}r!y+1bL z_UFZ>4QP65-HUGC*z5JjHrx4hyJIPe!=Cz1E!%oizHYMJaxu#^tEntAu4Pm2y01Hl zZ@+X2FY&}Tu&o&CQmAcOceGKLz)m*$PWWQDy5^@fZLACZOXFv*o>n9z!Xsv3^N!`h zKlTqF9TfX5X;H%qpz*NYr>XI`c^5P_9xA$;8t)nSMMLA24?zu$w`UL1)F)8o(bOkz zl0!80$-kObQhXgBIt4bZX-ZkELLdhQa-?XML#sj{NAXU!^0Tsr(V<--kfRplNYX5a zR)s(gzqBU1t6}1!XrB0>Xsc=Bi_kpr!L9^N6W>P7^ZDP*VDB}~=Sr*oFQp~4vm8i~ z*=kN%yFwrbfg;P(EQfZ5lI<+9GNpanPM)WB;4f7!_A_C-t2!SRqpaxW!FE@5PV9%< 
zNpNJ!9j!reu-#RSCp)L~^p?u)u4>HY-=s+mXZ-J)%J1!!VQ$?8j`)AmRKh0}Fj3#p zD+}$4U~cu-Jeg=$2;_jdRis%C?Fzx820jPsYnDU1LLkQ=ij1yV4($rT%>osU|4~!f zRN?rZUO6Kt1|G7ohNa0zYgZ^EryS&XuUQW53MJ>TDNPPdWEZXW6aP0BApcE{G0XIa z8{-f50pGbC`MsUvSM&8_s;n~8cqkEn$8zL%caBXH=#Qzgx|6wuQ2O4axe%dMA&^4> za!@qOp;aM}BeOOc|5c+L+7$vhN=1UWRWAQtfb0nKx(c7;F=63C%>Zq=?3JZh#E2??4< zhjxWPju{|_=Fy>DA-Gv!8RmPIBY&W&Y}!Xg(Oeb(m(!w#+rmTkq~_fU+7+UP=YbsX zbpTDf*0d|+8J>qWEt7w=KcTkP{!dx9|7O<=zD&7H{9`JW|MlBkVQz(0yB|}j{O+c* zX>N_zJh%SKX;H)dVKRx-Jeg=$h#D>eInp)Dp>S<{nO zyF%3P6j;;!F_p^iZYrB99N%%9E9@hyfGx{#zKYuQTx|=191_SmS+g8k6-v5MR~NxK zcT7eacvnzo=BGFmOui*HJ4Jh37r#bg6pL;U#jlYWRiPV7?6+cvZWLlkuTU(L0vx;Y zst6`3VIwjggM$kwG?9V<Ao3Pw5&FSG^HUI}9_&QE3H6{!N6*t>y($YZKv2neD8 z%wS)caW^!`ALr`>#E*c=>L+5Kxgi|9wj8M?A(@DxA{7yPk9Nj$N#vtUTqZzgNK*yy zMhs>HAtNmd6Q@#~F(D$0N948TxC?a}KZ__hfl4K*OpJxDMdYbOq&fw`>NrS!CI3ilZ<>}BCm)<5go;2K|QW#BOwDPkUQgCKRGv;Dr)R3=l+y|OGucQnSl$-IkY{X zbDpSxNXF|7>to_$0M5u00oAZjzn*nlQfH~CkP#VWRsCRupm=Qz6Ooq5m}uG|7)>Hv z!co-l5UCP!$xRpWVB}7=bI9Z?*kVA4V&=>yYTd|0l4 z7GjJgWLm%u(-_6Y}**K--jNl8;G^qc}7fD^*0q2g}906iJmqP=Jqu z$V4O!wt3=_7$Xla$jk_m*n<8{w1|KUk@iZ0Xn6IAA#xuboetAF{|)d}13~ncW`gMC zg1-_(r$3(lCqnf1KMXF}uP<=8a`@Vgp`o_(`L`!xk`>}!?*efijzJ2Ac^H=s zq|XQeBJzj97eU4V+lcY)ffZ_w%7}=l9Tq@enEGVL{q#6))<$|9h30=lT? zS&2euj!H8{q#&G5!gAv?a2_-2EQy;%wDcmC16Y$(_lPFTC16z~97AMv_u;?g^hWXOuyM6yg_j&s>~1CNfE3s9OO3onsL zI$?4yS*`#)DvU)DBCFyA-3=z?Vu_G8T`6;UCUROIm2-*6y)RXhaRDbt27*53;ME~; zTaz;xG2*6)lG#KM1$ZT(DrGrae#!4(7RR~Ei19XC{Ir;#D#MB4g4T zbN$I)uNmuVjPZ{sO?Hh`_)40KQt>>pfW^#^<`WL!a)k(Q94W2Xj!5#Ck&z=zuUY{r zsnE}3Mm>fBmuswGP;mKHnuJl4B~ZNA$5GOaIu;&yn>k1c56!FE7mu359Fm_d_9kE? zRMJSt8@Oj?V!3PMt?A=B$5@dG6d z=0v>SHb{~W_)uO3R)=KkOQbPF$@m9Aht@ZtLkY-1OiV1M(Bx$TT&T~%tMafKK!?)1 z;I)7b&68Fbi)tGwMdY1*kTf}3&r&r0R8vot+(M=cVvr^Bn#OlTnEyab6z9SKB-3{7 zNA7(lVYP>{&^w=tX^%-eAD1W&Wtv6t?aw*a2BAZCK4vKneKCp`nT6V?6~;exuBD;B z?|hu4I8+t?@fP>;Thu$-+n!Cn>w5QwBHl6KV?nL`I0vt4xfw&w@fRACX}{o#l=|%= zN_j~^jIe=W>v^UD-Y~QrO>=$UK)`Z_v#w+@iIT@mgn;LW#!T!&3~=0dtSa-eQJt<^ z@Rs0vbJksmAHVuE(k>;K=|_}5dPrvx_Tl1EsxvkiCiAIPQD%;2uZ=$FQiE?j43fI$mE+H&$QV(iKjdAhYu5XL$x%z-vbeqNy0q#x|6US&4G?3~#? 
zqtCh?!D(|Giq~FZ^TG=~7qVPfo;Z`oM>9+B+WpG3`ZUm>1l`Y{|$OsGk2}50jClAEi{fWq)`ST~8QHka>NR@> zgySus(Wk**37bLU>z)L^EMA8lhyVKinVW9NZd&*et~mXPyYx2s3K1n(ZsCL!*2-on zhWttN)ccAjmDD)sk*bdsciH^2x+Z-PulFB7T9)-E(X({USBR)((AymSR2RvfwJJnU zPo&SfW=xtOIuP@@lDlmR?lyMZY!0a!9ci(X%;4C8ZMxc$-x5aLp+yDV+h3qACHeB}FA) z6jD5+iic}XQOOrA!hUllpQyG8r7lSzE)v>hXN-AYFkJS~m3*e!<|NfKfw)xoCOczB z)9<#ygf`2O2XfSd9IAlR7CDqGQW(^&0H@78UJpGEnA%o=)0Q5GLH&1fpw(?Kq0MqA ziJoq4Frm$IWGIQAAV(`qXtNwjM(6uBn9$}FiI5@-QdGr+wxp1ZXG6I5_k)))3LW->rqRlBPX`e7DwnB(Dr>LZTy0k%vHm68a z(mo-@RtVANUQ_~2-?c%AHuqu^(DYkVtYH<_${Qcadc0~uw^f9GrE3WoYO*d8ut|Gl?)6XPpHw0=IzG_^!tb+F2`^ll5$ z$>u9WG~l9Jc;PLq@zD+=Z>MIW)2rcMA);w76LR=F;W(vxx{`5`e|)dLRhrMfrrAPt zYDv?V%v}!$xWn4CT79%F^d%z)jxJLkeA05Jtg6Ub7ovv8z?rG8s%NIE3`ngD$xQR$ z%vAWQ1R2~XjwJlU=AHXgW zDmNmwFm+WQpxPF?7hjhFXQsjy+ZM7;%TpN4hPH*kc(??z5zWR!yFy=*b4rL!93(-k z^`9JBm4|H;&vSusYa%)+4>)S(UaQ`t z%F~Y=Re7E&^I5g>^rJXcY^loJ@25QEsX!H5s`8v{I_|Ve^|;gjTbk7H5O5su#ZtBU zXkAEVs^sjx)NDqyD=Sc(KVS7?F#W@XHY1zO`6S!c7?!z|NPSN^6)FKSb@r_TS?Epv0*>G5&wU5bX+y>@2}uglIM%+7D%L;bTBj!;tj$^XBr#Q7gExnM@# z6$$kF64@1j8G#e&HB~{hD+K#PU`fVba}`9pLWTAcFdhWW#zVV8 z_uwn)tfobNnW=`&s{hsN_)^^5hdA?;)4_nt_@VQ&=|}wLl@6a!cuFTlbdUT}BQxKAdy%SQXaL(s(i%wml@U7DgW$XFThfY* zO@lLcy))D6jvt!Iksw+L%TxP>uSwymG&)9g}r8aVCS17KoxD9`)wH&SZ zOaJ^(TwQG&{!*KqRnbiU{7_t-unm7n^~u_XaYI$HqYBDir3BXGV85XN`R{RzV7h*| zF}|C_`o87J@9rF%%-3PnBcoe?u>Foz*6H>E)HW#l)>Fp6T#o$i&ar6%{V`S6a#I16 zzFYC?RX&MnT?pi8i?VMmM=O;5KR#4$N`N7tuenU6T_KPIMh9K99NHBsH>JQw=I>dK z{NB#7X~~bGxxc=rA$H{E!$Yt{HsUKXU2*+q`nQY5aFCNB%%lnPLa7;d|~#aO4)kWb$0|RtxP4 zIdT;sN0DYZv@7Ju%>+3fXqH2}LXO-LkOPht`IGb4`pK(ZAxAC`)*ZfMIr2xE%BFo} z-*HDmWNgr`G@kO?0K+?tUbHZ0#fh({Y%XfvI;UMBkb?$turYn=9-E351n7&2y`E zg+LAj7bEW5TgU0)PkoNo1gT=5jDL1(jn6QlBb7gXCBikp8&s z@!Ak#0p1ud&{wbB=Xb#@?r|k15hOv!YmNr4Kz8=A7FwCeH>7M0k?2*zKA0v ziuRFD;5CJLNXBYZQ5z$`v*E4d6>B zh>0mBL}GPny!j`hZH*!iug$_66e%QJs3!*iYX(Y_NEtC}NYt4MHW6r@Vzz*`h8&m6 z5LMW6kh_z}1!5Q>3K%cY4;)<1wUhHBP&^Okhfwf*xIjV_q*DMOO2E8Pj7xb7Kv7iA zAaZawCg4kpPk)vwM8Fvp&?GOhvpx^~x2-gJ>FQo}vLI||^J`ao-yT+=`Q{NB0ceSO^B+P_bT3_ z*NiEKOG)rb{r-Fj_%Bqq=~cdFu9|~d6{b$JE@Z-M%z-QnTfNQ~XrD3>_x0Xxt}A`L zJ-jxl&FAOOF_TwKAOiqk>0uhN9RU1rk>nysVL OP5D3Q#NQOkpZ*_D#NLwt literal 0 HcmV?d00001 From 085507b60393525506c33a00f0454c04cde7246a Mon Sep 17 00:00:00 2001 From: Martin Ledvinka Date: Mon, 2 Dec 2024 10:44:07 +0100 Subject: [PATCH 45/49] [kbss-cvut/termit-ui#581] Fix EntityExistsExceptions being thrown when importing translations to more complex vocabularies. 
--- .../cvut/kbss/termit/service/importer/excel/ExcelImporter.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporter.java b/src/main/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporter.java index f588798d1..5de0002d8 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporter.java +++ b/src/main/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporter.java @@ -256,6 +256,8 @@ public Vocabulary importTermTranslations(@Nonnull URI vocabularyIri, @Nonnull Im } mergeTranslations(t, existingTerm.get()); termService.update(existingTerm.get()); + // Flush changes to prevent EntityExistsExceptions when term is already managed in PC as different type (Term vs TermInfo) + em.flush(); }); } catch (IOException e) { throw new VocabularyImportException("Unable to read input as Excel.", e); From 83032900ac28fa03fc0d378bb7721eda07c36806 Mon Sep 17 00:00:00 2001 From: Martin Ledvinka Date: Mon, 2 Dec 2024 16:43:38 +0100 Subject: [PATCH 46/49] [kbss-cvut/termit-ui#581] Add a template file for term translations import. --- .../termit/rest/VocabularyController.java | 9 +++++++-- .../service/business/VocabularyService.java | 18 +++++++++++++++--- .../template/termit-translations-import.xlsx | Bin 0 -> 39672 bytes .../termit/rest/VocabularyControllerTest.java | 18 +++++++++++++++++- .../business/VocabularyServiceTest.java | 4 ++-- 5 files changed, 41 insertions(+), 8 deletions(-) create mode 100644 src/main/resources/template/termit-translations-import.xlsx diff --git a/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java b/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java index 9f8475ad0..b90780d59 100644 --- a/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java +++ b/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java @@ -205,8 +205,13 @@ public ResponseEntity createVocabulary( @ApiResponse(responseCode = "200", description = "Template Excel file is returned as attachment") @GetMapping("/import/template") @PreAuthorize("permitAll()") - public ResponseEntity getExcelTemplateFile() { - final TypeAwareResource template = vocabularyService.getExcelTemplateFile(); + public ResponseEntity getExcelTemplateFile( + @Parameter(description = "Whether the file will be used to import only term translations") + @RequestParam(name = "translationsOnly", required = false, + defaultValue = "false") boolean translationsOnly) { + final TypeAwareResource template = + translationsOnly ? 
vocabularyService.getExcelTranslationsImportTemplateFile() : + vocabularyService.getExcelImportTemplateFile(); return ResponseEntity.ok() .contentType(MediaType.parseMediaType( template.getMediaType().orElse(MediaType.APPLICATION_OCTET_STREAM_VALUE))) diff --git a/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java b/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java index 4861464e2..bd250002f 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java @@ -299,18 +299,30 @@ public Vocabulary importTermTranslations(URI vocabularyIri, MultipartFile file) * * @return Template file as a resource */ - public TypeAwareResource getExcelTemplateFile() { + public TypeAwareResource getExcelImportTemplateFile() { + return getExcelTemplate("termit-import"); + } + + private TypeAwareResource getExcelTemplate(String fileName) { final Configuration config = context.getBean(Configuration.class); return config.getTemplate().getExcelImport().map(File::new) .map(f -> (TypeAwareResource) new TypeAwareFileSystemResource(f, ExportFormat.EXCEL.getMediaType())) .orElseGet(() -> { - assert getClass().getClassLoader().getResource("template/termit-import.xlsx") != null; - return new TypeAwareClasspathResource("template/termit-import.xlsx", + assert getClass().getClassLoader().getResource("template/" + fileName + ExportFormat.EXCEL.getFileExtension()) != null; + return new TypeAwareClasspathResource("template/" + fileName + ExportFormat.EXCEL.getFileExtension(), ExportFormat.EXCEL.getMediaType()); }); } + /** + * Gets an Excel template file that can be used to import term translations into TermIt. + * @return Template file as a resource + */ + public TypeAwareResource getExcelTranslationsImportTemplateFile() { + return getExcelTemplate("termit-translations-import"); + } + @Override public List getChanges(Vocabulary asset, ChangeRecordFilterDto filterDto) { return changeRecordService.getChanges(asset, filterDto); diff --git a/src/main/resources/template/termit-translations-import.xlsx b/src/main/resources/template/termit-translations-import.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..47326329bab451e2f0df4693637399bf72b2c1dd GIT binary patch literal 39672 zcmeI5c{r5q-|$JHY!#(Nw5e2PER|GDyO^X>sVtKuWEmOT7-rfesT46}tB{I}vYQzb zLYfegeXK*a8OAckZ0}WF^W5FfaXui&xOt*Z*T}o`#=?3MMHN~ zKgU)q+G%nUd0?|wxP{r|=k9%BkJZ}9HNGFsjwT1D?IbN$aJ;<$O>+z?q;@hN zD=wGUIbyyDzowt@acM*TgEjoe)WMh-<)XrVFVBmU($%EJw{)DmG3M)njvQqqEm@_H zFbg}M$fNcvABW4x20z*MgaI@{%^V4d4 z1AU8$bcd=0qfON*vZ0;qJ5p<-mMgbC3`F0-DEBxYICQvBWnqM}vsbRa0qjUFJf+<* z2I_refy3+A;h~&s5ktipd!KH^zfO@RTp1oRldSfZykN$VPBid^f12yE=<*Ow=Tc&} z${D>k@P#ktop}Fh-*nN1s10H0nw@*{6by4-Gn_+n)_t<aUytLAK#+br|f z>j&28zBaZx@ceeFkmdGdpV60teigmS%CUQH&9(9_+Nm)z9_skw{k#VYtyt$y&VDoFq3!+U#Y4 zyqtCHvdNLy%kxc2uT*?_=AkF&A4^(lT{GzzFm`yht9Fv8(aF{$raphco8vGhUGZi5 z>-HN`7AGV>Un`wC@@-B?mx1Mphn=6~*F-meX%TJ?p9q@Qeq8N*=foaLNpLdf<$Zo) z`De{U;o@yhp*PNyo~i!uYKdGGm3vj^!G_+as$2Q{YS~T6w2%_duVGDB?-9e!)NK>o z{*?DpKk#`pWybgV#VY4`a>2|H9_l0Sx6U9_C}24Hy2 zNd4UqQ}{K+P~K=aN7Ro2HhQkkBTh#psMG2!7O}?V$qwb?J_#|?dZ%4CpMEmkbxkt8 z;&5KWZ7kpIsBF$)-eov^V5Kj&?gDiX)v0vYIw3p z>!iujImu^N96y%ucKBF=+9GMGaP0{ltHlQcs~$(!-MU5&4E6Ecb^U#P!o*oP=ic(A zclK`_i+{3uY;nzwJ(kW=*Ecdtw4Xouuzhe*nvKLGNZ_*D9#c-G^7Q5x3G*+8DnC{} z-}*H8@{uF_FVSyayKiwzpV)ZI=myj+WHH=rc12#D96B$5S7W)n;(izXofTiC=MeUq z*_Z6^keK3=wob}A1bno9vYBbn@@g;LIq>V8Q>wOpC5JN7&SL!A7T%d}W_V!o^_cyQ 
zmmjj`2le>5t&-n!S2;Cwo~b_SG|5vw^yHo<1J`?6?y+$zZH-GG+dbg~B)BEg8dK8y z#(5V{AAm|7#ss;>4oPobaQ=4t!&B?ZrH#*MC45<~sYM;X`9ZE^$%9k5?Welm9~eIP z@ajIs_FVLJ7b9}@P|A@@`=`-sj{Bz;Uk%Z`w3?|+L@!Xo+d7IH0q&EZ5 zR7$Qgxf!mG|&ht;O@Bhke|Ar&%uH0p=5)&z@f!!{1Acx*^(e&lo&w25c*HDQA&Izzx zLlTNfbFVQO;nLeiuf4F|2VbKT_DLGmUUC#0Jot6jVAq5EZb;Uh$=%MYvlTxi!reZ9 zyT1giOP8iM3_c&(uT}+OD-BdkMAzTe5mJ>X54+ddQL zW0`m7y^7W~DEuTfuhmkJ8Bn-a=4ors;674rY^L49q{j3k8w&b@S2&WcJWa@s4`@)` zlRjefe)D>Dzp0IzY@`Ymo*i7JaM({*zZ@rCg-aJ~yUXBGW?jy;P+Hf1jUxB#(Y{?ASwI zqK^CtO<(gd3v5C65B(%a8hd8 z&P3p#R##P{Yg>ftc#^#hBmLPUM`9KBkxkUKt!k+*E6Pti@iw~-MZ7)Jkw* z>6OSo+1YFKB8#?qsPgGe$M0l`teEQ=^}NP8mZ#wE_Lke)Ov}@yW4FIrkABcnp7+8j zI8j0~Q2BCk>aHu1@88|jj@*#Ex_&{B8#T*ixvzGlu*k7B=O{56+oX5r@QEwa*BhIP z;~(x1i|vvzS?2EM;CiTuTA&s_xpd{zENbUihs4fanH(RAtNZOb8IS0N9nc&8{iEAI zT&_|jUTQ}~kZio1@Mo{QPf1oT-shwi{z9XA!?v9pf?oDY#>v6!CF91H?5MNuKRCOH zcXr34l0{R`j!C;cN9|~kjQhT1$M{OO(s5^2x3#u$vhu9duJPpwI?KL-EL9@GwbzUaj(syRgI&n?`b?V&UoOGy#)ZXmvCHn4eF=YdzND}yo>jEm#j32%#^LRT+OdjY_T7>Dl9zq2Sk(7B z`FR&TfV^G&0#l-Z7tT%5o%r7VrttWjMLQ=AR2;*bwhB)#ZoKcHcr=Mz zxAEruuG>}tSy2H2_Rx%iH}lOo+i>f;+r(k4{!9zsDr-jd$Aa9tNvC)HTnUMD@_(0UR{Y8| zzjnvJ@BDu5l*y6*wNw7+_atxSkDc=B6x&MwYny!HuDCC5k<80SER@~l@_Ogd-RJUp zg+fK#>nVG+4>F6{G_6hV)aLH$jidQ2p!q`m!m~0}8<)zgYEaG#y70(DZA+H!?TuP3 zXVtbl89#-`eQS8>aOG9xzBn@Dcr;qD+U2$6RP2l9Z3~|UK7?$ySJ?jobC@g zs0uxM=Y^1yCAeV*TMpmcV|4j%kOblv6-$+Hc9uWp-gs>?q@H> ze!*+4w@%yFCZif3#sJu;4Ygf_vvQ+ZPr19Fyp1a(#5haJA`Cj8v z%M1sbZ+_W$2ddJUo(-=uyY~Dk=1~Ea)545HUN*$|q_5w0DM2S7(FjKQc+J~m!85Cg zk&ZS*4ffM}^w)W2$u5bxeebNoke6%F7pzA6&kY9gy>|JaO&4?&zA~MNTj{VQz^=9N zYh!elr{*Ioui2iSp2}DIpXq-z|AI{<&#bxmw4A%wDtu*Fta8{nGgf_L49ep{&&M5) zwKL`F?b`3WZVPa5vZTbACv$KGJes?~*L!;Wy-WzHcWPc(YPYs25@mTzk4JT9YF*}0tbR(eIWMAVC} zn{h9=VY#c#g~xMw3uk5CPS17CkdFbrd5U;Z!e5o$+-dpj+`O?4I=i+w2z%9VZaNoJ zQcpK{&+kAIZC#p!8qUbZefru_b9?@tS*&>hoxo*2q(SED$3x%+)8I^TtX{OrM4JKxN@>V7)T?)vr&X1H0Hc1&STyg55 zU3{vKODjpMZQX^Z^50{GchT2M);(Hv zmbZG(+)6!j#T84g7n;^q`n%}*2Q56QD8KV^*L)v@Ty^8IoV=&6aM$>&rmUgf7p~~2 zd*bkCHm*tOAYG)Kie4JO?O6?%lC+mAxl4cT&3dVeGJD!(_J&f{9g~p?FFvSXwX#S0 z!PZs#@6DqIT-g^}KliL}lF^;s3@0+uN2N&b>Vmrj#swlp{V#ONcnfv#7G8Qx`Ss9vW;vb!$(k zLY^XzTtXf$_OR;@xteZSq=mz875w~OBW0C3%oCWcjk152+7AWY6#7IPAle4{^`#$&Rg7;3GJ73Z8c9jd)?^<`Zn=>^S#jT zDq1tkTzvBwQgbVh-<&hLYQy{zoSo8{!Z?`#dg-ea*op!dqp)OdSD&+6+YXnF4VxU|l9+3oF|zZq`x@iA0WYqJn5 zxvgtY(n!-c2d?I@Y5ItS@e$YarZ%`p0Asm^Fpbw z%v#3SO!nN`m@m(R?s)`X?ZSN7s`ASHq*In%c8nRN%y-%P5G$&Q!iJurtt;g%=dRn^ z$5?PswP$FdP;ND^#l<%q{q3m8u#+~2Oo?(nOFi3RK|Wi)2|$r+17u|0dn z(iR0~M+xTqXt#B2tM{`&tHc3q_dh?EQU1N%9DGpTNcW#jSMzp3sg6o;cSo9ROWP@# zNA)>jJ9nRAKcwYJqL3fW!-6pDZZIB%Xc!-BFybG`SlF`YN+EQMmV~x~;zn&GKXs>R zNmbVc^;=Kr(p!nXO*ZT1-Kmlc+q&A-Fiqu3YI9AO0^ZO8vhEP_Wa8D6+aA=E#*Xwz zM(#S*c%mg(nX@*~kTX8Coex*M)XAKoA=hBsld(>1KldKp-z2W#fm)9P?|&bc^#P=Z zx1)zQ;uPA`5oP-$uIkg)7GnBTmmHQ z+t9niL2K#p%)^S?;xZi*GT!fccKX3#|2vD1A4NU;q-OLPS&!P5Apee2aSgto;AQxl zWT1L$=b7s%imp8g5_IsFi|{3U7;%X0cUXJ_Q`7JwC$lOnFKpu*)dFs_^;ho`veGu$ zsqza)?#=sTJFm;}e%uoH=C1~`HOMQTLFoo5up=}HHJ9iUwHthSGPBEs${K07K8M`` zE{X%UDmndi4u!pg)2VNZGUp^v2tS75uCGh=J%AzDC;fNb^*?-5s`sPM$nCbM_$Hqh z-4mQ8TN6xr1hn0nT2cN%mtMzTy)tt2b-(=5HS-_q{n4Bf+Q@cEgG>{GO1u5I;XQvUnyXaF!LRnvX0l!uhcKc$7r-7J9qkRHt|Fn?HbD8Y_emxc8H3f$Yy#lXo0M5b%CH8hSFdo za6#CXL?AS2GoSl;qZpEK81wy zyl|<#f?^_fs9Vs%ZwgPs`UpELEbgc6NlFN>;dEFDV>eEBRF@?cd6gtDo$~p`> z?GpQtND7umhiD)LGu*bOe1s9Lc=o$Jmd)=SrY7?QwVnKN!4#A|HZ(mP$3(!4jY6os z9WdSq%qEegnnXeur4XQOv~YS*7mMP~PBol>p~o8=+=T;l$;-mNas-1(;qt8v2vF4_ zuld+1u5d~a4r@w-&$h+EI&nfl5pS|C&EgJ>)+rneO=Pg?4RTY0Wfu6E(dvecwHCEK zk*Wr&{O;+pY);z5a3^ji8A{_**w9aO2MX!3g&%JiA>_3-bN#C6bitsRO=c;jlN`XC 
zW|4-{T;F_aTS2i&qf%M5`4*~teh4(F9al(FgHbJ1xq`A_U<@hc6QKDx;VrB?-ektJ zxYjse4g!lUe95E<*$wodpo!@Yf?Wxr7{~SpdcAg}3_dy9k#3cb%rqY1R`MAAFkpP! z3D}H5!izLYh=4b_*vNJyNpUAEVRUc-zsdNLaBvMbH^eRJfpFGD4QA9+z`8R<$5Gb| zSST41j4p9<1P1Z%NEt9hE9+f`t5tsBt|4wdFC@P{gc6EWM;7yV-GWM0C1$6zhPt4P z$iSNf25`S|2G#u`8Xf`zS`RZx;uzYH*U)Rkw;gBj%j^VW#4y1aeU_K4k(k(Rkw25B zl>d@aR4$lVO6k_FsmxzUdrsobYzUxlp!dc#N5y>&NA_5G)ME>)obX{hYE&k!*9>7S zH;o$)7NpGzVuSqCD3mL;`x0*&%U~1*TV}p{YI8hCIo*jj_s9e(POFjTg$!4P4A&~L z)%py-qp}9NjCQskF>d$A77VPe-IsLJNJgWi`!cJ$f1=Dt2CX=&HexsU=&$ulZMTxF=e$((oy4%1}SXdo;!P0 zE-n=21_oA8w#NyqYtDLB46I!kyzB!`TO&yqlclnC>CZ2L*12i55K}J{?4A33;Z6V0 zZ}6%_=sfPcpI-~#cF}D5u2zih!t0wpthRfuZTYV9Xvvf7vIo~lk1sZV5IJ%Y@XjrdM_eqk+*i*zac3Xtpj6r=X#9;(sYQ48%lBxu8g9o7oElO4i6qq3ylUB< zc5&br^1kmeb79DG0aae;a$C5g3>V)`sX4ei?J_g|Zo;qf+y6!RYMSAa-7-I`?3#nR zGF|hk+3;7DlM^hn{f?E_{HpSb(fB81)Mj7BpH*&h9>^IqvW^(D2UhSQ%KyrK9nQWYjLOj%??FNuZ8z!8)4kPIF?$KLG1!)M{vi8Sn<{ z$n5s(NWrx!P?)zn{mYo7?tI!91_|68Xn;!a=Ia0Jo7dGWZ`tj1;a5B}cYJF;(hQ}* zcuaCoho@_ngYg(YPtbqoSMwf3E*g!0L`F%2@q{}M+?!mAAASZov#}1*?pxt2qZ$85}h;0P+*fP!6oJRSxRRbj>-i%EiwUvg>iT z<6#GDlGYA{Kbjc$$hq0~FUtE)*R+VrCnsbBAymxKCSWF#4-2qKf52jWK( zP+%+Z4(=Yf#EicQA~&`gZo~|zfk;?gjbY2~0nu0t^{uI*8Onf2iyYLk=^8^YO6>Cl z6fhP`z$leRuz)P7JoEbt$+RMMjgy|67EB|h3Vpmhu6JsBpk_v3YDS>uo50jJJcFK393T1? zG<{1Deam(FmSFmpP`al!-6WcDDw=R9n&2HxxD`!!9NqiX3+u`;bK{t~bId$AW=M{i zry&F~un|w#h9?}v6Heg?m+%B{eD9bS7R@mW;Fw`JW`P{DAdVTK zw#5(*#t=@$^va{KAsn+i9J5f4*vw^G)x zL(kt*qiNJ@(|mQqd{fiIti=`a@PO(oMkNtrU77M)ul-wxMd)M76g%pf#Bgf=^Tqcq#*22sSh@PN}qopce{6+cs$<1-MA$XVE+At!ekcMFI&d5~=y5mEa;lD~0(+B3UXhXj| zkK_a_5~3&c`fg}3Slra&!6lwf;so-~q)am;8Y;@wz!Ul?8p}=uUSLhzLZ*{N8~P=A zBr70Si=NPLaYGY;MdBj3NLV$q?10Qw4lWXeEIJw3(1(GG1OiwjfG6~-;382BZ0N;c zaZ`&2mv}mf6UaZ3GMPZGz6|DSzh)K<$ko*#mPR5F?8J>~O>$ARH3-iIjRr$3pfGdqnrbYT=FV|YMlWA_|JgW1_drfUT3 zqK%d{gdEl)KE~%(pGO$;cUCsB+2KvF$p(G08k?IBrO`;_pc#G#!ULML0-qjW*gVSh ztD!40)PI?|JzkhHR zH(u4o4XF%?BUVzi>^cLd(r|$jL!A^dhQy~|DGn&$%o30}1P=s7P|f%2v=#8^Q)ra1 zKN-KDMzgh>OiP=da2>Pa3<*X%8eGpO9%FaEZsy93^)z-gArN+H2I0Q!F$O_tN>0dV z+`{iBBuN5iyM;rIFbd@+wQb0XQp{tO8DYB+SpKjKkvCn*4S|ty`dE(er=LcJs()s| zJ_(6$@lt=IzxW?~3c8?A0U6&E_0y+tqqgmvfJU?@)}nob)j&G%k=2ngY|zH($-`Uj>iibZNz*g7Mmjs zoI(VUtdtC3b`NGJhB=M!$4dD;mVavR_<#2)K!5WoM5&5=3QBmH-+cY;i7=0C0r{Mu3tKg~GZ~P%p(AHoG42)Z86p472*G$>~X7)o6Ti^o4&2${X&9LYL zN&KNaYvWFMy`;{ug>jVdbHo-_x6S9mQ%bq1eq zBdjvpyuHEs|5L#U8c(tU>2{p~lQ!UDzoNTvLwKZFW?o&}MC1rvV= z6|gtTj7Gb@HW(RoS61Hq={P`8E1h`Ok%L0&a=Drrn;(jV83Mgm|FTl_jq*9a+y+my zH8J@r@~WUtAwa8TBJ~0V)F~W3T4kp@Pha|%P9ew9Y&u5ktUjnyFtvPVU|#YP)G27k zMHsm+$P?)lx*m@NTk2c|bqfDNhcJlKEu+MCBdW)kmuK+$ytY{9fv+P8mLhM!;eS#7 z&pL*GL#Gf;My&u(aOeN6PJyR)YPRXwOzL} zY?3aeE~eAvGjcm{>i&PNWB3f{6hsI85C6Z@DKy|--i4hm*GTWFJv}ayJ`;KR-_kK; z1E=;&!BhK7M&tiK=@e#bA5MgPhPa1&{)LX=-*6lJ6FP;zcN_d8It4q~ zOKTtA-+Oq@x%rni=rHD9R!U&k&WvmQJlV=y7vTGK(yj|GEw%5*8U0_^Ze4KS->(C@ z;QhZ}-|2#%nfu$N_iH zh!r)$ieh-F+0{Uy2vaD+1&RoTB3?leRnSU4ohLC7HD@Af?nIRIM3n4A)B;qzeGO9w zF<%#yqSPm%R!>B&orqdL5w!uO=2%0}LsaP@2K5m0^$}Y7 z2vhwK_x=cVCrE}qcxG9-T9aI7v&;oNvmE^d9`L#_s}YnQ$nU^UUPRfy1EmMe?vTOm z{!%YcdZ5#3Q_rmqmJ~@3$_rDR;^lVNfQO}HP^JV!)&6+!u(bI=gt`wTV;2Z+sL>>k z+AQ+}!Ale20X>CT<07yTezG2AKO_QUAcG|Xr9C1r*QTCW8{91d^9xf3lH|A|+k&jO zDPgbb<) zmqm&qPK5`YEXvXnf#LYcci#5rMPNK+@cLkBsR*oZTfe&@_?gJI;9it6@InsL05$zWpuZwI8sX)Z9<#soM;Ilragwv}1uR(D2P=xv$NX8jaA2(`}ZEcqkL=k7e z14@gs>_y;n_{j(#`%fZpHe_($aOraq*wwb4_$}C8Bt7T=B7P&s71}}%b6hf z&2WS|5t3mm>fqz6lgDNb+Yh$7p&JZ-~IjKp(dTuMtTP%!w%jIdYg*u&G}H;47+^b3w5CXoPwb zB*RtI$8DPAx3y_X2w0nFpkQkDoz0s6QtSW+ZC2nHf9 zklWo3f|vQ360WND7l2@MMua*YlHn!l<91DQwe7M2QN%^?fak=lIuY0hKY7y6{WifZV$FkCY$0}th^;B$ 
z&m;dsJuzbGfmrh(7F&oNB4TTb=tV34p`IAA^gyh65Q{Cu4iT|61>Dg8hk9bvftOVN zGbeb%f!A`1Z42N_vBZ7}u~9?p5E1KI{y9O7Sb8AVJcz{>Vuy&>ngVX<|3f`7cEE;S zEIk1GSgd&vi!HhiSb8AVJcz{>Vuy&> znj(71&3~vThNWlJ&+pcP8ILT9-15Teb7lptsr>pZ^YT*q;y2uUNXHVvkRU{Y{%oGO$#4wsRokm(O*_ew}0e)0EK1MBqS96RQN7%JD@Pi8|~(Z`m3U? zn{;~D&y|okCoduKL;3K>+gs`X;-Z`#9lcS|pC9!9>MmYdXSa3(MPUGL#UHl-K3fU2 z0#Jcpk^dgUU$#QZDs`AAaF0e=37}Dbe>(`&??)ic_Fj&cEWN#4Je>YAp1|)_^G^aT z76vrOqTg>IF%2~9C8Wb~FQg|5>VWie{Hs9Lv8~?E0>$EhTP*xj@etr{|5e=H)AO%N zcYl+(h6hTG1MkXzDot7ZTWJR$lsD4-uWqpG>r#CW0Nn?){hx1u2QIA-AU(VtJ-iX8 z(4LMc+rPTO?{1Gjcjw_Hf7zXSe|4ANNBE!b5`E?`@AA_MavVMnsJ4WJ{Et6CCw}|U H6cYajhMLY$ literal 0 HcmV?d00001 diff --git a/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java b/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java index 0a5a6b9e1..202354185 100644 --- a/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java +++ b/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java @@ -638,7 +638,7 @@ void getAccessLevelRetrievesAccessLevelToSpecifiedVocabulary() throws Exception @Test void getExcelTemplateFileReturnsExcelTemplateFileRetrievedFromServiceAsAttachment() throws Exception { - when(serviceMock.getExcelTemplateFile()).thenReturn(new TypeAwareFileSystemResource( + when(serviceMock.getExcelImportTemplateFile()).thenReturn(new TypeAwareFileSystemResource( new File(getClass().getClassLoader().getResource("template/termit-import.xlsx").toURI()), Constants.MediaType.EXCEL)); @@ -646,6 +646,22 @@ void getExcelTemplateFileReturnsExcelTemplateFileRetrievedFromServiceAsAttachmen assertThat(mvcResult.getResponse().getHeader(HttpHeaders.CONTENT_DISPOSITION), containsString("attachment")); assertThat(mvcResult.getResponse().getHeader(HttpHeaders.CONTENT_DISPOSITION), containsString("filename=\"termit-import.xlsx\"")); + verify(serviceMock).getExcelImportTemplateFile(); + } + + @Test + void getExcelTemplateFileReturnsExcelTermTranslationsTemplateFileRetrievedFromServiceAsAttachment() + throws Exception { + when(serviceMock.getExcelTranslationsImportTemplateFile()).thenReturn(new TypeAwareFileSystemResource( + new File(getClass().getClassLoader().getResource("template/termit-translations-import.xlsx").toURI()), + Constants.MediaType.EXCEL)); + + final MvcResult mvcResult = mockMvc.perform( + get(PATH + "/import/template").queryParam("translationsOnly", Boolean.toString(true))).andReturn(); + assertThat(mvcResult.getResponse().getHeader(HttpHeaders.CONTENT_DISPOSITION), containsString("attachment")); + assertThat(mvcResult.getResponse().getHeader(HttpHeaders.CONTENT_DISPOSITION), + containsString("filename=\"termit-translations-import.xlsx\"")); + verify(serviceMock).getExcelTranslationsImportTemplateFile(); } @Test diff --git a/src/test/java/cz/cvut/kbss/termit/service/business/VocabularyServiceTest.java b/src/test/java/cz/cvut/kbss/termit/service/business/VocabularyServiceTest.java index ca6b97ab2..9ad04a33a 100644 --- a/src/test/java/cz/cvut/kbss/termit/service/business/VocabularyServiceTest.java +++ b/src/test/java/cz/cvut/kbss/termit/service/business/VocabularyServiceTest.java @@ -383,9 +383,9 @@ void importNewVocabularyPublishesVocabularyCreatedEvent() { } @Test - void getExcelTemplateFileReturnsResourceRepresentingExcelTemplateFile() throws Exception { + void getExcelTemplateFileReturnsResourceRepresentingExcelImportTemplateFile() throws Exception { when(appContext.getBean(Configuration.class)).thenReturn(new Configuration()); - final TypeAwareResource result = sut.getExcelTemplateFile(); + final 
TypeAwareResource result = sut.getExcelImportTemplateFile(); assertTrue(result.getFileExtension().isPresent()); assertEquals(ExportFormat.EXCEL.getFileExtension(), result.getFileExtension().get()); assertTrue(result.getMediaType().isPresent()); From b8641081406f71ddf40c17de797d71ee541e449c Mon Sep 17 00:00:00 2001 From: Martin Ledvinka Date: Tue, 3 Dec 2024 13:18:02 +0100 Subject: [PATCH 47/49] [kbss-cvut/termit-ui#581] Improve help text in translation imports Excel template. --- .../template/termit-translations-import.xlsx | Bin 39672 -> 40061 bytes .../termit/rest/VocabularyControllerTest.java | 12 ++++++------ 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/main/resources/template/termit-translations-import.xlsx b/src/main/resources/template/termit-translations-import.xlsx index 47326329bab451e2f0df4693637399bf72b2c1dd..5688ee389a86f7efd6fae88335f18a856cfa36e9 100644 GIT binary patch delta 8139 zcmaKx2UJtd_O~fglq%9xqzFN22?|ma2v$l!dJ{r#igc*~jtB@gPy&RGQmr7pCsL$^ zCPkzRkrE(;5(qti^o4tU@3+=BYo+XD&w0*o_StjJp4lj+*{z^q(S^|-;iO_@WTZM{ z8&=37LQT8>Su9I!P4oK;5=AQkgvx3U9DQ4LTMLo&{!+ZCHp1h}LnAxk;k(x{A#$Qe zhDywfOLMpasI9hB5eh$ePVC@}uGVpJ$9iz9x_3oT6(^nxcc89GR3k!F1-WdGW~NS1 z8JpJQmeL?bHrPPgb=199rK;%{X< zN0ST$>#b*t9gcHpK1zfn^;{fc{~THHgtar5-L%Re-C4{^x`9S-HXPl{9v;#*_;`kW zNOcx_+9TqvsAtDgq5=3}3pe%iv*mk9(%0$A8g3scY!nk`{B*zjnzu+c+8Jv+Q+Bd! zYV&kGbsbOH^(P>?V5txVIXAM%t zwI$UPQnDsIkDhAZxV@&>Qs_c(*3&uWT!tm569us42U3AuCa@{k$V*@v}ty-plkj z19#nfxncS;Cu5Vdc(`#7%89-*t-UF2fep zGi%FYTE*|uyNbiU6{{iEAKY}6CD3Ub)bPdGg+ZLI%Ph3s8%zlS?sYcZU2kGW6wVQ4 zAq;7lonfAclM6ROH&Z#jxKDpLL-ZUD!uF1#u1id%;K$VAv6fOP3sFhXl&>4b z-!vroN5aduTE`tg>BDN03ti8kjtor#H=_$nUaN{t`+^;OrWDS$dWJSe0Y(+!T220; zA|orGgvF!v-ph0KEyLx#j@{54HmsO*e^*!ZTx25wR852G;hac3dvxjMZSil02+eUr z$kZ&A;c2BaQRSk&-&Y@9t$ietdNiR`um0(&p_fyAN$8(uBJR}2f_oJoRIW}OYjI@+ zt-QT3^ehRymu7zQ4acgxWJD!kA9akm@|f%LDXc(zCzfIT42|}XS$k^Ws%-+tOWhZv zlL{%r17rR0kH)if6pkSFG#A*fh0jNVt~|YAaGQasph47lA|$?<&$`#ZD*rr$PUvUr zoM?!R?#H7ZMIF==%ANM+`k5W43{uroeaWAEW3M6C3V%osajSbi_B9k|1iGV_MbQk6 z-qnU)`sZ84`zPrQj>!fOYxWIcUWai0_%37j_3~BD8!Hy&-jwpOu^4eh;rYz@-zA=g@?&qN3Lsj$BP~s!CZnuj2beNx4 z8PgbUXy_-=f&osQEDgRgaiC5MZJSeZyWVA&x!Cz^=~jD7Pz*V-K9D8vblBG8wAIXA zTl&~zyCzRydZ50Ixy;tv>c)K)?GL?Tb1RgFjmeVTCtinBvim3zO7E%bDp+&1_@ZkH zduD>ow|rn2*OPmT37(@D-OQ_8VeU$&=KXpuY$b;s)hJ2y^!;4%y$O(&%W_8G#TbMk ze6OXkR!Pjm_M=I=Vl*;)L2h1a5VJt|`A31;)eB)~#^2-`-n%34W z@yJ?dt`}j_P@GirWTtXYVI^4p;k}b~D?x2{(`NH&Xbo-9qFts?aHd^Z$#qvgSnzN% z=deD=*uqa|@J&#QXC;lRb$;qY*`c*A!c))IG0Dpg`Zwul2N$HVSN0Ofli^!d&yENW6dJwX~Q3=Dh$h?dFH>PM%Lb|{tWaakx`p!q91Z_pyR7+Uh~ zT+3HB_Kj>i)?4Xd)cecxq2NkP?1isUJKX~~rsr6UhVA#>Z zPA%}-X6vtxd8?R^^p~sGW-?`ybwpXJTYGzn(P^tkOaQ`d(Fk%sZga$ePGZ6_*Y*X( zsY!fd*|}>bzev){L>gh#DWKxN09`t1R@P|7^9% z>!yNreQ?jsete`_m5fs8%wgyYYsQ_m7ZksXYxeoMnWJl@*vW19L0hGbqMJ)f`FDf$jL27Q!(R$r zb*j=v81Tjwx*0$EE@4nAFW1->`>2~te6*~SKEJA8l?kZk#+!+MZ)NUGjPdDs4;9h6 zJ2hAaWO!CJjqmYd)$Le{BBv_K_`kVaKik!6{st~~wx5HFrsMn%eKAr1`;1aVz7fmB zsEIeB;ky3B!wDh0m_oObqVt=?cC)S8D-dOpB~Ov$cKex&7=}jJySjuUYS)viAtkkw zZnzv)DxJn7{>jzea!jh5b*Wu&&x7C!R z(d^0TpH9-S>z#_`HJ9}7et9wCXo)iv5nBxp4H%qNINx@IE8fpno$CCvP8 z6R%K8L7AZ5JB>+pQr%MT@3V}Fx4l?>L(}rUfH|@TR9kDzd$T2Rc1(>%jY=`AdYg%b z5eDFQ)-NYNR8de`J{e{3_GA;m3FHTVd*TIj{nxEPw+PQ+OCj!MQ1lZQ+gm^bul-y{ zuhvcF7fClQc!U^K^6$y}KKx3Mz+=CD|MBa?;)&aLt|@8pSFtqnc5~>erQAI=O#A#S z@&VI1E(;4Ie7mwj^`H#XxnogCXkSU(&tEDOKvJ}4Kt2madD5G=L1cZuc;aO>-6}jR zeUR`X<_Rt+3g0cxSwMZgkvGGCrzl+Hvr+U#c3)3*#>*`i^#Wems}41sgEg@dd!*aX zf9lSP@bDN;a?pUkX5xLSux9MknhY2-j^G!zqn_q1DJJK13-EHhpMHPa^9;SB#tGH8;U%9J=dZGS zx1bt(&0S&NIUC=2>MJ{acCemco31DFh;$BpyZ+@~cN6}>DZSV0*T!!1yRtjfxX6|C 
zOi$`IJl}g-7nr6@n*8uq2SfcUzjZ5tKf?^j(v56XA00e)MOyb$VR|#1K6>`%#dA`H z4MOTJrM1yGZ-gYh4Lf_L1M~1WU9wpEn%k?fl&{xEMm$oF2r|*#!WoPN+vR#1NC_{Q zK3a6EtvE@=>BA`B3}wut>_1;IFv_uq`u6S5gO<$yGY^_k?KdPNyC9&`Ajla^Hj%9< zk2B5_az(uzM8dSnZsJbY!y~yhAATCfpnT1JJl)Ab9$@md3!@)IheCad`ax$ko}TTl z;nE)f-H~!yrhS^~bmwz!ziuY2g6t=^V{Yf&rD}%0KV0Lm;Cxh)H4^8Ht9`?J1x@|c zf`jjJ)ckkxY1)!Ya?b&v_>@ON?un0IWa^X}g01g2-FWbnGw8Xj`7kr}BRgl+&~;X1 z_)n_>h7TkCv|%gF@3Nu1759{%Wg&>aoENIV_?5a-i#ki&I}R6~pMs4RJf(eqtWK=_ zOOVS48VcPT?w=L$O_A{eUzyJSe2fn~y>1X1ZSt({qWFFJ%ns30JNB&CT&m~6g zo-jZXo0AwMWaiJ5&>cuYKoX80fJ?^tIkHlPF-&A9C))r`qEN2ahV=uZP{pRL2 z@8~hy7xHVf^ZLWtoz}IE7ve)A3DWAU1qm^yv>J`ff?5z-lKa}|8StgkK=A~hE zncKSqMfjBF1-I8a&>gCn3a}F-%kXRgBRVpzW%${^gFVAIu})gxRGqvz9`alFwo6dh z*1ocoeeM6!jcn)W|5N{FpZNIC4H!)?QtZdoO9RZe*ET^+@GF@+tAt(SA#W{~oprf+CG>Q$ngL3f#xg+x_$8eln4t zh}p&m1p9}gYY}U6>mB`(!IZ@D(Vb?Vj6N)7OnLV!iB#U5kzx5d54PL8Je^mp48Sue zLy-$mWqZu}OG59m6(tInr^aW(Ux?muSNyeu7n&)Jl+5@}d2En?TAP8HTPFAr<`Q(& zf+>$FOU7GE(B9ETRO5lLhd0&?)qW8QdF zYaSx-OH2&I@u+LelbD2uezMTZ6%&Z*USeodu z+-$+^4xhv9U{+8*yE_?J7^!)?8SCP;c3r2rI@o`!`@92crKrqo9-AjHv$3_ex4ab@ z5}1p^g;3VN4yTltvtcmXSi+P6&dUhGP#sL^tQ@Hu#R)6|l- z_lqgVSag`M$y-s@Uaa>M<_9p9l*MXn2Bt>EeQ`X9q~9`JjeWL9>I(^B*%P-K#nd7K zwXwaEellh75B3jjM&5->(0?c>k z22fTZ70qlr*aBeJ8a2M>Pg%+%yEj27(BIi%I_Js3c$JE(FO~Lxvg3C$G#mHO06x=} zIMn=u0Q0@6b$liUv%J!}i^2{s7w*_1xMD#ZQNF zFK~;zpA2J}dC&Hfp{e)lCa|qyMZs@w_0MdVtI5 z27eoZ6hpMBY2Mi2??kj)EiMmE(NMhN=f^ABa+HJVDR9UNP(ZXYMNovudry@a0$8{B z78ExMer7e4{%Rv^uTRSq=zXn z1hcO51p!AQQ(7Zay4c8kzZ7S^KhOM>Q5exw83_!jm=Wx{nh~bVU{yEayE3u9+VCF~ zE302Dn_s%XqGF48_l$K{osLIE9#?8$Ro=Os4S)8~^NoUgE}I+A0hf!rfTCU9$&6>J zoVM(g(x9NGg*yGurh+tjQCDkj6zAC65EC9v(ELeR&>aW}f#zJ`iTdpf`4%NMD3NR3 zqRQ4$w%Cl8^WJaR8SPYx?^$yJMdC>QGg2(Z z>z!sl6#onWIuki#X$sM#ob=jbR|KA>LT`0u2+y#^s$Fq%Af`qfU?qkWB@i=`3w(bD z*a|seC56QBvmwp-jgj8g{78Old!UCYRO?95@fLMKiO}{VO=({i-mLDOgsOlv)HN^0 z0yny%lgF5XTCD}u|6`}K1hu;TWm~cZwFdoV^B)gtefpO@#u3z-_?Ml|8PuA4VE+MF zatF1R{N!Bj|G|oj@E~xW8vr?cyt4Hb81QgKP5q!l31RSSeugA zoT|X23Rt7{tL04a}Hn7WA`)zG-L z!ZDo!3&d1P|J34$jeY(L^icstdhTu0oFpOwI@t>2^#C;2anXo`el{ByRf#zE%()|ZE^2rC9W5D>e+4`VEvJkvSxF>?^ z?mu9_MzborJe$d`=YX8KPuAp-%Q+z1;A1Tlz*?{a@^%kMSsHGdc&IrS7U)zZEqh4T zMv+Dy*tj2()4=$CeEq^9SsC7g>y21DAlp3Jm;4rB67D-7bNFcJ=<$@jJs@A5i?z%K zYv~`xxeuhQ1vkw-)cg(>m{BFIdq_4!k+ghl@(#%#!1&U+dW%a3qiqK7$?A{jzjrX$ zRr``ZFyZ?5!NBh;?9o2ilt=F40a<81*0Kn!Wp)_n?;vGoxM|6u=1;J|fhy_yhh%dU zsm8}<{E%D;#>dasw;z&y_TxMdQR;F)UfI`Nz?3y`AbG_H(9p5sDJwW2Q!m6?egSJ) zAIA9yNI4vCT7RhdGc1s;S~}{GY>OgE`r7cM9K^X9jQ3fnSCTu>{AxeWgAwuf56I%x z+2ut{c7q3GTVD+wM;^Ii;D9`_PwoV3IUUA%5TyJXZrXjQxdaxdRxO=Bew%`^1kLuCfvvY z`I?`G&I2B~ssqVy7Go{vZh^IY4&yuuQvLxqT{zTS4GS!EsoJe&;W4D+~yjn(o}O~t*zHr($hh%)fhs< z6DWdAAP}-fl*WQKH-{su22;zo zxjeC*H}6gHAfvYBW8@0}@9pX3^@)OW!OIPV2HdDg&BV%`v9TcU?E-nW@ez3X#u@<_ zK&c@%@pJQWo2yWi&X-6w)Owp#4B0(nXT@p?GQYK%fi?eA10cNCa`SMbDoUfWnu9qt zA6>t1t>Bv#(P#`%6Vj_wK=K+Pj-&k%tLy6%WIv#?Ht3#3{@j#xIT7jwX7eC#L~c_m z5NjI*x{xhGmjEwn9ID(%1o+3G+aIC&5Nn#i#=O%PRK_?aa1BBf__<=U_QA?3WOL#;bP0mlTSLM_wsE@+ z8DoHS$mTz5GE6d1$4yl#D)^rwfSQJrit})j7&(QK`?EL9pLm&de~;Mgzx!oK$3!W? 
z-#`7mw9Wf_5#c|rk{;PXe+t==$o%IRBZ;S}RgfErVgkRr{k;;z`JXPozc?XPlZ1ia zK7TJ-9RHV3@V;s9Zl~+z?%@rwbN6xp`@7!Ph5hT~&Hk?a_g6OlYHTO|>TcuV@&ESb z4>OziPW2GA-)VXOweSB|Q{#+H7WyOpav!&h+y2tRFZ%!W5#T~zJ#hNF%KG;Rb#fyg aC5!$!(%(_|w-cW!vL#uZ-aP5|8T=pF!l*w0 delta 7720 zcmaKxc|26#|Ho(SF_sW1qM{PUTGGeRqGTpi6bh3zWX(Q?xyeVAq)Zu%Eku$=A^VIa zB-s;L#!mL#7_o$!_Q4j#gXNuo5>4YIqUb_V@jYphv!*$3rUHla{R!)F(1h5vL9=Bqk^< zh&LbAw{YSPIXGXXmX`4O$mI4cThMc8(9VbMtB%!>vF^;IZ8siCypXuslNxsC+&M;T zZ0=h(71x~QgMY&wsJn)XBV0FjeQFaz0iTMFch*8gPC08It83-sBAkFbRGsPvtuo40 zR|IYbez}%#cwYBs_K7j4pzmCk(suq;XWnI5qXT-yBDTTxPOrROaCq?MbKbV#VSiUS zi2h@V*N=Ejw7oBvdqN&v((lrBd7|tVkKb))Q2oL_ff|_PnoRD@$QfOv-?)5Qo%<|+ z4t9y3;yWyO^fyI}bJ%!=nLEv=+kIT{kI(X@>X%k6 zUcGhphOpc1<2j=t7ISr3ZSM^_kIV1s6e$9du8M{h?Z2ZRfjafA+1o`-J3{6&wmIwx$`H=B~S(8%%fYa-$zcy(`b1P=8G-gay}sYFKuq z`Kt$cy5U{S>4AOW&Y!+FqWp9M)?8cGirxrkR-`#Umag!6M`^Sffb{lWT|)v#&@6yA zAjx6;JFV{rK=`ZvxT$0u7A%iB!hgD8?B@-WH~YSQO02){ojl=`WO#fEB$C2&e||2S zPiOXi=CzZEy&92U`Mmq8&SSzRzaO6*d|EUF&5Kz1%Sj<$w$i3T{#n59&`U0lBCvf^y%Jg*^iBTj7B`5e(dxj@ZmKVmqw--z4%sF?Ucr+N%@I^9n$(aGqB3TihKN5 z4<54RE)jlZBqw~$^j5xSe?BwoPFE-mB6WLAM`q!{>*}fq}bOhjeV@bA+?(LS&@@C*zk7 zL9@k9Bj^+Q{_(NcQzrJ)Qo3=MQe{fbGP{`I)%p+9`rh`r+D0ccDk*3Z3EvxjQEdWt zd@1Sq>xQXthZ<_S$Yd$;XhUA>(vNe7$AksDPKx#4j2uTiTj5@_H)h_HZEI-Ov?~Sr z6Y_P?eBay)KysMpwlD9i%D#36cGX@?@PUV@BR^c}&lqaM@EP`;8vzwv8W@3P=8+Yq z>Qh5+|Ed7x#f7|Ur+0E$SR&jUEOdHJYIFGFk7O&Np8l-;l<63j3{nb`xI=t>{4Tcg zW2hSTK)OP^V6barp0kXf8kSXhyQkoMSuCbYE8^_MyK5nxUBtv^ry}DA0e<71Zmu_7 z&U7^v%SNwA?M}^W99ZZF9UI{<@FlspJ#6E5j}_}ze-JP}d*t(-2I;a}eaM({TQ5hP z_1(&hbP3`~N7?91`KALp#|{L)83E&k5ba?6!j7YD*T#)Dw$rVTKCjxo`pS&YHO>2I z2N?fr$I->zuGNc9OGDSx4AKFK4esNMGD#XcTOT1`Vd*X7iW9EYVEkyJsm^sQi6M*;7OLa7@xc{l8E_ zr*9xqHIEpV8p20>c0K7e?~$cSYkL|@t^0`69C|P(!iB9AsI`{P<3dRQ{K0i`9?Z_D zn%3x7SF1`&6PHS5oULlwY;S%z(6Ue$MtM9F1-|2V*Svk)(ch=^DfEMQCc0U4=GA9c z-k!=kVmuj|%fI?^OD=G2KenPPc{{pGm33LXGwPjK<7G3OUEst-N#6C(-(v(&cRRzZP)r)Lph{XhdBV>%)UC7Z zA>vxw!?J0N_WDnTRo=Sv?{7-aA&n759uUX|@~>|S`xlE4I#TM80@PbeoU*G5xqIJ&1;c+r`}|DXzRfUb;*I8 zd_)8Mep)K}d2u84=RDryjvm@KM^Wcil15-M%#ifuzK^@$E34(1{$6A==4&PDJ1;!l zIk|B3qgABGoo3W^tJ!gzNg$Zvb=((aBG_M&ZL*BteRD^keNX52&e%LprRP>&8^goH z_1ULhX@4(8w_d@OVeGzYa18+K7 zkm5|!sX#HE?56wui54Th^K6lzu?*{HY|LZ#-s|Mq%8pzQV=Cjk?kONLu}%hmbxppe zw6|8V@aHf0oVWM*gE|KM%iuRO=WLDdI?LvghGRx8tbIqGY;&{!xTGp{FR}5&8%Me5 z#%K8#PVP2*&qeUK*y)w1hRD^dJ=8I9fnTz8D%gn)0d?qcjP{EXSp^z@N z!T({6$K@R)F0C7g$*f`ka{1i@7O!l07y8F2EyQ5VJw2Wr8oH`|Ot+HJ?@?yw+#TF; zMF9Wxdw=u8otnN-eYE@&Qc$kdrD88@kQcddU(&bkc;6D|bHXR9c@CR@qJ3*S!V+;% z)yx!*RDD<9hT@x-y?4MEJ_@limK+rJ`0n&vLiZ52p4cwSXZDG&0blSh8-vEd200DV zk^5)B)Ryso^mPM6l)??KDs{PO9tmz8+JoQ?pBD;2kl$f;C z2RLg61dCk~fgHOtxXl+S)YNHK@F_JLxX+MVy{7JS{jSCyPXKr2;NFb>@*5<}SgAyv zSIsn1$_X0yxc0u#c7Xc^zkVP8iASXUX8hdIL?dCV-NSrORpm}S;cX1OdosSA$J#Fi z7BTY9vBJYwvQ+Dy;A6tv{mKkg(G8&q+1G-+qqohzolZK1+0M_n`J&39no*t4M~l8V z`&{Yk5MZmNs`*jStB#aW|3vkS_@nsSmtN*&SxG&C%_zCI3~lfMSL>MI!sva5)*ts9 z`?U(t+xtvY6=Y7HjY_(sYx_DdN`IyBZPaQiZ?wqxz4Wz|;q$52hUk;+j}-U4b5<+P zs}Z<+u>428o;b^yZn_*-q@gPJv~Kl=Qrk1#BbI;bKN9{VLa(T*m?pa0kB8sOu_#pr zaH>r3ZwJkg+hgb{$qfQ^3jB|UVjpi;0z9e}B!dSCP5~auzM8w8T|TNAG~IfXpoYHb znd=%RejI1M;K5(@MpMc$|$(lv{tGPiy)G2|X@RJmw>&8+G{`zmtmVP8Q&> zV^GyJ%R0pI0qS8{W%rXuza*8{cRKrhn&alFzYxkbD|cX96<{xVr3B9(I98pVVYsW< z85WsN8yt0V?LF#zu;NHx%3yTu^hfz4v8C5`@LP^Quy7Bd*2U@(d0t5k7Qd_ueDm#Qd;|86w=Ioj4s)=2F(*rR!<M6oRim-+f#Fis-4F|&{U=7}1dXJJr^Hx7J7QDjgnH+GiATqX6j-QoDXqu)9o6bI z5iWR(#pHg;_lA2(VQrOs_LE@cDgJ|JG}Wcfz7eKU`)2#I z*+$95lspL(dNJXO`I-1({e`UULHRG4T)&0yr&v{Puw~)d;{PQaYbn{e8F;Dgc=w+O ztXGP{1-Rt?=-oxK&1!61YAG_8W-!9lQ~H3Ca#_Phb7>m0CX6{dD_TTQF9KM9W884Y 
z>z>3GgFr4M22=88p3I_jj0FcTuk{n`s|ZAZ62R&=Z<(nCOAv22<&ZMu1!|Y~KM2;CI0ke~Wj4p#)tjWE!!f@A=r>qTQSwq-x@lwR< z7|^(P!dz630J}vkVCp7*EPrQ+>{GS{U@Lu$?SMPlT%a{Bs#z_9~kBlzx@`0vpk!&dI?n34uZTqM2m z<$GWRjx-Qj0~W)WSsTpw;D9U=>2Ax(+8iQ zc+$i|10MZAG$gC@(s(y3BXn-S*B&d!X^Mp$`mA7|re^W6{(MzJh=9>vzD04SN8j?`qeb`TA^O;@P>-H*=L%3J%B#~NmqeYdA z^h=T?sao~3*u8QH$VY#(n&$t!ixf6MPGx_O9t=I-bj$iM4hMfSk3k$Z$Co~+DkjTgx*)1FYH>gZ-SF**0JE_>93pUfm_=Xr1H z3NpgJ-UHv8@8A5HLzbCMjAx#{OwT^dwB7-{e{5)7oAT~4wsns@K@@va!3U`D7UA?!iqk~m$|(hj5LX^8@h@-%?T~v1HPFAxfzQ$ACgL$MPe$e2 z^3yXZTu7~*0{`S4xse}t|B>(FMrz6bkvHZ+e%$v*zK92@b>P2wAU*RSFY=@Ee+$wx zalA+^wSV$=L^CMS>-0Bq=M8Vu7MjZNnv#9K~ciky7y&`P1t>zp8^cR{b;1u5kQ zz0RfU4u7Tkj;Wx=RD#D;LdI0W##A1Sd8&;W#}X`K3AbViKCy(qV+k)}fsyZC7#Aws zl?r#G!riHG4=UVK4+@<)h$HCW2u3)9C5~_lNASUoEO=p1RCpj2j;6wcsPJGa9HTd< zKEWSHkc=ZJ#u0Sl2u5)P%eWDUHzu44kD$UIQQ?oN@JK5BiQaq7iHSJEN*sYFo*)@d zP>d(&#E%^E#sDv=@K`DwM}@~x;qg>>f}X23ZsnM_LsI;vK17@obTVqZ`kc1OIk;8~ zF7LZ-yG2u2?wy!tFK#KSJLHvQNaKaDO^SP!8_wBRyL+36>PpArO-@~kd0zfEbv%ab z0fijH{+*$?AGJ-TSqU~`+kNlAHWQPxOX78jh(MsJL}TLorZtsZkK4PMZFzKBU9Z46$qPXbp37;*b?eGo;gm zB-%J+GanPtGt#kX9CFF87;+>Oa+X8RX;Xsjw(E}C(wvS6gq3Q@n{p%r=Wr_#J`QKL z$T84KwaMzZE%G6|c5Hi?sWXRcU7CS=E!5G@Arsl;^U_k89CG|*3^^VOG3Ah5I+S3# zcHQ_b%~^;*-%^dgw#eqVm31EnAb5+M3Y`?5sunoH(QIVbZt*=#?JkG>jIB9a2>YEw zhWeU_T1dxcbI7Qv7;+XAV!37KP9>OyU3d1D=C_Ez+)@pzE%FuIij%KH;U*YpK*YU= zPR>qM2W*kA+qFyl2orYUkiW1szZ0_f!67HJ$ycSNayew1=@@b$6mo6L&Oel39(LVD zTbkb^0>?@X}bE?7CDG*=dQ5XEi$MqLpo1LqMJh&@iP&1 zkdDpcko9I_$Td*NjV(KOE5Rb|x@)&Ie?SDvm1#t8k#FHv&iOeg;W-2BD|9k@~83R(1X$oBpwqJGj+WgPO)xft>+6ym>S=YA#FfL-_emgaIqU~QSk2#1Wl zr}{YmnTiYeUl#>%{)i?R{84k1bIODBxpf+N@)fk%DKEX#yx2i>X8!)uf#CcTr|h`c z7jdq9?73xdexC6UF7}08EgxRSi&cn%=6hrJ@nG{VXXQVOkp}}f+@$8bs3>*Ofe>e_ zS#iU!mVT4wIov`HMx$wmbfa~V80Om0T+_%r*?`96sD* z4k}9DSOcaTD=O$dtG-|9R70G3WwSS_xMesVKAL*{1vJ00JsMkG9 z^KTv#ncKtiWX<~%_K+$n%}ESu)IBPr!;TKj4KdP5rL4s!d16KIa#>?xN%^%sb5Pv) z_~Zt#*w9N0uMfwU)i)~J4+O1d0YS@C1EdOcIb-ZDF|e4rLGUObxFbo-CWilj9g{w` zit=WSr{hkM$#(WDSy^k#E(=!FDduc{hs)JuGs@80Zko`-aA$uP5^0~M8|`<6vM@&w zT@_-@b*eCi2q|Fp{GK({X-Fc4Hug?gk%;uA8W?5}iD69hm(kbiY2k(yfHsE8+6-_d z8?~0^OuI!P-+v#AqT+h98kx-T#w-dF>hYyzsbh@D z^2`R%w3^xJM#cas4cR*;W01IzA8lWWOF=@X%P1_GPY80LJeZP0S;(0mfRO=8hYD@s zKrosz)j5mLF{d&0NM+O(b%Sgii-g|5b##y(vBQi5FuOT+n2`(xS_UZ84fB>Rg26zR zFN2Zd#awx@++*?HoWi6T-bUE~22LoOAB6{@DNi?;>GL&Z8$-y*{|q}%)<|SHW&TTe z7Ha{#(5P(?tl0TGuuzkqW9l08kV&A#C9n0Y3G~M2U<%V&9a!%_1FSENO{6duRs@04 zdVuA5DtaRa{UUmPaW^>~6C^qW>#3()Ag2RCq9d@ndWt9cCetgr1uDIc%dutEU53t1 zYOl`+K4+nWS=Bu>(GrT)3|3+HLmAlY;P(y~Y^c6IbaUd3Y^-;rU&Btau*BV|Vt{51 zDSimqv@m~X9j8t2&3Bg#<^i{0XB0xfGWqU%=XJo!Soi0C7TOqT)rLanZxD0}d3#{BxM9{+W~o+ z$jy-s=8^>ccVp8gEI}t>jvaY7bAhma@rO6}Hq+yei?H`NZT{K!!A{4k$^S0;bJ=z$ zyV*a-2dr+w!Oi9l3A@3&vB3$-n=LlikvPjplK)g^r;M<*3H!JGe(L7B2X--G2iKY? 
zRsb(A@VoH$ngCWEFU1wU9czr=qwu?I=m+RNj@@*TeM result = readValue(mvcResult, new TypeReference>() { + final List result = readValue(mvcResult, new TypeReference<>() { }); assertThat(result, containsSameEntities(vocabularies)); } @@ -386,7 +386,7 @@ void getTransitiveImportsReturnsCollectionOfImportIdentifiersRetrievedFromServic final MvcResult mvcResult = mockMvc.perform(get(PATH + "/" + FRAGMENT + "/imports")).andExpect(status().isOk()) .andReturn(); - final Set result = readValue(mvcResult, new TypeReference>() { + final Set result = readValue(mvcResult, new TypeReference<>() { }); assertEquals(imports, result); verify(serviceMock).getReference(VOCABULARY_URI); @@ -403,7 +403,7 @@ void getTransitiveImportsReturnsEmptyCollectionWhenNoImportsAreFoundForVocabular final MvcResult mvcResult = mockMvc.perform(get(PATH + "/" + FRAGMENT + "/imports")).andExpect(status().isOk()) .andReturn(); - final Set result = readValue(mvcResult, new TypeReference>() { + final Set result = readValue(mvcResult, new TypeReference<>() { }); assertNotNull(result); assertTrue(result.isEmpty()); @@ -438,7 +438,7 @@ void getHistoryReturnsListOfChangeRecordsForSpecifiedVocabulary() throws Excepti mockMvc.perform(get(PATH + "/" + FRAGMENT + "/history")).andExpect(status().isOk()) .andReturn(); final List result = - readValue(mvcResult, new TypeReference>() { + readValue(mvcResult, new TypeReference<>() { }); assertNotNull(result); assertEquals(records, result); @@ -463,7 +463,7 @@ void getHistoryOfContentReturnsListOfAggregatedChangeObjectsForTermsInSpecifiedV .andExpect(status().isOk()) .andReturn(); final List result = - readValue(mvcResult, new TypeReference>() { + readValue(mvcResult, new TypeReference<>() { }); assertNotNull(result); assertEquals(changes, result); @@ -515,7 +515,7 @@ void getSnapshotsReturnsListOfVocabularySnapshotsWhenFilterInstantIsNotProvided( get(PATH + "/" + FRAGMENT + "/versions").accept(MediaType.APPLICATION_JSON_VALUE)) .andExpect(status().isOk()) .andReturn(); - final List result = readValue(mvcResult, new TypeReference>() { + final List result = readValue(mvcResult, new TypeReference<>() { }); assertThat(result, containsSameEntities(snapshots)); verify(serviceMock).findSnapshots(vocabulary); From 16bc814da3fce5ed46c18a3bd4131a04078d5020 Mon Sep 17 00:00:00 2001 From: Martin Ledvinka Date: Tue, 3 Dec 2024 18:43:41 +0100 Subject: [PATCH 48/49] [Upd] Update Spring Boot to 3.3.5 and JOPA to 2.2.0. --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 6a644e94d..c5e9873c2 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ org.springframework.boot spring-boot-starter-parent - 3.3.4 + 3.3.5 termit @@ -31,7 +31,7 @@ 3.0.0 1.6.2 2.6.0 - 2.1.0 + 2.2.0 0.15.0 From c1efa5edb58c06906c6233f6bea869841f9c58eb Mon Sep 17 00:00:00 2001 From: Martin Ledvinka Date: Tue, 3 Dec 2024 18:44:40 +0100 Subject: [PATCH 49/49] [3.4.0] Bump version. --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index c5e9873c2..14bbb3f47 100644 --- a/pom.xml +++ b/pom.xml @@ -11,7 +11,7 @@ termit - 3.3.0 + 3.4.0 TermIt Terminology manager based on Semantic Web technologies. ${packaging}