diff --git a/doc/setup.md b/doc/setup.md index 839fe0b3c..af068c823 100644 --- a/doc/setup.md +++ b/doc/setup.md @@ -39,6 +39,7 @@ by the application: * `lucene` - decides whether Lucene text indexing is enabled and should be used in full text search queries. * `admin-registration-only` - decides whether new users can be registered only by application admin, or whether anyone can register. * `no-cache` - disables Ehcache, which is used to cache lists of resources and vocabularies for faster retrieval, and persistence cache. +* `development` - indicates that the application is running in development. This, for example, means that the mail server does not need to be configured. The `lucene` Spring profile is activated automatically by the `graphdb` Maven. `admin-registration-only` and `no-cache` have to be added either in `application.yml` directly, or one can pass the parameter to Maven build, e.g.: diff --git a/pom.xml b/pom.xml index 6a644e94d..14bbb3f47 100644 --- a/pom.xml +++ b/pom.xml @@ -7,11 +7,11 @@ org.springframework.boot spring-boot-starter-parent - 3.3.4 + 3.3.5 termit - 3.3.0 + 3.4.0 TermIt Terminology manager based on Semantic Web technologies. ${packaging} @@ -31,7 +31,7 @@ 3.0.0 1.6.2 2.6.0 - 2.1.0 + 2.2.0 0.15.0 diff --git a/src/main/java/cz/cvut/kbss/termit/dto/filter/ChangeRecordFilterDto.java b/src/main/java/cz/cvut/kbss/termit/dto/filter/ChangeRecordFilterDto.java new file mode 100644 index 000000000..f646587b4 --- /dev/null +++ b/src/main/java/cz/cvut/kbss/termit/dto/filter/ChangeRecordFilterDto.java @@ -0,0 +1,106 @@ +package cz.cvut.kbss.termit.dto.filter; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import cz.cvut.kbss.termit.util.Utils; + +import java.net.URI; +import java.util.Objects; + +/** + * Represents parameters for filtering vocabulary content changes. 
+ */ +public class ChangeRecordFilterDto { + private String assetLabel = ""; + private String changedAttributeName = ""; + private String authorName = ""; + private URI changeType = null; + + public ChangeRecordFilterDto() { + } + + public ChangeRecordFilterDto(String changedAttributeName, String authorName, URI changeType) { + this.changedAttributeName = changedAttributeName; + this.authorName = authorName; + this.changeType = changeType; + } + + public ChangeRecordFilterDto(String assetLabel, String changedAttributeName, String authorName, URI changeType) { + this.assetLabel = assetLabel; + this.changedAttributeName = changedAttributeName; + this.authorName = authorName; + this.changeType = changeType; + } + + public String getAssetLabel() { + return assetLabel; + } + + public void setAssetLabel(String assetLabel) { + this.assetLabel = assetLabel; + } + + public String getChangedAttributeName() { + return changedAttributeName; + } + + public void setChangedAttributeName(String changedAttributeName) { + this.changedAttributeName = changedAttributeName; + } + + public String getAuthorName() { + return authorName; + } + + public void setAuthorName(String authorName) { + this.authorName = authorName; + } + + public URI getChangeType() { + return changeType; + } + + public void setChangeType(URI changeType) { + this.changeType = changeType; + } + + /** + * @return true when all attributes are empty or null + */ + @JsonIgnore + public boolean isEmpty() { + return Utils.isBlank(assetLabel) && + Utils.isBlank(changedAttributeName) && + Utils.isBlank(authorName) && + changeType == null; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof ChangeRecordFilterDto that)) return false; + return Objects.equals(assetLabel, that.assetLabel) && + Objects.equals(changedAttributeName, that.changedAttributeName) && + Objects.equals(authorName, that.authorName) && + Objects.equals(changeType, that.changeType); + } + + @Override + 
public int hashCode() { + return Objects.hash(assetLabel, changedAttributeName, authorName, changeType); + } + + + /** + * Constants for the Open API documentation of the REST API. + */ + public static final class ApiDoc { + public static final String TERM_NAME_DESCRIPTION = "Name of the term used for filtering."; + public static final String CHANGE_TYPE_DESCRIPTION = "Type of the change used for filtering."; + public static final String AUTHOR_NAME_DESCRIPTION = "Name of the author of the change used for filtering."; + public static final String CHANGED_ATTRIBUTE_DESCRIPTION = "Name of the changed attribute used for filtering."; + + private ApiDoc() { + throw new AssertionError(); + } + } +} diff --git a/src/main/java/cz/cvut/kbss/termit/event/BeforeAssetDeleteEvent.java b/src/main/java/cz/cvut/kbss/termit/event/BeforeAssetDeleteEvent.java new file mode 100644 index 000000000..ddbdee1e0 --- /dev/null +++ b/src/main/java/cz/cvut/kbss/termit/event/BeforeAssetDeleteEvent.java @@ -0,0 +1,19 @@ +package cz.cvut.kbss.termit.event; + +import cz.cvut.kbss.termit.model.Asset; +import org.springframework.context.ApplicationEvent; + +/** + * Event published before an asset is deleted. 
+ */ +public class BeforeAssetDeleteEvent extends ApplicationEvent { + final Asset asset; + public BeforeAssetDeleteEvent(Object source, Asset asset) { + super(source); + this.asset = asset; + } + + public Asset getAsset() { + return asset; + } +} diff --git a/src/main/java/cz/cvut/kbss/termit/exception/UnsupportedTextAnalysisLanguageException.java b/src/main/java/cz/cvut/kbss/termit/exception/UnsupportedTextAnalysisLanguageException.java new file mode 100644 index 000000000..3ddb95c60 --- /dev/null +++ b/src/main/java/cz/cvut/kbss/termit/exception/UnsupportedTextAnalysisLanguageException.java @@ -0,0 +1,14 @@ +package cz.cvut.kbss.termit.exception; + +import cz.cvut.kbss.termit.model.Asset; +import cz.cvut.kbss.termit.model.resource.File; + +/** + * Indicates that a language is not supported by the text analysis service. + */ +public class UnsupportedTextAnalysisLanguageException extends TermItException { + + public UnsupportedTextAnalysisLanguageException(String message, Asset asset) { + super(message, asset instanceof File ? 
"error.annotation.file.unsupportedLanguage" : "error.annotation.term.unsupportedLanguage"); + } +} diff --git a/src/main/java/cz/cvut/kbss/termit/model/TextAnalysisRecord.java b/src/main/java/cz/cvut/kbss/termit/model/TextAnalysisRecord.java index 837e55280..fe8dfe13d 100644 --- a/src/main/java/cz/cvut/kbss/termit/model/TextAnalysisRecord.java +++ b/src/main/java/cz/cvut/kbss/termit/model/TextAnalysisRecord.java @@ -17,10 +17,12 @@ */ package cz.cvut.kbss.termit.model; +import cz.cvut.kbss.jopa.model.annotations.OWLAnnotationProperty; import cz.cvut.kbss.jopa.model.annotations.OWLClass; import cz.cvut.kbss.jopa.model.annotations.OWLDataProperty; import cz.cvut.kbss.jopa.model.annotations.OWLObjectProperty; import cz.cvut.kbss.jopa.model.annotations.ParticipationConstraints; +import cz.cvut.kbss.jopa.vocabulary.DC; import cz.cvut.kbss.termit.model.resource.Resource; import cz.cvut.kbss.termit.util.Vocabulary; @@ -44,12 +46,16 @@ public class TextAnalysisRecord extends AbstractEntity { @OWLObjectProperty(iri = Vocabulary.s_p_ma_slovnik_pro_analyzu) private Set vocabularies; + @OWLAnnotationProperty(iri = DC.Terms.LANGUAGE, simpleLiteral = true) + private String language; + public TextAnalysisRecord() { } - public TextAnalysisRecord(Instant date, Resource analyzedResource) { + public TextAnalysisRecord(Instant date, Resource analyzedResource, String language) { this.date = date; this.analyzedResource = analyzedResource; + this.language = language; } public Instant getDate() { @@ -76,6 +82,14 @@ public void setVocabularies(Set vocabularies) { this.vocabularies = vocabularies; } + public String getLanguage() { + return language; + } + + public void setLanguage(String language) { + this.language = language; + } + @Override public boolean equals(Object o) { if (this == o) { @@ -86,12 +100,13 @@ public boolean equals(Object o) { } return Objects.equals(date, that.date) && Objects.equals(analyzedResource, that.analyzedResource) && - Objects.equals(vocabularies, 
that.vocabularies); + Objects.equals(vocabularies, that.vocabularies) && + Objects.equals(language, that.language); } @Override public int hashCode() { - return Objects.hash(date, analyzedResource, vocabularies); + return Objects.hash(date, analyzedResource, vocabularies, language); } @Override @@ -100,6 +115,7 @@ public String toString() { "date=" + date + ",analyzedResource=" + analyzedResource + ",vocabularies=" + vocabularies + + ", language=" + language + "}"; } } diff --git a/src/main/java/cz/cvut/kbss/termit/model/changetracking/DeleteChangeRecord.java b/src/main/java/cz/cvut/kbss/termit/model/changetracking/DeleteChangeRecord.java new file mode 100644 index 000000000..1d2cdc98c --- /dev/null +++ b/src/main/java/cz/cvut/kbss/termit/model/changetracking/DeleteChangeRecord.java @@ -0,0 +1,84 @@ +package cz.cvut.kbss.termit.model.changetracking; + +import cz.cvut.kbss.jopa.model.MultilingualString; +import cz.cvut.kbss.jopa.model.annotations.OWLAnnotationProperty; +import cz.cvut.kbss.jopa.model.annotations.OWLClass; +import cz.cvut.kbss.jopa.model.annotations.ParticipationConstraints; +import cz.cvut.kbss.jopa.vocabulary.RDFS; +import cz.cvut.kbss.termit.model.Asset; +import cz.cvut.kbss.termit.util.Vocabulary; +import jakarta.annotation.Nonnull; + +import java.util.Objects; + +/** + * Represents a record of asset deletion. + */ +@OWLClass(iri = Vocabulary.s_c_smazani_entity) +public class DeleteChangeRecord extends AbstractChangeRecord { + @ParticipationConstraints(nonEmpty = true) + @OWLAnnotationProperty(iri = RDFS.LABEL) + private MultilingualString label; + + /** + * Creates a new instance. 
+ * @param changedEntity the changed asset + * @throws IllegalArgumentException If the label type is not String or MultilingualString + */ + public DeleteChangeRecord(Asset changedEntity) { + super(changedEntity); + + if (changedEntity.getLabel() instanceof String stringLabel) { + this.label = MultilingualString.create(stringLabel, null); + } else if (changedEntity.getLabel() instanceof MultilingualString multilingualLabel) { + this.label = multilingualLabel; + } else { + throw new IllegalArgumentException("Unsupported label type: " + changedEntity.getLabel().getClass()); + } + } + + public DeleteChangeRecord() { + super(); + } + + public MultilingualString getLabel() { + return label; + } + + public void setLabel(MultilingualString label) { + this.label = label; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof DeleteChangeRecord that)) { + return false; + } + if (!super.equals(o)) { + return false; + } + return Objects.equals(label, that.label); + } + + @Override + public String toString() { + return "DeleteChangeRecord{" + + super.toString() + + ", label=" + label + + '}'; + } + + @Override + public int compareTo(@Nonnull AbstractChangeRecord o) { + if (o instanceof UpdateChangeRecord) { + return 1; + } + if (o instanceof PersistChangeRecord) { + return 1; + } + return super.compareTo(o); + } +} diff --git a/src/main/java/cz/cvut/kbss/termit/model/changetracking/PersistChangeRecord.java b/src/main/java/cz/cvut/kbss/termit/model/changetracking/PersistChangeRecord.java index 6fccde3d6..ed1c675af 100644 --- a/src/main/java/cz/cvut/kbss/termit/model/changetracking/PersistChangeRecord.java +++ b/src/main/java/cz/cvut/kbss/termit/model/changetracking/PersistChangeRecord.java @@ -42,6 +42,9 @@ public int compareTo(@Nonnull AbstractChangeRecord o) { if (o instanceof UpdateChangeRecord) { return -1; } + if (o instanceof DeleteChangeRecord) { + return -1; + } return super.compareTo(o); } } diff --git 
a/src/main/java/cz/cvut/kbss/termit/model/changetracking/UpdateChangeRecord.java b/src/main/java/cz/cvut/kbss/termit/model/changetracking/UpdateChangeRecord.java index e1220f9f4..93074f63e 100644 --- a/src/main/java/cz/cvut/kbss/termit/model/changetracking/UpdateChangeRecord.java +++ b/src/main/java/cz/cvut/kbss/termit/model/changetracking/UpdateChangeRecord.java @@ -105,6 +105,9 @@ public int compareTo(@Nonnull AbstractChangeRecord o) { if (o instanceof PersistChangeRecord) { return 1; } + if (o instanceof DeleteChangeRecord) { + return -1; + } return super.compareTo(o); } } diff --git a/src/main/java/cz/cvut/kbss/termit/model/resource/File.java b/src/main/java/cz/cvut/kbss/termit/model/resource/File.java index 26b45f940..c16d62a2a 100644 --- a/src/main/java/cz/cvut/kbss/termit/model/resource/File.java +++ b/src/main/java/cz/cvut/kbss/termit/model/resource/File.java @@ -21,16 +21,16 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import cz.cvut.kbss.jopa.model.annotations.FetchType; import cz.cvut.kbss.jopa.model.annotations.Inferred; +import cz.cvut.kbss.jopa.model.annotations.OWLAnnotationProperty; import cz.cvut.kbss.jopa.model.annotations.OWLClass; import cz.cvut.kbss.jopa.model.annotations.OWLObjectProperty; import cz.cvut.kbss.jopa.model.annotations.Types; +import cz.cvut.kbss.jopa.vocabulary.DC; import cz.cvut.kbss.jsonld.annotation.JsonLdAttributeOrder; -import cz.cvut.kbss.termit.exception.TermItException; import cz.cvut.kbss.termit.model.util.SupportsStorage; import cz.cvut.kbss.termit.service.IdentifierResolver; import cz.cvut.kbss.termit.util.Vocabulary; -import java.lang.reflect.Field; import java.util.Objects; import java.util.Set; @@ -43,6 +43,9 @@ public class File extends Resource implements SupportsStorage { @OWLObjectProperty(iri = Vocabulary.s_p_je_casti_dokumentu, fetch = FetchType.EAGER) private Document document; + @OWLAnnotationProperty(iri = DC.Terms.LANGUAGE, simpleLiteral = true) + private String language; + @Types private Set 
types; @@ -54,6 +57,14 @@ public void setDocument(Document document) { this.document = document; } + public String getLanguage() { + return language; + } + + public void setLanguage(String language) { + this.language = language; + } + public Set getTypes() { return types; } @@ -73,15 +84,11 @@ public boolean equals(Object o) { return Objects.equals(getUri(), file.getUri()); } - @Override - public int hashCode() { - return Objects.hash(getUri()); - } - @Override public String toString() { return "File{" + - super.toString() + (document != null ? "document=<" + document.getUri() + ">" : "") + '}'; + super.toString() + (language != null ? "@" + language : "") + + (document != null ? "document=<" + document.getUri() + ">" : "") + '}'; } /** @@ -109,12 +116,4 @@ public String getDirectoryName() { return IdentifierResolver.normalizeToAscii(labelPart) + '_' + getUri().hashCode(); } } - - public static Field getDocumentField() { - try { - return File.class.getDeclaredField("document"); - } catch (NoSuchFieldException e) { - throw new TermItException("Fatal error! 
Unable to retrieve \"document\" field.", e); - } - } } diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/BaseAssetDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/BaseAssetDao.java index bb6e26400..831961df5 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/BaseAssetDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/BaseAssetDao.java @@ -21,6 +21,7 @@ import cz.cvut.kbss.termit.dto.RecentlyCommentedAsset; import cz.cvut.kbss.termit.event.AssetPersistEvent; import cz.cvut.kbss.termit.event.AssetUpdateEvent; +import cz.cvut.kbss.termit.event.BeforeAssetDeleteEvent; import cz.cvut.kbss.termit.exception.PersistenceException; import cz.cvut.kbss.termit.model.Asset; import cz.cvut.kbss.termit.model.User; @@ -65,6 +66,12 @@ public T update(T entity) { return super.update(entity); } + @Override + public void remove(T entity) { + eventPublisher.publishEvent(new BeforeAssetDeleteEvent(this, entity)); + super.remove(entity); + } + /** * Finds unique last commented assets. * diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/TermDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/TermDao.java index 052035b25..999c2d4c4 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/TermDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/TermDao.java @@ -91,8 +91,10 @@ protected URI labelProperty() { @Override public Optional find(URI id) { try { - final Optional result = Optional.ofNullable( - em.find(Term.class, id, descriptorFactory.termDescriptor(resolveVocabularyId(id)))); + final Optional result = findTermVocabulary(id).map(vocabulary -> + em.find(Term.class, id, + descriptorFactory.termDescriptor( + vocabulary))); result.ifPresent(this::postLoad); return result; } catch (RuntimeException e) { @@ -100,14 +102,21 @@ public Optional find(URI id) { } } - private URI resolveVocabularyId(URI termId) { + /** + * Finds vocabulary to which a term with the specified id belongs. 
+ * + * @param termId Term identifier + * @return Vocabulary identifier wrapped in {@code Optional} + */ + public Optional findTermVocabulary(URI termId) { + Objects.requireNonNull(termId); try { - return em.createNativeQuery("SELECT DISTINCT ?v WHERE { ?t ?inVocabulary ?v . }", URI.class) - .setParameter("t", termId) - .setParameter("inVocabulary", TERM_FROM_VOCABULARY) - .getSingleResult(); + return Optional.of(em.createNativeQuery("SELECT DISTINCT ?v WHERE { ?t ?inVocabulary ?v . }", URI.class) + .setParameter("t", termId) + .setParameter("inVocabulary", TERM_FROM_VOCABULARY) + .getSingleResult()); } catch (NoResultException | NoUniqueResultException e) { - throw new PersistenceException("Unable to resolve term vocabulary.", e); + return Optional.empty(); } } diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java index d0cd42ea8..42733ab3f 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDao.java @@ -19,7 +19,6 @@ import cz.cvut.kbss.jopa.model.EntityManager; import cz.cvut.kbss.jopa.model.query.Query; -import cz.cvut.kbss.jopa.model.query.TypedQuery; import cz.cvut.kbss.jopa.vocabulary.DC; import cz.cvut.kbss.jopa.vocabulary.SKOS; import cz.cvut.kbss.termit.asset.provenance.ModifiesData; @@ -28,8 +27,10 @@ import cz.cvut.kbss.termit.dto.PrefixDeclaration; import cz.cvut.kbss.termit.dto.RdfsStatement; import cz.cvut.kbss.termit.dto.Snapshot; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.event.AssetPersistEvent; import cz.cvut.kbss.termit.event.AssetUpdateEvent; +import cz.cvut.kbss.termit.event.BeforeAssetDeleteEvent; import cz.cvut.kbss.termit.event.RefreshLastModifiedEvent; import cz.cvut.kbss.termit.event.VocabularyWillBeRemovedEvent; import cz.cvut.kbss.termit.exception.PersistenceException; @@ -42,12 +43,13 @@ import 
cz.cvut.kbss.termit.model.validation.ValidationResult; import cz.cvut.kbss.termit.persistence.context.DescriptorFactory; import cz.cvut.kbss.termit.persistence.context.VocabularyContextMapper; +import cz.cvut.kbss.termit.persistence.dao.changetracking.ChangeRecordDao; import cz.cvut.kbss.termit.persistence.snapshot.AssetSnapshotLoader; import cz.cvut.kbss.termit.persistence.validation.VocabularyContentValidator; import cz.cvut.kbss.termit.service.snapshot.SnapshotProvider; import cz.cvut.kbss.termit.util.Configuration; import cz.cvut.kbss.termit.util.Utils; -import cz.cvut.kbss.termit.util.throttle.CacheableFuture; +import cz.cvut.kbss.termit.util.throttle.ThrottledFuture; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -61,12 +63,12 @@ import java.time.Instant; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.Set; +import java.util.stream.Stream; import static cz.cvut.kbss.termit.util.Constants.DEFAULT_PAGE_SIZE; import static cz.cvut.kbss.termit.util.Constants.SKOS_CONCEPT_MATCH_RELATIONSHIPS; @@ -87,6 +89,7 @@ public class VocabularyDao extends BaseAssetDao "} GROUP BY ?date HAVING (?cnt > 0) ORDER BY ?date"; private static final String REMOVE_GLOSSARY_TERMS_QUERY_FILE = "remove/removeGlossaryTerms.ru"; + private final ChangeRecordDao changeRecordDao; private volatile long lastModified; @@ -96,11 +99,13 @@ public class VocabularyDao extends BaseAssetDao @Autowired public VocabularyDao(EntityManager em, Configuration config, DescriptorFactory descriptorFactory, - VocabularyContextMapper contextMapper, ApplicationContext context) { + VocabularyContextMapper contextMapper, ApplicationContext context, + ChangeRecordDao changeRecordDao) { super(Vocabulary.class, em, config.getPersistence(), descriptorFactory); this.contextMapper = 
contextMapper; refreshLastModified(); this.context = context; + this.changeRecordDao = changeRecordDao; } @Override @@ -218,16 +223,20 @@ public Vocabulary update(Vocabulary entity) { /** * Forcefully removes the specified vocabulary. *

- * This deletes the whole graph of the vocabulary, all terms in the vocabulary's glossary and then removes the vocabulary itself. Extreme caution - * should be exercised when using this method. All relevant data, including documents and files, will be dropped. + * This deletes the whole graph of the vocabulary, all terms in the vocabulary's glossary and then removes the + * vocabulary itself. Extreme caution should be exercised when using this method. All relevant data, including + * documents and files, will be dropped. *

- * Publishes {@link VocabularyWillBeRemovedEvent} before the actual removal to allow other services to clean up related resources (e.g., delete the document). + * Publishes {@link VocabularyWillBeRemovedEvent} before the actual removal to allow other services to clean up + * related resources (e.g., delete the document). + * * @param entity The vocabulary to delete */ @ModifiesData @Override public void remove(Vocabulary entity) { eventPublisher.publishEvent(new VocabularyWillBeRemovedEvent(this, entity.getUri())); + eventPublisher.publishEvent(new BeforeAssetDeleteEvent(this, entity)); this.removeVocabulary(entity, true); } @@ -236,9 +245,9 @@ public void remove(Vocabulary entity) { *

* Forcefully removes the specified vocabulary. *

- * This deletes all terms in the vocabulary's glossary and then removes the vocabulary itself. - * Extreme caution should be exercised when using this method, - * as it does not check for any references or usage and just drops all the relevant data. + * This deletes all terms in the vocabulary's glossary and then removes the vocabulary itself. Extreme caution + * should be exercised when using this method, as it does not check for any references or usage and just drops all + * the relevant data. *

* The document is not removed. */ @@ -248,19 +257,19 @@ public void removeVocabularyKeepDocument(Vocabulary entity) { /** *

- * Does not publish the {@link VocabularyWillBeRemovedEvent}.
- * You should use {@link #remove(Vocabulary)} instead. + * Does not publish the {@link VocabularyWillBeRemovedEvent}.
You should use {@link #remove(Vocabulary)} + * instead. *

* Forcefully removes the specified vocabulary. *

* This deletes all terms in the vocabulary's glossary and then removes the vocabulary itself. Extreme caution * should be exercised when using this method, as it does not check for any references or usage and just drops all * the relevant data. - * @param entity The vocabulary to delete - * @param dropGraph if false, - * executes {@code src/main/resources/query/remove/removeGlossaryTerms.ru} removing terms, - * their relations, model, glossary and vocabulary itself, keeps the document. - * When true, the whole vocabulary graph is dropped. + * + * @param entity The vocabulary to delete + * @param dropGraph if false, executes {@code src/main/resources/query/remove/removeGlossaryTerms.ru} removing + * terms, their relations, model, glossary and vocabulary itself, keeps the document. When true, + * the whole vocabulary graph is dropped. */ private void removeVocabulary(Vocabulary entity, boolean dropGraph) { Objects.requireNonNull(entity); @@ -268,7 +277,7 @@ private void removeVocabulary(Vocabulary entity, boolean dropGraph) { try { final URI vocabularyContext = contextMapper.getVocabularyContext(entity.getUri()); - if(dropGraph) { + if (dropGraph) { // drops whole named graph em.createNativeQuery("DROP GRAPH ?context") .setParameter("context", vocabularyContext) @@ -317,8 +326,8 @@ public Optional findGlossary(URI uri) { } /** - * Checks whether terms from the {@code subjectVocabulary} reference (as parent terms) any terms from the {@code - * targetVocabulary}. + * Checks whether terms from the {@code subjectVocabulary} reference (as parent terms) any terms from the + * {@code targetVocabulary}. 
 * * @param subjectVocabulary Subject vocabulary identifier * @param targetVocabulary Target vocabulary identifier @@ -361,7 +370,7 @@ public void refreshLastModified(RefreshLastModifiedEvent event) { } @Transactional - public CacheableFuture> validateContents(URI vocabulary) { + public ThrottledFuture> validateContents(URI vocabulary) { final VocabularyContentValidator validator = context.getBean(VocabularyContentValidator.class); final Collection importClosure = getTransitivelyImportedVocabularies(vocabulary); importClosure.add(vocabulary); @@ -384,46 +393,25 @@ public List getChangesOfContent(Vocabulary vocabulary) { .setParameter("type", URI.create( cz.cvut.kbss.termit.util.Vocabulary.s_c_uprava_entity)).getResultList(); updates.forEach(u -> u.addType(cz.cvut.kbss.termit.util.Vocabulary.s_c_uprava_entity)); - final List result = new ArrayList<>(persists.size() + updates.size()); - result.addAll(persists); - result.addAll(updates); - Collections.sort(result); - return result; + final List deletions = createContentChangesQuery(vocabulary) + .setParameter("type", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_c_smazani_entity)).getResultList(); + deletions.forEach(d -> d.addType(cz.cvut.kbss.termit.util.Vocabulary.s_c_smazani_entity)); + return Stream.of(persists, updates, deletions) + .flatMap(List::stream) + .sorted() + .toList(); } /** * Gets content change records of the specified vocabulary. 
* * @param vocabulary Vocabulary whose content changes to get - * @param pageReq Specification of the size and number of the page to return + * @param pageReq Specification of the size and number of the page to return * @return List of change records, ordered by date in descending order */ - public List getDetailedHistoryOfContent(Vocabulary vocabulary, Pageable pageReq) { + public List getDetailedHistoryOfContent(Vocabulary vocabulary, ChangeRecordFilterDto filter, Pageable pageReq) { Objects.requireNonNull(vocabulary); - return createDetailedContentChangesQuery(vocabulary, pageReq).getResultList(); - } - - private TypedQuery createDetailedContentChangesQuery(Vocabulary vocabulary, Pageable pageReq) { - return em.createNativeQuery(""" - SELECT ?record WHERE { - ?term ?inVocabulary ?vocabulary ; - a ?termType . - ?record a ?changeRecord ; - ?relatesTo ?term ; - ?hasTime ?timestamp . - OPTIONAL { ?record ?hasChangedAttribute ?attribute . } - } ORDER BY DESC(?timestamp) ?attribute - """, AbstractChangeRecord.class) - .setParameter("inVocabulary", - URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_je_pojmem_ze_slovniku)) - .setParameter("vocabulary", vocabulary) - .setParameter("termType", URI.create(SKOS.CONCEPT)) - .setParameter("changeRecord", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_c_zmena)) - .setParameter("relatesTo", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_zmenenou_entitu)) - .setParameter("hasTime", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_datum_a_cas_modifikace)) - .setParameter("hasChangedAttribute", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_zmeneny_atribut)) - .setFirstResult((int) pageReq.getOffset()) - .setMaxResults(pageReq.getPageSize()); + return changeRecordDao.findAllRelatedToType(vocabulary, filter, URI.create(SKOS.CONCEPT), pageReq); } private Query createContentChangesQuery(Vocabulary vocabulary) { @@ -576,16 +564,17 @@ public List getVocabularyRelations(Vocabulary vocabulary, Collect try { return 
em.createNativeQuery(""" - SELECT DISTINCT ?object ?relation ?subject { - ?object a ?vocabularyType ; - ?relation ?subject . - FILTER(?object != ?subject) . - FILTER(?relation NOT IN (?excluded)) . - } ORDER BY ?object ?relation - """, "RDFStatement") + SELECT DISTINCT ?object ?relation ?subject { + ?object a ?vocabularyType ; + ?relation ?subject . + FILTER(?object != ?subject) . + FILTER(?relation NOT IN (?excluded)) . + } ORDER BY ?object ?relation + """, "RDFStatement") .setParameter("subject", vocabularyUri) - .setParameter("excluded", excludedRelations) - .setParameter("vocabularyType", URI.create(EntityToOwlClassMapper.getOwlClassForEntity(Vocabulary.class))) + .setParameter("excluded", excludedRelations) + .setParameter("vocabularyType", + URI.create(EntityToOwlClassMapper.getOwlClassForEntity(Vocabulary.class))) .getResultList(); } catch (RuntimeException e) { throw new PersistenceException(e); @@ -603,31 +592,31 @@ public List getTermRelations(Vocabulary vocabulary) { try { return em.createNativeQuery(""" - SELECT DISTINCT ?object ?relation ?subject WHERE { - ?term a ?termType; - ?inVocabulary ?vocabulary . - - { - ?term ?relation ?secondTerm . - ?secondTerm a ?termType; - ?inVocabulary ?secondVocabulary . - - BIND(?term as ?object) - BIND(?secondTerm as ?subject) - } UNION { - ?secondTerm ?relation ?term . - ?secondTerm a ?termType; - ?inVocabulary ?secondVocabulary . - - BIND(?secondTerm as ?object) - BIND(?term as ?subject) - } - - FILTER(?relation IN (?deniedRelations)) - FILTER(?object != ?subject) - FILTER(?secondVocabulary != ?vocabulary) - } ORDER by ?object ?relation ?subject - """, "RDFStatement" + SELECT DISTINCT ?object ?relation ?subject WHERE { + ?term a ?termType; + ?inVocabulary ?vocabulary . + + { + ?term ?relation ?secondTerm . + ?secondTerm a ?termType; + ?inVocabulary ?secondVocabulary . + + BIND(?term as ?object) + BIND(?secondTerm as ?subject) + } UNION { + ?secondTerm ?relation ?term . 
+ ?secondTerm a ?termType; + ?inVocabulary ?secondVocabulary . + + BIND(?secondTerm as ?object) + BIND(?term as ?subject) + } + + FILTER(?relation IN (?deniedRelations)) + FILTER(?object != ?subject) + FILTER(?secondVocabulary != ?vocabulary) + } ORDER by ?object ?relation ?subject + """, "RDFStatement" ).setMaxResults(DEFAULT_PAGE_SIZE) .setParameter("termType", termType) .setParameter("inVocabulary", inVocabulary) @@ -638,4 +627,32 @@ public List getTermRelations(Vocabulary vocabulary) { throw new PersistenceException(e); } } + + /** + * Returns the list of all distinct languages (language tags) used by terms in the specified vocabulary. + * + * @param vocabularyUri Vocabulary identifier + * @return List of distinct languages + */ + public List getLanguages(URI vocabularyUri) { + Objects.requireNonNull(vocabularyUri); + try { + return em.createNativeQuery(""" + SELECT DISTINCT ?lang WHERE { + ?x a ?type ; + ?inVocabulary ?vocabulary ; + ?labelProp ?label . + BIND (LANG(?label) as ?lang) + } + """, String.class) + .setParameter("type", URI.create(SKOS.CONCEPT)) + .setParameter("inVocabulary", + URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_je_pojmem_ze_slovniku)) + .setParameter("vocabulary", vocabularyUri) + .setParameter("labelProp", URI.create(SKOS.PREF_LABEL)) + .getResultList(); + } catch (RuntimeException e) { + throw new PersistenceException(e); + } + } } diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java index eadfa6b8a..7179cbd8d 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDao.java @@ -17,21 +17,28 @@ */ package cz.cvut.kbss.termit.persistence.dao.changetracking; +import cz.cvut.kbss.jopa.exceptions.NoResultException; import cz.cvut.kbss.jopa.model.EntityManager; -import 
cz.cvut.kbss.jopa.model.descriptors.Descriptor; import cz.cvut.kbss.jopa.model.descriptors.EntityDescriptor; +import cz.cvut.kbss.jopa.model.query.TypedQuery; +import cz.cvut.kbss.jopa.vocabulary.RDFS; +import cz.cvut.kbss.jopa.vocabulary.SKOS; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.exception.PersistenceException; import cz.cvut.kbss.termit.model.Asset; import cz.cvut.kbss.termit.model.User; import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; import cz.cvut.kbss.termit.model.util.HasIdentifier; +import cz.cvut.kbss.termit.util.Utils; import cz.cvut.kbss.termit.util.Vocabulary; +import org.springframework.data.domain.Pageable; import org.springframework.stereotype.Repository; import java.net.URI; import java.util.HashSet; import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.Set; @Repository @@ -67,32 +74,165 @@ public void persist(AbstractChangeRecord record, Asset changedAsset) { } /** - * Finds all change records to the specified asset. + * Finds all change records related to the specified asset. * * @param asset The changed asset * @return List of change records ordered by timestamp (descending) */ - public List findAll(HasIdentifier asset) { - Objects.requireNonNull(asset); + public List findAll(Asset asset) { + return findAll(asset, new ChangeRecordFilterDto()); + } + + private Optional resolveChangeTrackingContext(Asset asset) { try { - final Descriptor descriptor = new EntityDescriptor(); - descriptor.setLanguage(null); - return em.createNativeQuery("SELECT ?r WHERE {" + - "?r a ?changeRecord ;" + - "?relatesTo ?asset ;" + - "?hasTime ?timestamp ." + - "OPTIONAL { ?r ?hasChangedAttribute ?attribute . 
}" + - "} ORDER BY DESC(?timestamp) ?attribute", AbstractChangeRecord.class) - .setParameter("changeRecord", URI.create(Vocabulary.s_c_zmena)) - .setParameter("relatesTo", URI.create(Vocabulary.s_p_ma_zmenenou_entitu)) - .setParameter("hasChangedAttribute", URI.create(Vocabulary.s_p_ma_zmeneny_atribut)) - .setParameter("hasTime", URI.create(Vocabulary.s_p_ma_datum_a_cas_modifikace)) - .setParameter("asset", asset.getUri()).setDescriptor(descriptor).getResultList(); - } catch (RuntimeException e) { - throw new PersistenceException(e); + return Optional.of(contextResolver.resolveChangeTrackingContext(asset)); + } catch (NoResultException e) { + return Optional.empty(); } } + /** + * Finds all change records related to the specified asset matching the filter. + * + * @param asset the asset + * @param filterDto filter parameters + */ + public List findAll(Asset asset, ChangeRecordFilterDto filterDto) { + return resolveChangeTrackingContext(asset).map(context -> + findAllFiltered(context, filterDto, Optional.of(asset), Optional.empty(), Pageable.unpaged())) + .orElseGet(List::of); + } + + /** + * Finds all records from change context resolved from {@code changeContextAsset} + * that are matching the filter and are related to an entity of the type {@code relatedEntityType}. + */ + public List findAllRelatedToType(Asset changeContextAsset, ChangeRecordFilterDto filterDto, URI relatedEntityType, Pageable pageable) { + return resolveChangeTrackingContext(changeContextAsset).map(context -> + findAllFiltered(context, + filterDto, + Optional.empty(), + Optional.ofNullable(relatedEntityType), + pageable + )).orElseGet(List::of); + } + + /** + * Finds all change records matching the filter. + * + * @param changeContext the context of change records + * @param filter filter parameters + * @param asset if present, only changes of the asset will be returned + * @param assetType if present, only changes related to an asset of this type will be returned. 
+ */ + private List findAllFiltered(URI changeContext, ChangeRecordFilterDto filter, Optional> asset, Optional assetType, Pageable pageable) { + TypedQuery query = em.createNativeQuery(""" + SELECT DISTINCT ?record WHERE { +""" + /* Select anything from change context */ """ + GRAPH ?changeContext { + ?record a ?changeRecord . + } +""" + /* The record should be a subclass of changeType ("zmena") and have timestamp and author */ """ + ?changeRecord ?subClassOf+ ?changeType . + ?record ?hasChangedEntity ?asset ; + ?hasTime ?timestamp ; + ?hasAuthor ?author . +""" + /* Find an asset type if it is known (deleted assets do not have a type) */ """ + BIND(?assetTypeValue as ?assetTypeVar) + OPTIONAL { + ?asset a ?assetType . + OPTIONAL { + ?asset a ?assetTypeValue + BIND(true as ?isAssetType) + } + } +""" + /* filter assets without a type (deleted) or with a matching type */ """ + FILTER(!BOUND(?assetTypeVar) || !BOUND(?assetType) || BOUND(?isAssetType)) +""" + /* Get author's name */ """ + ?author ?hasFirstName ?firstName ; + ?hasLastName ?lastName . + BIND(CONCAT(?firstName, " ", ?lastName) as ?authorFullName) +""" + /* When it is an update record, there will be a changed attribute */ """ + OPTIONAL { + ?record ?hasChangedAttribute ?attribute . + ?attribute ?hasRdfsLabel ?changedAttributeLabel . + } +""" + /* Get asset's name (but the asset might have been already deleted) */ """ + OPTIONAL { + ?asset ?hasLabel ?assetPrefLabel . + BIND(?assetPrefLabel as ?finalAssetLabel) + } + OPTIONAL { + ?asset ?hasRdfsLabel ?assetRdfsLabel . + BIND(?assetRdfsLabel as ?finalAssetLabel) + } +""" + /* then try to get the label from (delete) record */ """ + OPTIONAL { + ?record ?hasRdfsLabel ?recordRdfsLabel . 
+ BIND(?recordRdfsLabel as ?finalAssetLabel) + } +""" + /* When label is still not bound, the term was probably deleted, find the delete record and get the label from it */ """ + OPTIONAL { + ?deleteRecord a ?deleteRecordType; + ?hasChangedEntity ?asset; + ?hasRdfsLabel ?deleteRecordLabel . + BIND(?deleteRecordLabel as ?finalAssetLabel) + } + BIND(?assetLabelValue as ?assetLabel) + BIND(?authorNameValue as ?authorName) + BIND(?attributeNameValue as ?changedAttributeName) + FILTER (!BOUND(?assetLabel) || CONTAINS(LCASE(?finalAssetLabel), LCASE(?assetLabel))) + FILTER (!BOUND(?authorName) || CONTAINS(LCASE(?authorFullName), LCASE(?authorName))) + FILTER (!BOUND(?changedAttributeName) || CONTAINS(LCASE(?changedAttributeLabel), LCASE(?changedAttributeName))) + } ORDER BY DESC(?timestamp) ?attribute + """, AbstractChangeRecord.class) + .setParameter("changeContext", changeContext) + .setParameter("subClassOf", URI.create(RDFS.SUB_CLASS_OF)) + .setParameter("changeType", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_c_zmena)) + .setParameter("hasChangedEntity", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_zmenenou_entitu)) + .setParameter("hasTime", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_datum_a_cas_modifikace)) + .setParameter("hasAuthor", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_editora)) // record has author + .setParameter("hasFirstName", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_krestni_jmeno)) + .setParameter("hasLastName", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_prijmeni)) + // Optional - update change record + .setParameter("hasChangedAttribute", URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_zmeneny_atribut)) + .setParameter("hasRdfsLabel", URI.create(RDFS.LABEL)) + // Optional - + .setParameter("hasLabel", URI.create(SKOS.PREF_LABEL)) + + // Optional asset label + .setParameter("deleteRecordType", URI.create(Vocabulary.s_c_smazani_entity)); + + if(asset.isPresent() && asset.get().getUri() 
!= null) { + query = query.setParameter("asset", asset.get().getUri()); + } else if (assetType.isPresent()) { + query = query.setParameter("assetTypeValue", assetType.get()); + } + + + if(!Utils.isBlank(filter.getAssetLabel())) { + query = query.setParameter("assetLabelValue", filter.getAssetLabel().trim()); + } + if (!Utils.isBlank(filter.getAuthorName())) { + query = query.setParameter("authorNameValue", filter.getAuthorName().trim()); + } + if (filter.getChangeType() != null) { + query = query.setParameter("changeRecord", filter.getChangeType()); + } + if (!Utils.isBlank(filter.getChangedAttributeName())) { + query = query.setParameter("attributeNameValue", filter.getChangedAttributeName().trim()); + } + + query = query.setDescriptor(new EntityDescriptor().anyLanguage()); + + if(pageable.isUnpaged()) { + return query.getResultList(); + } + + return query.setFirstResult((int) pageable.getOffset()) + .setMaxResults(pageable.getPageSize()).getResultList(); + } + /** * Gets a set of authors of the specified asset. That is, this method retrieves authors of persist change records * associated with the specified asset. 
diff --git a/src/main/java/cz/cvut/kbss/termit/persistence/dao/skos/SKOSImporter.java b/src/main/java/cz/cvut/kbss/termit/persistence/dao/skos/SKOSImporter.java index 6d94fd9e9..7da506ad4 100644 --- a/src/main/java/cz/cvut/kbss/termit/persistence/dao/skos/SKOSImporter.java +++ b/src/main/java/cz/cvut/kbss/termit/persistence/dao/skos/SKOSImporter.java @@ -19,6 +19,7 @@ import cz.cvut.kbss.jopa.model.EntityManager; import cz.cvut.kbss.jopa.model.MultilingualString; +import cz.cvut.kbss.termit.exception.UnsupportedOperationException; import cz.cvut.kbss.termit.exception.importing.UnsupportedImportMediaTypeException; import cz.cvut.kbss.termit.exception.importing.VocabularyExistsException; import cz.cvut.kbss.termit.exception.importing.VocabularyImportException; @@ -28,6 +29,7 @@ import cz.cvut.kbss.termit.service.importer.VocabularyImporter; import cz.cvut.kbss.termit.util.Configuration; import cz.cvut.kbss.termit.util.Utils; +import jakarta.annotation.Nonnull; import jakarta.validation.constraints.NotNull; import org.eclipse.rdf4j.model.IRI; import org.eclipse.rdf4j.model.Literal; @@ -105,10 +107,11 @@ public SKOSImporter(Configuration config, VocabularyDao vocabularyDao, EntityMan } @Override - public Vocabulary importVocabulary(ImportConfiguration config, ImportInput data) { + public Vocabulary importVocabulary(@Nonnull ImportConfiguration config, @Nonnull ImportInput data) { Objects.requireNonNull(config); Objects.requireNonNull(data); - return importVocabulary(config.allowReIdentify(), config.vocabularyIri(), data.mediaType(), config.prePersist(), data.data()); + return importVocabulary(config.allowReIdentify(), config.vocabularyIri(), data.mediaType(), config.prePersist(), + data.data()); } private Vocabulary importVocabulary(final boolean rename, @@ -363,6 +366,12 @@ private void setVocabularyDescriptionFromGlossary(final Vocabulary vocabulary) { handleGlossaryStringProperty(DCTERMS.DESCRIPTION, vocabulary::setDescription); } + @Override + public Vocabulary 
importTermTranslations(@Nonnull URI vocabularyIri, @Nonnull ImportInput data) { + throw new UnsupportedOperationException( + "Importing term translations from SKOS file is currently not supported."); + } + /** * Checks whether this importer supports the specified media type. * diff --git a/src/main/java/cz/cvut/kbss/termit/rest/ResourceController.java b/src/main/java/cz/cvut/kbss/termit/rest/ResourceController.java index 11bb65415..0c659485d 100644 --- a/src/main/java/cz/cvut/kbss/termit/rest/ResourceController.java +++ b/src/main/java/cz/cvut/kbss/termit/rest/ResourceController.java @@ -18,6 +18,7 @@ package cz.cvut.kbss.termit.rest; import cz.cvut.kbss.jsonld.JsonLd; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.exception.TermItException; import cz.cvut.kbss.termit.model.TextAnalysisRecord; import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; @@ -360,7 +361,7 @@ public List getHistory( required = false) Optional namespace) { final Resource resource = resourceService .getReference(resolveIdentifier(resourceNamespace(namespace), localName)); - return resourceService.getChanges(resource); + return resourceService.getChanges(resource, new ChangeRecordFilterDto()); } /** diff --git a/src/main/java/cz/cvut/kbss/termit/rest/TermController.java b/src/main/java/cz/cvut/kbss/termit/rest/TermController.java index 9fc059aa9..50bf8bcf6 100644 --- a/src/main/java/cz/cvut/kbss/termit/rest/TermController.java +++ b/src/main/java/cz/cvut/kbss/termit/rest/TermController.java @@ -18,6 +18,7 @@ package cz.cvut.kbss.termit.rest; import cz.cvut.kbss.jsonld.JsonLd; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.dto.listing.TermDto; import cz.cvut.kbss.termit.exception.TermItException; import cz.cvut.kbss.termit.model.Term; @@ -697,9 +698,16 @@ public List getHistory( @Parameter(description = ApiDoc.ID_TERM_LOCAL_NAME_DESCRIPTION, example = ApiDoc.ID_TERM_LOCAL_NAME_EXAMPLE) 
@PathVariable String termLocalName, @Parameter(description = ApiDoc.ID_NAMESPACE_DESCRIPTION, example = ApiDoc.ID_NAMESPACE_EXAMPLE) - @RequestParam(name = QueryParams.NAMESPACE, required = false) Optional namespace) { + @RequestParam(name = QueryParams.NAMESPACE, required = false) Optional namespace, + @Parameter(description = ChangeRecordFilterDto.ApiDoc.CHANGE_TYPE_DESCRIPTION) + @RequestParam(name = "changeType", required = false) URI changeType, + @Parameter(description = ChangeRecordFilterDto.ApiDoc.AUTHOR_NAME_DESCRIPTION) + @RequestParam(name = "author", required = false, defaultValue = "") String authorName, + @Parameter(description = ChangeRecordFilterDto.ApiDoc.CHANGED_ATTRIBUTE_DESCRIPTION) + @RequestParam(name = "attribute", required = false, defaultValue = "") String changedAttributeName) { final URI termUri = getTermUri(localName, termLocalName, namespace); - return termService.getChanges(termService.findRequired(termUri)); + final ChangeRecordFilterDto filterDto = new ChangeRecordFilterDto(changedAttributeName, authorName, changeType); + return termService.getChanges(termService.findRequired(termUri), filterDto); } /** @@ -707,7 +715,7 @@ public List getHistory( *

* This is a convenience method to allow access without using the Term's parent Vocabulary. * - * @see #getHistory(String, String, Optional) + * @see #getHistory */ @Operation(security = {@SecurityRequirement(name = "bearer-key")}, description = "Gets a list of changes made to metadata of the term with the specified identifier.") @@ -722,9 +730,18 @@ public List getHistory(@Parameter(description = ApiDoc.ID_ @PathVariable String localName, @Parameter(description = ApiDoc.ID_STANDALONE_NAMESPACE_DESCRIPTION, example = ApiDoc.ID_STANDALONE_NAMESPACE_EXAMPLE) - @RequestParam(name = QueryParams.NAMESPACE) String namespace) { + @RequestParam(name = QueryParams.NAMESPACE) String namespace, + @Parameter(description = ChangeRecordFilterDto.ApiDoc.CHANGE_TYPE_DESCRIPTION) + @RequestParam(name = "changeType", required = false) URI changeType, + @Parameter(description = ChangeRecordFilterDto.ApiDoc.AUTHOR_NAME_DESCRIPTION) + @RequestParam(name = "author", required = false, + defaultValue = "") String authorName, + @Parameter(description = ChangeRecordFilterDto.ApiDoc.CHANGED_ATTRIBUTE_DESCRIPTION) + @RequestParam(name = "attribute", required = false, + defaultValue = "") String changedAttributeName) { final URI termUri = idResolver.resolveIdentifier(namespace, localName); - return termService.getChanges(termService.findRequired(termUri)); + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(changedAttributeName, authorName, changeType); + return termService.getChanges(termService.findRequired(termUri), filter); } @Operation(security = {@SecurityRequirement(name = "bearer-key")}, diff --git a/src/main/java/cz/cvut/kbss/termit/rest/TermOccurrenceController.java b/src/main/java/cz/cvut/kbss/termit/rest/TermOccurrenceController.java index ddfa5057a..7fe2bc420 100644 --- a/src/main/java/cz/cvut/kbss/termit/rest/TermOccurrenceController.java +++ b/src/main/java/cz/cvut/kbss/termit/rest/TermOccurrenceController.java @@ -76,7 +76,6 @@ public 
TermOccurrenceController(IdentifierResolver idResolver, Configuration con }) @PutMapping(consumes = {JsonLd.MEDIA_TYPE, MediaType.APPLICATION_JSON_VALUE}) @ResponseStatus(HttpStatus.NO_CONTENT) - @PreAuthorize("hasRole('" + SecurityConstants.ROLE_FULL_USER + "')") public void saveOccurrence(@Parameter(description = "Term occurrence to save") @RequestBody TermOccurrence occurrence) { occurrenceService.persistOrUpdate(occurrence); @@ -91,7 +90,6 @@ public void saveOccurrence(@Parameter(description = "Term occurrence to save") }) @PutMapping(value = "/{localName}") @ResponseStatus(HttpStatus.ACCEPTED) - @PreAuthorize("hasRole('" + SecurityConstants.ROLE_FULL_USER + "')") public void approveOccurrence( @Parameter(description = TermOccurrenceControllerDoc.ID_LOCAL_NAME_DESCRIPTION, example = TermOccurrenceControllerDoc.ID_LOCAL_NAME_EXAMPLE) @@ -113,7 +111,6 @@ public void approveOccurrence( }) @DeleteMapping(value = "/{localName}") @ResponseStatus(HttpStatus.NO_CONTENT) - @PreAuthorize("hasRole('" + SecurityConstants.ROLE_FULL_USER + "')") public void removeOccurrence(@Parameter(description = TermOccurrenceControllerDoc.ID_LOCAL_NAME_DESCRIPTION, example = TermOccurrenceControllerDoc.ID_LOCAL_NAME_EXAMPLE) @PathVariable String localName, diff --git a/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java b/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java index e8cd5afb4..b90780d59 100644 --- a/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java +++ b/src/main/java/cz/cvut/kbss/termit/rest/VocabularyController.java @@ -22,6 +22,7 @@ import cz.cvut.kbss.termit.dto.RdfsStatement; import cz.cvut.kbss.termit.dto.Snapshot; import cz.cvut.kbss.termit.dto.acl.AccessControlListDto; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.dto.listing.VocabularyDto; import cz.cvut.kbss.termit.model.Vocabulary; import cz.cvut.kbss.termit.model.acl.AccessControlRecord; @@ -204,8 +205,13 @@ public ResponseEntity 
createVocabulary( @ApiResponse(responseCode = "200", description = "Template Excel file is returned as attachment") @GetMapping("/import/template") @PreAuthorize("permitAll()") - public ResponseEntity getExcelTemplateFile() { - final TypeAwareResource template = vocabularyService.getExcelTemplateFile(); + public ResponseEntity getExcelTemplateFile( + @Parameter(description = "Whether the file will be used to import only term translations") + @RequestParam(name = "translationsOnly", required = false, + defaultValue = "false") boolean translationsOnly) { + final TypeAwareResource template = + translationsOnly ? vocabularyService.getExcelTranslationsImportTemplateFile() : + vocabularyService.getExcelImportTemplateFile(); return ResponseEntity.ok() .contentType(MediaType.parseMediaType( template.getMediaType().orElse(MediaType.APPLICATION_OCTET_STREAM_VALUE))) @@ -234,12 +240,22 @@ public ResponseEntity createVocabulary( example = ApiDoc.ID_NAMESPACE_EXAMPLE) @RequestParam(name = QueryParams.NAMESPACE, required = false) Optional namespace, - @Parameter(description = "File containing a SKOS glossary in RDF.") - @RequestParam(name = "file") MultipartFile file) { + @Parameter( + description = "File containing a SKOS glossary in RDF or an Excel file with supported structure.") + @RequestParam(name = "file") MultipartFile file, + @Parameter(description = "Whether to import only translations of existing terms from the vocabulary.") + @RequestParam(name = "translationsOnly", required = false, + defaultValue = "false") boolean translationsOnly) { final URI vocabularyIri = resolveVocabularyUri(localName, namespace); - final Vocabulary vocabulary = vocabularyService.importVocabulary(vocabularyIri, file); - LOG.debug("Vocabulary {} re-imported.", vocabulary); - return ResponseEntity.created(locationWithout(generateLocation(vocabulary.getUri()), "/import/" + localName)) + final Vocabulary result; + if (translationsOnly) { + result = 
vocabularyService.importTermTranslations(vocabularyIri, file); + LOG.debug("Translations of terms in vocabulary {} imported.", result); + } else { + result = vocabularyService.importVocabulary(vocabularyIri, file); + LOG.debug("Vocabulary {} re-imported.", result); + } + return ResponseEntity.created(locationWithout(generateLocation(result.getUri()), "/import/" + localName)) .build(); } @@ -262,10 +278,17 @@ public List getHistory( @Parameter(description = ApiDoc.ID_NAMESPACE_DESCRIPTION, example = ApiDoc.ID_NAMESPACE_EXAMPLE) @RequestParam(name = QueryParams.NAMESPACE, - required = false) Optional namespace) { + required = false) Optional namespace, + @Parameter(description = ChangeRecordFilterDto.ApiDoc.CHANGE_TYPE_DESCRIPTION) + @RequestParam(name = "type", required = false) URI changeType, + @Parameter(description = ChangeRecordFilterDto.ApiDoc.AUTHOR_NAME_DESCRIPTION) + @RequestParam(name = "author", required = false, defaultValue = "") String authorName, + @Parameter(description = ChangeRecordFilterDto.ApiDoc.CHANGED_ATTRIBUTE_DESCRIPTION) + @RequestParam(name = "attribute", required = false, defaultValue = "") String changedAttributeName) { final Vocabulary vocabulary = vocabularyService.getReference( resolveVocabularyUri(localName, namespace)); - return vocabularyService.getChanges(vocabulary); + final ChangeRecordFilterDto filterDto = new ChangeRecordFilterDto(changedAttributeName, authorName, changeType); + return vocabularyService.getChanges(vocabulary, filterDto); } @Operation(security = {@SecurityRequirement(name = "bearer-key")}, @@ -301,6 +324,18 @@ public List getDetailedHistoryOfContent( @Parameter(description = ApiDoc.ID_NAMESPACE_DESCRIPTION, example = ApiDoc.ID_NAMESPACE_EXAMPLE) @RequestParam(name = QueryParams.NAMESPACE, required = false) Optional namespace, + @Parameter(description = ChangeRecordFilterDto.ApiDoc.TERM_NAME_DESCRIPTION) @RequestParam(name = "term", + required = false, + defaultValue = "") String termName, + 
@Parameter(description = ChangeRecordFilterDto.ApiDoc.CHANGE_TYPE_DESCRIPTION) @RequestParam(name = "type", + required = false) URI changeType, + @Parameter(description = ChangeRecordFilterDto.ApiDoc.AUTHOR_NAME_DESCRIPTION) @RequestParam( + name = "author", + required = false, + defaultValue = "") String authorName, + @Parameter(description = ChangeRecordFilterDto.ApiDoc.CHANGED_ATTRIBUTE_DESCRIPTION) @RequestParam( + name = "attribute", required = false, defaultValue = "") String changedAttributeName, + @Parameter(description = ApiDocConstants.PAGE_SIZE_DESCRIPTION) @RequestParam( name = Constants.QueryParams.PAGE_SIZE, required = false, defaultValue = DEFAULT_PAGE_SIZE) Integer pageSize, @@ -308,7 +343,25 @@ public List getDetailedHistoryOfContent( name = Constants.QueryParams.PAGE, required = false, defaultValue = DEFAULT_PAGE) Integer pageNo) { final Pageable pageReq = createPageRequest(pageSize, pageNo); final Vocabulary vocabulary = vocabularyService.getReference(resolveVocabularyUri(localName, namespace)); - return vocabularyService.getDetailedHistoryOfContent(vocabulary, pageReq); + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(termName, changedAttributeName, authorName, + changeType); + return vocabularyService.getDetailedHistoryOfContent(vocabulary, filter, pageReq); + } + + @Operation(security = {@SecurityRequirement(name = "bearer-key")}, + description = "Gets a list of languages used in the vocabulary.") + @ApiResponses({ + @ApiResponse(responseCode = "200", description = "List of languages.") + }) + @GetMapping(value = "/{localName}/languages", produces = {MediaType.APPLICATION_JSON_VALUE, JsonLd.MEDIA_TYPE}) + public List getLanguages( + @Parameter(description = ApiDoc.ID_LOCAL_NAME_DESCRIPTION, + example = ApiDoc.ID_LOCAL_NAME_EXAMPLE) @PathVariable String localName, + @Parameter(description = ApiDoc.ID_NAMESPACE_DESCRIPTION, + example = ApiDoc.ID_NAMESPACE_EXAMPLE) @RequestParam(name = QueryParams.NAMESPACE, + required = false) 
Optional namespace) { + final URI vocabularyUri = resolveVocabularyUri(localName, namespace); + return vocabularyService.getLanguages(vocabularyUri); } @Operation(security = {@SecurityRequirement(name = "bearer-key")}, diff --git a/src/main/java/cz/cvut/kbss/termit/rest/handler/RestExceptionHandler.java b/src/main/java/cz/cvut/kbss/termit/rest/handler/RestExceptionHandler.java index 0ea71c47c..53ba971a6 100644 --- a/src/main/java/cz/cvut/kbss/termit/rest/handler/RestExceptionHandler.java +++ b/src/main/java/cz/cvut/kbss/termit/rest/handler/RestExceptionHandler.java @@ -36,6 +36,7 @@ import cz.cvut.kbss.termit.exception.TermItException; import cz.cvut.kbss.termit.exception.UnsupportedOperationException; import cz.cvut.kbss.termit.exception.UnsupportedSearchFacetException; +import cz.cvut.kbss.termit.exception.UnsupportedTextAnalysisLanguageException; import cz.cvut.kbss.termit.exception.ValidationException; import cz.cvut.kbss.termit.exception.WebServiceIntegrationException; import cz.cvut.kbss.termit.exception.importing.UnsupportedImportMediaTypeException; @@ -99,7 +100,8 @@ private static ErrorInfo errorInfo(HttpServletRequest request, Throwable e) { } private static ErrorInfo errorInfo(HttpServletRequest request, TermItException e) { - return ErrorInfo.createParametrizedWithMessage(e.getMessage(), e.getMessageId(), request.getRequestURI(), e.getParameters()); + return ErrorInfo.createParametrizedWithMessage(e.getMessage(), e.getMessageId(), request.getRequestURI(), + e.getParameters()); } @ExceptionHandler(PersistenceException.class) @@ -290,4 +292,11 @@ public ResponseEntity uriSyntaxException(HttpServletRequest request, .addParameter("char", Character.toString(e.getInput().charAt(e.getIndex()))); return new ResponseEntity<>(errorInfo(request, exception), HttpStatus.CONFLICT); } + + @ExceptionHandler + public ResponseEntity unsupportedTextAnalysisLanguageException(HttpServletRequest request, + UnsupportedTextAnalysisLanguageException e) { + logException(e, 
request); + return new ResponseEntity<>(errorInfo(request, e), HttpStatus.CONFLICT); + } } diff --git a/src/main/java/cz/cvut/kbss/termit/service/business/AccessControlListService.java b/src/main/java/cz/cvut/kbss/termit/service/business/AccessControlListService.java index 4b6cdc889..c2b5772af 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/business/AccessControlListService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/business/AccessControlListService.java @@ -32,7 +32,7 @@ /** * Service for managing {@link AccessControlList}s (ACLs). *

- * Note that only management of ACLs is supported by this service. Access control itself is handled by TODO. + * Note that only management of ACLs is supported by this service. Access control itself is handled by {@link cz.cvut.kbss.termit.service.security.authorization.acl.AccessControlListBasedAuthorizationService}. */ public interface AccessControlListService { diff --git a/src/main/java/cz/cvut/kbss/termit/service/business/ResourceService.java b/src/main/java/cz/cvut/kbss/termit/service/business/ResourceService.java index f8d8f87a3..c069bf7bf 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/business/ResourceService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/business/ResourceService.java @@ -18,12 +18,14 @@ package cz.cvut.kbss.termit.service.business; import cz.cvut.kbss.termit.asset.provenance.SupportsLastModification; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.event.DocumentRenameEvent; import cz.cvut.kbss.termit.event.FileRenameEvent; import cz.cvut.kbss.termit.event.VocabularyWillBeRemovedEvent; import cz.cvut.kbss.termit.exception.InvalidParameterException; import cz.cvut.kbss.termit.exception.NotFoundException; import cz.cvut.kbss.termit.exception.UnsupportedAssetOperationException; +import cz.cvut.kbss.termit.exception.UnsupportedTextAnalysisLanguageException; import cz.cvut.kbss.termit.model.TextAnalysisRecord; import cz.cvut.kbss.termit.model.Vocabulary; import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; @@ -37,6 +39,7 @@ import cz.cvut.kbss.termit.service.document.html.UnconfirmedTermOccurrenceRemover; import cz.cvut.kbss.termit.service.repository.ChangeRecordService; import cz.cvut.kbss.termit.service.repository.ResourceRepositoryService; +import cz.cvut.kbss.termit.util.Configuration; import cz.cvut.kbss.termit.util.TypeAwareResource; import jakarta.annotation.Nonnull; import org.slf4j.Logger; @@ -80,22 +83,26 @@ public class ResourceService private final 
ChangeRecordService changeRecordService; + private final Configuration config; + private ApplicationEventPublisher eventPublisher; @Autowired public ResourceService(ResourceRepositoryService repositoryService, DocumentManager documentManager, TextAnalysisService textAnalysisService, VocabularyService vocabularyService, - ChangeRecordService changeRecordService) { + ChangeRecordService changeRecordService, Configuration config) { this.repositoryService = repositoryService; this.documentManager = documentManager; this.textAnalysisService = textAnalysisService; this.vocabularyService = vocabularyService; this.changeRecordService = changeRecordService; + this.config = config; } /** * Ensures that document gets removed during Vocabulary removal */ + @Transactional @EventListener public void onVocabularyRemoval(VocabularyWillBeRemovedEvent event) { vocabularyService.find(event.getVocabularyIri()).ifPresent(vocabulary -> { @@ -239,6 +246,9 @@ public void addFileToDocument(Resource document, File file) { throw new UnsupportedAssetOperationException("Cannot add file to the specified resource " + document); } doc.addFile(file); + if (file.getLanguage() == null) { + file.setLanguage(config.getPersistence().getLanguage()); + } if (doc.getVocabulary() != null) { final Vocabulary vocabulary = vocabularyService.getReference(doc.getVocabulary()); repositoryService.persist(file, vocabulary); @@ -292,6 +302,7 @@ public void runTextAnalysis(Resource resource, Set vocabularies) { verifyFileOperationPossible(resource, "Text analysis"); LOG.trace("Invoking text analysis on resource {}.", resource); final File file = (File) resource; + verifyLanguageSupported(file); if (vocabularies.isEmpty()) { if (file.getDocument() == null || file.getDocument().getVocabulary() == null) { throw new UnsupportedAssetOperationException( @@ -305,6 +316,12 @@ public void runTextAnalysis(Resource resource, Set vocabularies) { } } + private void verifyLanguageSupported(File file) { + if 
(!textAnalysisService.supportsLanguage(file)) { + throw new UnsupportedTextAnalysisLanguageException("Text analysis service does not support language " + file.getLanguage(), file); + } + } + private Set includeImportedVocabularies(Set providedVocabularies) { final Set result = new HashSet<>(providedVocabularies); providedVocabularies.forEach(uri -> { @@ -369,8 +386,8 @@ public long getLastModified() { } @Override - public List getChanges(Resource asset) { - return changeRecordService.getChanges(asset); + public List getChanges(Resource asset, ChangeRecordFilterDto filterDto) { + return changeRecordService.getChanges(asset, filterDto); } @Override diff --git a/src/main/java/cz/cvut/kbss/termit/service/business/TermService.java b/src/main/java/cz/cvut/kbss/termit/service/business/TermService.java index b22bc1e67..1578b4888 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/business/TermService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/business/TermService.java @@ -20,6 +20,7 @@ import cz.cvut.kbss.termit.dto.RdfsResource; import cz.cvut.kbss.termit.dto.Snapshot; import cz.cvut.kbss.termit.dto.assignment.TermOccurrences; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.dto.listing.TermDto; import cz.cvut.kbss.termit.exception.InvalidTermStateException; import cz.cvut.kbss.termit.exception.NotFoundException; @@ -545,9 +546,9 @@ private void checkForInvalidTerminalStateAssignment(Term term, URI state) { } @Override - public List getChanges(Term term) { + public List getChanges(Term term, ChangeRecordFilterDto filterDto) { Objects.requireNonNull(term); - return changeRecordService.getChanges(term); + return changeRecordService.getChanges(term, filterDto); } /** diff --git a/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java b/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java index fe6d9b20a..bd250002f 100644 --- 
a/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/business/VocabularyService.java @@ -22,6 +22,7 @@ import cz.cvut.kbss.termit.dto.RdfsStatement; import cz.cvut.kbss.termit.dto.Snapshot; import cz.cvut.kbss.termit.dto.acl.AccessControlListDto; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.dto.listing.TermDto; import cz.cvut.kbss.termit.dto.listing.VocabularyDto; import cz.cvut.kbss.termit.event.VocabularyContentModifiedEvent; @@ -46,8 +47,8 @@ import cz.cvut.kbss.termit.util.TypeAwareClasspathResource; import cz.cvut.kbss.termit.util.TypeAwareFileSystemResource; import cz.cvut.kbss.termit.util.TypeAwareResource; -import cz.cvut.kbss.termit.util.throttle.CacheableFuture; import cz.cvut.kbss.termit.util.throttle.Throttle; +import cz.cvut.kbss.termit.util.throttle.ThrottledFuture; import jakarta.annotation.Nonnull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -280,26 +281,51 @@ public Vocabulary importVocabulary(URI vocabularyIri, MultipartFile file) { return repositoryService.importVocabulary(vocabularyIri, file); } + /** + * Imports translations of terms in the specified vocabulary from the specified file. + * + * @param vocabularyIri IRI of vocabulary for whose terms to import translations + * @param file File from which to import the translations + * @return The imported vocabulary metadata + * @throws cz.cvut.kbss.termit.exception.importing.VocabularyImportException If the import fails + */ + @PreAuthorize("@vocabularyAuthorizationService.canModify(#vocabularyIri)") + public Vocabulary importTermTranslations(URI vocabularyIri, MultipartFile file) { + return repositoryService.importTermTranslations(vocabularyIri, file); + } + /** * Gets an Excel template file that can be used to import terms into TermIt. 
* * @return Template file as a resource */ - public TypeAwareResource getExcelTemplateFile() { + public TypeAwareResource getExcelImportTemplateFile() { + return getExcelTemplate("termit-import"); + } + + private TypeAwareResource getExcelTemplate(String fileName) { final Configuration config = context.getBean(Configuration.class); return config.getTemplate().getExcelImport().map(File::new) .map(f -> (TypeAwareResource) new TypeAwareFileSystemResource(f, ExportFormat.EXCEL.getMediaType())) .orElseGet(() -> { - assert getClass().getClassLoader().getResource("template/termit-import.xlsx") != null; - return new TypeAwareClasspathResource("template/termit-import.xlsx", + assert getClass().getClassLoader().getResource("template/" + fileName + ExportFormat.EXCEL.getFileExtension()) != null; + return new TypeAwareClasspathResource("template/" + fileName + ExportFormat.EXCEL.getFileExtension(), ExportFormat.EXCEL.getMediaType()); }); } + /** + * Gets an Excel template file that can be used to import term translations into TermIt. + * @return Template file as a resource + */ + public TypeAwareResource getExcelTranslationsImportTemplateFile() { + return getExcelTemplate("termit-translations-import"); + } + @Override - public List getChanges(Vocabulary asset) { - return changeRecordService.getChanges(asset); + public List getChanges(Vocabulary asset, ChangeRecordFilterDto filterDto) { + return changeRecordService.getChanges(asset, filterDto); } /** @@ -316,11 +342,12 @@ public List getChangesOfContent(Vocabulary vocabulary) { * Gets content change records of the specified vocabulary. 
* * @param vocabulary Vocabulary whose content changes to get - * @param pageReq Specification of the size and number of the page to return + * @param pageReq Specification of the size and number of the page to return * @return List of change records, ordered by date in descending order */ - public List getDetailedHistoryOfContent(Vocabulary vocabulary, Pageable pageReq) { - return repositoryService.getDetailedHistoryOfContent(vocabulary, pageReq); + public List getDetailedHistoryOfContent(Vocabulary vocabulary, ChangeRecordFilterDto filter, + Pageable pageReq) { + return repositoryService.getDetailedHistoryOfContent(vocabulary, filter, pageReq); } /** @@ -384,7 +411,7 @@ public void remove(Vocabulary asset) { * * @param vocabulary Vocabulary to validate */ - public CacheableFuture> validateContents(URI vocabulary) { + public ThrottledFuture> validateContents(URI vocabulary) { return repositoryService.validateContents(vocabulary); } @@ -522,6 +549,17 @@ public AccessLevel getAccessLevel(Vocabulary vocabulary) { return authorizationService.getAccessLevel(vocabulary); } + /** + * Gets the list of languages used in the specified vocabulary. 
+ * + * @param vocabularyUri Vocabulary identifier + * @return List of languages + */ + @PreAuthorize("@vocabularyAuthorizationService.canRead(#vocabularyUri)") + public List getLanguages(URI vocabularyUri) { + return repositoryService.getLanguages(vocabularyUri); + } + @Override public void setApplicationEventPublisher(@Nonnull ApplicationEventPublisher eventPublisher) { this.eventPublisher = eventPublisher; diff --git a/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeRecordProvider.java b/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeRecordProvider.java index 41396793d..d6f034dfe 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeRecordProvider.java +++ b/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeRecordProvider.java @@ -17,8 +17,9 @@ */ package cz.cvut.kbss.termit.service.changetracking; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; +import cz.cvut.kbss.termit.model.Asset; import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; -import cz.cvut.kbss.termit.model.util.HasIdentifier; import java.util.List; @@ -27,7 +28,17 @@ * * @param Type of asset to get changes for */ -public interface ChangeRecordProvider { +public interface ChangeRecordProvider> { + + /** + * Gets change records of the specified asset + * filtered by {@link ChangeRecordFilterDto}. + * + * @param asset Asset to find change records for + * @param filterDto Filter parameters + * @return List of change records, ordered by record timestamp in descending order + */ + List getChanges(T asset, ChangeRecordFilterDto filterDto); /** * Gets change records of the specified asset. 
@@ -35,5 +46,7 @@ public interface ChangeRecordProvider { * @param asset Asset to find change records for * @return List of change records, ordered by record timestamp in descending order */ - List getChanges(T asset); + default List getChanges(T asset) { + return getChanges(asset, new ChangeRecordFilterDto()); + } } diff --git a/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeTracker.java b/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeTracker.java index b9497ab94..a7a5876b7 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeTracker.java +++ b/src/main/java/cz/cvut/kbss/termit/service/changetracking/ChangeTracker.java @@ -19,9 +19,11 @@ import cz.cvut.kbss.termit.event.AssetPersistEvent; import cz.cvut.kbss.termit.event.AssetUpdateEvent; +import cz.cvut.kbss.termit.event.BeforeAssetDeleteEvent; import cz.cvut.kbss.termit.model.Asset; import cz.cvut.kbss.termit.model.User; import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; +import cz.cvut.kbss.termit.model.changetracking.DeleteChangeRecord; import cz.cvut.kbss.termit.model.changetracking.PersistChangeRecord; import cz.cvut.kbss.termit.model.changetracking.UpdateChangeRecord; import cz.cvut.kbss.termit.model.resource.File; @@ -114,4 +116,22 @@ public void onAssetPersistEvent(@Nonnull AssetPersistEvent event) { changeRecord.setTimestamp(Utils.timestamp()); changeRecordDao.persist(changeRecord, added); } + + /** + * Records an asset deletion from the repository. 
+ * + * @param event Event representing the asset deletion + */ + @Transactional + @EventListener + public void onBeforeAssetDeleteEvent(@Nonnull BeforeAssetDeleteEvent event) { + final Asset asset = event.getAsset(); + LOG.trace("Recording deletion of asset {}.", asset); + + final AbstractChangeRecord changeRecord = new DeleteChangeRecord(asset); + changeRecord.setAuthor(securityUtils.getCurrentUser().toUser()); + changeRecord.setTimestamp(Utils.timestamp()); + + changeRecordDao.persist(changeRecord, asset); + } } diff --git a/src/main/java/cz/cvut/kbss/termit/service/document/TermOccurrenceResolver.java b/src/main/java/cz/cvut/kbss/termit/service/document/TermOccurrenceResolver.java index 616c0707d..6cb6d66ec 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/document/TermOccurrenceResolver.java +++ b/src/main/java/cz/cvut/kbss/termit/service/document/TermOccurrenceResolver.java @@ -31,7 +31,6 @@ import java.net.URI; import java.util.Collections; import java.util.List; -import java.util.function.Consumer; /** * Base class for resolving term occurrences in an annotated document. @@ -50,7 +49,7 @@ protected TermOccurrenceResolver(TermRepositoryService termService) { * Parses the specified input into some abstract representation from which new terms and term occurrences can be * extracted. *

- * Note that this method has to be called before calling {@link #findTermOccurrences(Consumer)}. + * Note that this method has to be called before calling {@link #findTermOccurrences(OccurrenceConsumer)}. * * @param input The input to parse * @param source Original source of the input. Used for term occurrence generation diff --git a/src/main/java/cz/cvut/kbss/termit/service/document/TextAnalysisService.java b/src/main/java/cz/cvut/kbss/termit/service/document/TextAnalysisService.java index adc9dfdae..6ef927e72 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/document/TextAnalysisService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/document/TextAnalysisService.java @@ -20,11 +20,15 @@ import cz.cvut.kbss.termit.dto.TextAnalysisInput; import cz.cvut.kbss.termit.event.FileTextAnalysisFinishedEvent; import cz.cvut.kbss.termit.event.TermDefinitionTextAnalysisFinishedEvent; +import cz.cvut.kbss.termit.exception.TermItException; +import cz.cvut.kbss.termit.exception.UnsupportedTextAnalysisLanguageException; import cz.cvut.kbss.termit.exception.WebServiceIntegrationException; import cz.cvut.kbss.termit.model.AbstractTerm; +import cz.cvut.kbss.termit.model.Asset; import cz.cvut.kbss.termit.model.TextAnalysisRecord; import cz.cvut.kbss.termit.model.resource.File; import cz.cvut.kbss.termit.persistence.dao.TextAnalysisRecordDao; +import cz.cvut.kbss.termit.rest.handler.ErrorInfo; import cz.cvut.kbss.termit.util.Configuration; import cz.cvut.kbss.termit.util.Utils; import cz.cvut.kbss.termit.util.throttle.Throttle; @@ -32,20 +36,24 @@ import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationEventPublisher; +import org.springframework.core.ParameterizedTypeReference; import org.springframework.core.io.Resource; import org.springframework.http.HttpEntity; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpMethod; +import 
org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; +import org.springframework.web.client.HttpClientErrorException; import org.springframework.web.client.RestTemplate; import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.util.HashSet; +import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.Set; @@ -67,6 +75,8 @@ public class TextAnalysisService { private final ApplicationEventPublisher eventPublisher; + private Set supportedLanguages; + @Autowired public TextAnalysisService(RestTemplate restClient, Configuration config, DocumentManager documentManager, AnnotationGenerator annotationGenerator, TextAnalysisRecordDao recordDao, @@ -107,7 +117,7 @@ private TextAnalysisInput createAnalysisInput(File file) { publicUrl.isEmpty() || publicUrl.get().isEmpty() ? config.getRepository().getUrl() : publicUrl.get() ); input.setVocabularyRepository(repositoryUrl); - input.setLanguage(config.getPersistence().getLanguage()); + input.setLanguage(file.getLanguage() != null ? 
file.getLanguage() : config.getPersistence().getLanguage()); input.setVocabularyRepositoryUserName(config.getRepository().getUsername()); input.setVocabularyRepositoryPassword(config.getRepository().getPassword()); return input; @@ -126,6 +136,8 @@ private void invokeTextAnalysisOnFile(File file, TextAnalysisInput input) { storeTextAnalysisRecord(file, input); } catch (WebServiceIntegrationException e) { throw e; + } catch (HttpClientErrorException e) { + throw handleTextAnalysisInvocationClientException(e, file); } catch (RuntimeException e) { throw new WebServiceIntegrationException("Text analysis invocation failed.", e); } catch (IOException e) { @@ -140,11 +152,10 @@ private Optional invokeTextAnalysisService(TextAnalysisInput input) { return Optional.empty(); } final HttpHeaders headers = new HttpHeaders(); - headers.add(HttpHeaders.ACCEPT, MediaType.APPLICATION_XML_VALUE); - LOG.debug("Invoking text analysis service at '{}' on input: {}", config.getTextAnalysis().getUrl(), input); - final ResponseEntity resp = restClient - .exchange(config.getTextAnalysis().getUrl(), HttpMethod.POST, - new HttpEntity<>(input, headers), Resource.class); + headers.addAll(HttpHeaders.ACCEPT, List.of(MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE)); + LOG.debug("Invoking text analysis service at '{}' on input: {}", taUrl, input); + final ResponseEntity resp = restClient.exchange(taUrl, HttpMethod.POST, + new HttpEntity<>(input, headers), Resource.class); if (!resp.hasBody()) { throw new WebServiceIntegrationException("Text analysis service returned empty response."); } @@ -156,11 +167,21 @@ private void storeTextAnalysisRecord(File file, TextAnalysisInput config) { LOG.trace("Creating record of text analysis event for file {}.", file); assert config.getVocabularyContexts() != null; - final TextAnalysisRecord record = new TextAnalysisRecord(Utils.timestamp(), file); + final TextAnalysisRecord record = new TextAnalysisRecord(Utils.timestamp(), file, 
config.getLanguage()); record.setVocabularies(new HashSet<>(config.getVocabularyContexts())); recordDao.persist(record); } + private TermItException handleTextAnalysisInvocationClientException(HttpClientErrorException ex, Asset asset) { + if (ex.getStatusCode() == HttpStatus.CONFLICT) { + final ErrorInfo errorInfo = ex.getResponseBodyAs(ErrorInfo.class); + if (errorInfo != null && errorInfo.getMessage().contains("language")) { + throw new UnsupportedTextAnalysisLanguageException(errorInfo.getMessage(),asset); + } + } + throw new WebServiceIntegrationException("Text analysis invocation failed.", ex); + } + /** * Gets the latest {@link TextAnalysisRecord} for the specified Resource. * @@ -205,10 +226,56 @@ private void invokeTextAnalysisOnTerm(AbstractTerm term, TextAnalysisInput input } } catch (WebServiceIntegrationException e) { throw e; + } catch (HttpClientErrorException e) { + throw handleTextAnalysisInvocationClientException(e, term); } catch (RuntimeException e) { throw new WebServiceIntegrationException("Text analysis invocation failed.", e); } catch (IOException e) { throw new WebServiceIntegrationException("Unable to read text analysis result from response.", e); } } + + /** + * Checks whether the text analysis service supports the language of the specified file. + *

+ * If the text analysis service does not provide endpoint for getting supported languages (or it is not configured), + * it is assumed that any language is supported. + *

+ * If the file does not have language set, it is assumed that it is supported as well. + * + * @param file File to be analyzed + * @return {@code true} if the file language is supported, {@code false} otherwise + */ + public boolean supportsLanguage(File file) { + Objects.requireNonNull(file); + return file.getLanguage() == null || getSupportedLanguages().isEmpty() || getSupportedLanguages().contains( + file.getLanguage()); + } + + private synchronized Set getSupportedLanguages() { + if (supportedLanguages != null) { + return supportedLanguages; + } + final String languagesEndpointUrl = config.getTextAnalysis().getLanguagesUrl(); + if (languagesEndpointUrl == null || languagesEndpointUrl.isBlank()) { + LOG.warn( + "Text analysis service languages endpoint URL not configured. Assuming any language is supported."); + this.supportedLanguages = Set.of(); + } else { + try { + LOG.debug("Getting list of supported languages from text analysis service at '{}'.", + languagesEndpointUrl); + ResponseEntity> response = restClient.exchange(languagesEndpointUrl, HttpMethod.GET, null, + new ParameterizedTypeReference<>() { + }); + this.supportedLanguages = response.getBody(); + LOG.trace("Text analysis supported languages: {}", supportedLanguages); + } catch (RuntimeException e) { + LOG.error("Unable to get list of supported languages from text analysis service at '{}'.", + languagesEndpointUrl, e); + this.supportedLanguages = Set.of(); + } + } + return supportedLanguages; + } } diff --git a/src/main/java/cz/cvut/kbss/termit/service/importer/VocabularyImporter.java b/src/main/java/cz/cvut/kbss/termit/service/importer/VocabularyImporter.java index b82b49a0d..bd71b8375 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/importer/VocabularyImporter.java +++ b/src/main/java/cz/cvut/kbss/termit/service/importer/VocabularyImporter.java @@ -2,7 +2,7 @@ import cz.cvut.kbss.termit.exception.importing.VocabularyExistsException; import cz.cvut.kbss.termit.model.Vocabulary; -import 
jakarta.validation.constraints.NotNull; +import jakarta.annotation.Nonnull; import java.io.InputStream; import java.net.URI; @@ -26,7 +26,21 @@ public interface VocabularyImporter { * @throws IllegalArgumentException Indicates invalid input data, e.g., no input streams, missing language tags * etc. */ - Vocabulary importVocabulary(@NotNull ImportConfiguration config, @NotNull ImportInput data); + Vocabulary importVocabulary(@Nonnull ImportConfiguration config, @Nonnull ImportInput data); + + /** + * Imports term translations from the specified data into the specified vocabulary. + *

+ * Only translations of existing terms are imported, no new terms are created. Only translations of multilingual + * attributes are imported. If a value in the specified language exists in the repository, it is preserved. + * + * @param vocabularyIri Vocabulary identifier + * @param data Data to import + * @return Vocabulary whose content was changed + * @throws IllegalArgumentException Indicates invalid input data, e.g., no input streams, missing language tags + * etc. + */ + Vocabulary importTermTranslations(@Nonnull URI vocabularyIri, @Nonnull ImportInput data); /** * Vocabulary import configuration. @@ -38,7 +52,7 @@ public interface VocabularyImporter { * @param prePersist Procedure to call before persisting the resulting vocabulary */ record ImportConfiguration(boolean allowReIdentify, URI vocabularyIri, - @NotNull Consumer prePersist) { + @Nonnull Consumer prePersist) { } /** @@ -47,6 +61,6 @@ record ImportConfiguration(boolean allowReIdentify, URI vocabularyIri, * @param mediaType Media type of the imported data * @param data Streams containing the data */ - record ImportInput(@NotNull String mediaType, InputStream... data) { + record ImportInput(@Nonnull String mediaType, InputStream... data) { } } diff --git a/src/main/java/cz/cvut/kbss/termit/service/importer/VocabularyImporters.java b/src/main/java/cz/cvut/kbss/termit/service/importer/VocabularyImporters.java index 5eb792580..1859ba7bb 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/importer/VocabularyImporters.java +++ b/src/main/java/cz/cvut/kbss/termit/service/importer/VocabularyImporters.java @@ -8,6 +8,8 @@ import org.springframework.context.ApplicationContext; import org.springframework.stereotype.Component; +import java.net.URI; + /** * Ensures correct importer is invoked for provided media types. 
*/ @@ -22,14 +24,22 @@ public VocabularyImporters(ApplicationContext appContext) { @Override public Vocabulary importVocabulary(@Nonnull ImportConfiguration config, @Nonnull ImportInput data) { - if (SKOSImporter.supportsMediaType(data.mediaType())) { - return getSkosImporter().importVocabulary(config, data); - } - if (ExcelImporter.supportsMediaType(data.mediaType())) { - return getExcelImporter().importVocabulary(config, data); + return resolveImporter(data.mediaType()).importVocabulary(config, data); + } + + private VocabularyImporter resolveImporter(String mediaType) { + if (SKOSImporter.supportsMediaType(mediaType)) { + return getSkosImporter(); + } else if (ExcelImporter.supportsMediaType(mediaType)) { + return getExcelImporter(); } throw new UnsupportedImportMediaTypeException( - "Unsupported media type '" + data.mediaType() + "' for vocabulary import."); + "Unsupported media type '" + mediaType + "' for vocabulary import."); + } + + @Override + public Vocabulary importTermTranslations(@Nonnull URI vocabularyIri, @Nonnull ImportInput data) { + return resolveImporter(data.mediaType()).importTermTranslations(vocabularyIri, data); } private VocabularyImporter getSkosImporter() { diff --git a/src/main/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporter.java b/src/main/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporter.java index 1a8ed5f68..5de0002d8 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporter.java +++ b/src/main/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporter.java @@ -1,6 +1,7 @@ package cz.cvut.kbss.termit.service.importer.excel; import cz.cvut.kbss.jopa.model.EntityManager; +import cz.cvut.kbss.jopa.model.MultilingualString; import cz.cvut.kbss.termit.exception.NotFoundException; import cz.cvut.kbss.termit.exception.importing.VocabularyDoesNotExistException; import cz.cvut.kbss.termit.exception.importing.VocabularyImportException; @@ -21,6 +22,7 @@ import 
org.apache.poi.ss.usermodel.Sheet; import org.apache.poi.ss.usermodel.Workbook; import org.apache.poi.xssf.usermodel.XSSFWorkbook; +import org.jetbrains.annotations.NotNull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.config.ConfigurableBeanFactory; @@ -92,7 +94,7 @@ public ExcelImporter(VocabularyDao vocabularyDao, TermRepositoryService termServ } @Override - public Vocabulary importVocabulary(ImportConfiguration config, ImportInput data) { + public Vocabulary importVocabulary(@Nonnull ImportConfiguration config, @Nonnull ImportInput data) { Objects.requireNonNull(config); Objects.requireNonNull(data); if (config.vocabularyIri() == null || !vocabularyDao.exists(config.vocabularyIri())) { @@ -100,6 +102,7 @@ public Vocabulary importVocabulary(ImportConfiguration config, ImportInput data) } final Vocabulary targetVocabulary = vocabularyDao.find(config.vocabularyIri()).orElseThrow( () -> NotFoundException.create(Vocabulary.class, config.vocabularyIri())); + LOG.debug("Importing terms from Excel into vocabulary {}.", targetVocabulary); try { List terms = Collections.emptyList(); Set rawDataToInsert = new HashSet<>(); @@ -119,43 +122,8 @@ public Vocabulary importVocabulary(ImportConfiguration config, ImportInput data) terms = sheetImporter.resolveTermsFromSheet(sheet); rawDataToInsert.addAll(sheetImporter.getRawDataToInsert()); } - terms.stream().peek(t -> t.setUri(resolveTermIdentifier(targetVocabulary, t))) - .peek(t -> t.getLabel().getValue().forEach((lang, value) -> { - final Optional existingUri = termService.findIdentifierByLabel(value, - targetVocabulary, - lang); - if (existingUri.isPresent() && !existingUri.get().equals(t.getUri())) { - throw new VocabularyImportException( - "Vocabulary already contains a term with label '" + value + "' with a different identifier than the imported one.", - "error.vocabulary.import.excel.labelWithDifferentIdentifierExists") - .addParameter("label", value) - 
.addParameter("existingUri", Utils.uriToString(existingUri.get())); - } - })) - .filter(t -> termService.exists(t.getUri())).forEach(t -> { - LOG.trace("Term {} already exists. Removing old version.", t); - termService.forceRemove(termService.findRequired(t.getUri())); - // Flush changes to prevent EntityExistsExceptions when term is already managed in PC as different type (Term vs TermInfo) - em.flush(); - }); - // Ensure all parents are saved before we start adding children - terms.stream().filter(t -> Utils.emptyIfNull(t.getParentTerms()).isEmpty()) - .forEach(root -> { - LOG.trace("Persisting root term {}.", root); - termService.addRootTermToVocabulary(root, targetVocabulary); - root.setVocabulary(targetVocabulary.getUri()); - }); - terms.stream().filter(t -> !Utils.emptyIfNull(t.getParentTerms()).isEmpty()) - .forEach(t -> { - t.setVocabulary(targetVocabulary.getUri()); - LOG.trace("Persisting child term {}.", t); - termService.addChildTerm(t, t.getParentTerms().iterator().next()); - }); - // Insert term relationships as raw data because of possible object conflicts in the persistence context - - // the same term being as multiple types (Term, TermInfo) in the same persistence context - dataDao.insertRawData(rawDataToInsert.stream().map(tr -> new Quad(tr.subject().getUri(), tr.property(), - tr.object().getUri(), - targetVocabulary.getUri())).toList()); + prepareTermsForPersist(terms, targetVocabulary); + persistNewTerms(terms, targetVocabulary, rawDataToInsert); } } catch (IOException e) { throw new VocabularyImportException("Unable to read input as Excel.", e); @@ -174,30 +142,17 @@ private PrefixMap resolvePrefixMap(Workbook excel) { } /** - * Resolves namespace for identifiers of terms in the specified vocabulary. - *

- * It uses the vocabulary identifier and the configured term namespace separator. - * - * @param vocabulary Vocabulary whose term identifier namespace to resolve - * @return Resolved namespace - */ - private String resolveVocabularyTermNamespace(Vocabulary vocabulary) { - return idResolver.buildNamespace(vocabulary.getUri().toString(), - config.getNamespace().getTerm().getSeparator()); - } - - /** - * Resolves term identifier. + * Resolves term identifier w.r.t. the target vocabulary. *

* If the term does not have an identifier, it is generated so that existing instance can be removed before * inserting the imported term. If the term has an identifier, but it does not match the expected vocabulary-based * namespace, it is adjusted so that it does. Otherwise, the identifier is used. * - * @param vocabulary Vocabulary into which the term will be added * @param term The imported term + * @param vocabulary Vocabulary into which the term will be added * @return Term identifier */ - private URI resolveTermIdentifier(Vocabulary vocabulary, Term term) { + private URI resolveTermIdentifierWrtVocabulary(Term term, Vocabulary vocabulary) { final String termNamespace = resolveVocabularyTermNamespace(vocabulary); if (term.getUri() == null) { return idResolver.generateDerivedIdentifier(vocabulary.getUri(), @@ -215,6 +170,173 @@ private URI resolveTermIdentifier(Vocabulary vocabulary, Term term) { return term.getUri(); } + /** + * Resolves namespace for identifiers of terms in the specified vocabulary. + *

+ * It uses the vocabulary identifier and the configured term namespace separator. + * + * @param vocabulary Vocabulary whose term identifier namespace to resolve + * @return Resolved namespace + */ + private String resolveVocabularyTermNamespace(Vocabulary vocabulary) { + return idResolver.buildNamespace(vocabulary.getUri().toString(), + config.getNamespace().getTerm().getSeparator()); + } + + /** + * Prepares terms for persist by: + *

    + *
  • Resolving their identifiers and harmonizing them with vocabulary namespace
  • + *
  • Removing possibly pre-existing terms
  • + *
+ * + * @param terms Terms to process + * @param targetVocabulary Target vocabulary + */ + private void prepareTermsForPersist(List terms, Vocabulary targetVocabulary) { + terms.stream().peek(t -> t.setUri(resolveTermIdentifierWrtVocabulary(t, targetVocabulary))) + .peek(t -> t.getLabel().getValue().forEach((lang, value) -> { + final Optional existingUri = termService.findIdentifierByLabel(value, + targetVocabulary, + lang); + if (existingUri.isPresent() && !existingUri.get().equals(t.getUri())) { + throw new VocabularyImportException( + "Vocabulary already contains a term with label '" + value + "' with a different identifier than the imported one.", + "error.vocabulary.import.excel.labelWithDifferentIdentifierExists") + .addParameter("label", value) + .addParameter("existingUri", Utils.uriToString(existingUri.get())); + } + })) + .filter(t -> termService.exists(t.getUri())).forEach(t -> { + LOG.trace("Term {} already exists. Removing old version.", t); + termService.forceRemove(termService.findRequired(t.getUri())); + // Flush changes to prevent EntityExistsExceptions when term is already managed in PC as different type (Term vs TermInfo) + em.flush(); + }); + } + + private void persistNewTerms(List terms, Vocabulary targetVocabulary, Set rawDataToInsert) { + // Ensure all parents are saved before we start adding children + terms.stream().filter(t -> Utils.emptyIfNull(t.getParentTerms()).isEmpty()) + .forEach(root -> { + LOG.trace("Persisting root term {}.", root); + termService.addRootTermToVocabulary(root, targetVocabulary); + root.setVocabulary(targetVocabulary.getUri()); + }); + terms.stream().filter(t -> !Utils.emptyIfNull(t.getParentTerms()).isEmpty()) + .forEach(t -> { + t.setVocabulary(targetVocabulary.getUri()); + LOG.trace("Persisting child term {}.", t); + termService.addChildTerm(t, t.getParentTerms().iterator().next()); + }); + // Insert term relationships as raw data because of possible object conflicts in the persistence context - + // the same 
term being as multiple types (Term, TermInfo) in the same persistence context + dataDao.insertRawData(rawDataToInsert.stream().map(tr -> new Quad(tr.subject().getUri(), tr.property(), + tr.object().getUri(), + targetVocabulary.getUri())).toList()); + } + + @Override + public Vocabulary importTermTranslations(@Nonnull URI vocabularyIri, @Nonnull ImportInput data) { + Objects.requireNonNull(vocabularyIri); + Objects.requireNonNull(data); + final Vocabulary targetVocabulary = vocabularyDao.find(vocabularyIri).orElseThrow( + () -> NotFoundException.create(Vocabulary.class, vocabularyIri)); + LOG.debug("Importing translations for terms in vocabulary {}.", vocabularyIri); + try { + final List terms = readTermsFromSheet(data); + terms.forEach(t -> { + identifyTermByLabelIfNecessary(t, targetVocabulary); + final Optional existingTerm = termService.find(t.getUri()); + if (existingTerm.isEmpty() || !existingTerm.get().getVocabulary().equals(vocabularyIri)) { + LOG.warn( + "Term with identifier '{}' not found in vocabulary '{}'. 
Skipping record resolved from Excel file.", + t.getUri(), vocabularyIri); + return; + } + mergeTranslations(t, existingTerm.get()); + termService.update(existingTerm.get()); + // Flush changes to prevent EntityExistsExceptions when term is already managed in PC as different type (Term vs TermInfo) + em.flush(); + }); + } catch (IOException e) { + throw new VocabularyImportException("Unable to read input as Excel.", e); + } + return targetVocabulary; + } + + private void identifyTermByLabelIfNecessary(Term t, Vocabulary targetVocabulary) { + if (t.getUri() == null) { + final String termLabel = t.getLabel().get(config.getPersistence().getLanguage()); + if (termLabel == null) { + throw new VocabularyImportException( + "Unable to identify terms in Excel - it contains neither term identifiers nor labels in primary language.", + "error.vocabulary.import.excel.missingIdentifierOrLabel"); + } + t.setUri(idResolver.generateDerivedIdentifier(targetVocabulary.getUri(), + config.getNamespace().getTerm().getSeparator(), + termLabel)); + } + } + + private List readTermsFromSheet(@NotNull ImportInput data) throws IOException { + List terms = Collections.emptyList(); + for (InputStream input : data.data()) { + final Workbook workbook = new XSSFWorkbook(input); + assert workbook.getNumberOfSheets() > 0; + PrefixMap prefixMap = resolvePrefixMap(workbook); + for (int i = 0; i < workbook.getNumberOfSheets(); i++) { + final Sheet sheet = workbook.getSheetAt(i); + if (ExcelVocabularyExporter.PREFIX_SHEET_NAME.equals(sheet.getSheetName())) { + // Skip already processed prefix sheet + continue; + } + final LocalizedSheetImporter sheetImporter = new LocalizedSheetImporter( + new LocalizedSheetImporter.Services(termService, languageService), + prefixMap, terms); + terms = sheetImporter.resolveTermsFromSheet(sheet); + } + } + return terms; + } + + private void mergeTranslations(Term source, Term target) { + target.setLabel(mergeSingularTranslations(source.getLabel(), target.getLabel())); + 
target.setDefinition(mergeSingularTranslations(source.getDefinition(), target.getDefinition())); + target.setDescription(mergeSingularTranslations(source.getDescription(), target.getDescription())); + assert target.getAltLabels() != null; + mergePluralTranslations(source.getAltLabels(), target.getAltLabels()); + assert target.getHiddenLabels() != null; + mergePluralTranslations(source.getHiddenLabels(), target.getHiddenLabels()); + assert target.getExamples() != null; + mergePluralTranslations(source.getExamples(), target.getExamples()); + } + + private MultilingualString mergeSingularTranslations(MultilingualString source, MultilingualString target) { + if (target == null) { + return source; + } + if (source == null) { + return target; + } + source.getValue().forEach((lang, value) -> { + if (!target.contains(lang)) { + target.set(lang, value); + } + }); + return target; + } + + private void mergePluralTranslations(Set source, Set target) { + if (Utils.emptyIfNull(source).isEmpty()) { + return; + } + // Remove just the existing language values + target.forEach(t -> t.getLanguages().forEach(lang -> source.forEach(mls -> mls.remove(lang)))); + // Add the remainder + target.addAll(source.stream().filter(mls -> !mls.isEmpty()).toList()); + } + /** * Checks whether this importer supports the specified media type. 
* diff --git a/src/main/java/cz/cvut/kbss/termit/service/importer/excel/LocalizedSheetImporter.java b/src/main/java/cz/cvut/kbss/termit/service/importer/excel/LocalizedSheetImporter.java index 67187fc3e..17ba9dc02 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/importer/excel/LocalizedSheetImporter.java +++ b/src/main/java/cz/cvut/kbss/termit/service/importer/excel/LocalizedSheetImporter.java @@ -84,7 +84,7 @@ class LocalizedSheetImporter { * @return Terms resolved from the sheet */ List resolveTermsFromSheet(Sheet sheet) { - LOG.debug("Importing terms from sheet '{}'.", sheet.getSheetName()); + LOG.debug("Reading terms from sheet '{}'.", sheet.getSheetName()); this.rawDataToInsert = new ArrayList<>(); final Optional lang = resolveLanguage(sheet); if (lang.isEmpty()) { diff --git a/src/main/java/cz/cvut/kbss/termit/service/mail/Postman.java b/src/main/java/cz/cvut/kbss/termit/service/mail/Postman.java index cb66781e9..04cd590ce 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/mail/Postman.java +++ b/src/main/java/cz/cvut/kbss/termit/service/mail/Postman.java @@ -19,6 +19,7 @@ import cz.cvut.kbss.termit.exception.PostmanException; import cz.cvut.kbss.termit.exception.ValidationException; +import cz.cvut.kbss.termit.util.Utils; import jakarta.mail.MessagingException; import jakarta.mail.internet.InternetAddress; import jakarta.mail.internet.MimeMessage; @@ -65,7 +66,12 @@ public Postman(Environment env, @Autowired(required = false) JavaMailSender mail @PostConstruct public void postConstruct() { - if(mailSender == null) { + if (mailSender == null) { + if (Utils.isDevelopmentProfile(env.getActiveProfiles())) { + LOG.warn( + "Mail server not configured but running in development mode. 
Will not be able to send messages."); + return; + } throw new ValidationException("Mail server not configured."); } } @@ -86,7 +92,8 @@ public void sendMessage(Message message) { final MimeMessage mail = mailSender.createMimeMessage(); final MimeMessageHelper helper = new MimeMessageHelper(mail, true); - helper.setFrom(new InternetAddress(sender != null ? sender : senderUsername, FROM_NICKNAME, StandardCharsets.UTF_8.toString())); + helper.setFrom(new InternetAddress(sender != null ? sender : senderUsername, FROM_NICKNAME, + StandardCharsets.UTF_8.toString())); helper.setTo(message.getRecipients().toArray(new String[]{})); helper.setSubject(message.getSubject()); helper.setText(message.getContent(), true); diff --git a/src/main/java/cz/cvut/kbss/termit/service/repository/ChangeRecordService.java b/src/main/java/cz/cvut/kbss/termit/service/repository/ChangeRecordService.java index 6dc0c6ad8..5dc24350b 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/repository/ChangeRecordService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/repository/ChangeRecordService.java @@ -17,6 +17,8 @@ */ package cz.cvut.kbss.termit.service.repository; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; +import cz.cvut.kbss.termit.model.Asset; import cz.cvut.kbss.termit.model.User; import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; import cz.cvut.kbss.termit.model.util.HasIdentifier; @@ -29,7 +31,7 @@ import java.util.Set; @Service -public class ChangeRecordService implements ChangeRecordProvider { +public class ChangeRecordService implements ChangeRecordProvider> { private final ChangeRecordDao changeRecordDao; @@ -39,8 +41,8 @@ public ChangeRecordService(ChangeRecordDao changeRecordDao) { } @Override - public List getChanges(HasIdentifier asset) { - return changeRecordDao.findAll(asset); + public List getChanges(Asset asset, ChangeRecordFilterDto filterDto) { + return changeRecordDao.findAll(asset, filterDto); } /** diff --git 
a/src/main/java/cz/cvut/kbss/termit/service/repository/TermOccurrenceRepositoryService.java b/src/main/java/cz/cvut/kbss/termit/service/repository/TermOccurrenceRepositoryService.java index 88940766e..104cac82a 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/repository/TermOccurrenceRepositoryService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/repository/TermOccurrenceRepositoryService.java @@ -30,6 +30,7 @@ import org.springframework.retry.annotation.Retryable; import org.springframework.scheduling.annotation.Async; import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -58,6 +59,7 @@ public TermOccurrenceRepositoryService(TermOccurrenceDao termOccurrenceDao, Term this.resourceService = resourceService; } + @PreAuthorize("@termOccurrenceAuthorizationService.canModify(#occurrence)") @Transactional @Override public void persist(TermOccurrence occurrence) { @@ -78,6 +80,7 @@ private void checkTermExists(TermOccurrence occurrence) { } } + @PreAuthorize("@termOccurrenceAuthorizationService.canModify(#occurrence)") @Transactional @Override public void persistOrUpdate(TermOccurrence occurrence) { @@ -95,6 +98,7 @@ public void persistOrUpdate(TermOccurrence occurrence) { } } + @PreAuthorize("@termOccurrenceAuthorizationService.canModify(#occurrenceId)") @Async // Retry in case the occurrence has not been persisted, yet (see AsynchronousTermOccurrenceSaver) @Retryable(retryFor = NotFoundException.class, maxAttempts = 3, backoff = @Backoff(delay = 30000L)) @@ -108,6 +112,7 @@ public void approve(URI occurrenceId) { toApprove.markApproved(); } + @PreAuthorize("@termOccurrenceAuthorizationService.canModify(#occurrenceId)") @Transactional @Override public void remove(URI occurrenceId) { diff --git a/src/main/java/cz/cvut/kbss/termit/service/repository/TermRepositoryService.java 
b/src/main/java/cz/cvut/kbss/termit/service/repository/TermRepositoryService.java index 15b11b1f8..488b3edb3 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/repository/TermRepositoryService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/repository/TermRepositoryService.java @@ -392,6 +392,17 @@ public List getDefinitionallyRelatedOf(Term instance) { return termOccurrenceDao.findAllDefinitionalOf(instance); } + /** + * Gets the identifier of a vocabulary to which a term with the specified id belongs. + * + * @param termId Term identifier + * @return Vocabulary identifier wrapped in {@code Optional} + */ + @Transactional(readOnly = true) + public Optional findTermVocabulary(URI termId) { + return termDao.findTermVocabulary(termId); + } + /** * Checks that a term can be removed. *

diff --git a/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java b/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java index 6be0b86d4..55efa3c65 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/repository/VocabularyRepositoryService.java @@ -21,6 +21,7 @@ import cz.cvut.kbss.termit.dto.PrefixDeclaration; import cz.cvut.kbss.termit.dto.RdfsStatement; import cz.cvut.kbss.termit.dto.Snapshot; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.dto.listing.VocabularyDto; import cz.cvut.kbss.termit.dto.mapper.DtoMapper; import cz.cvut.kbss.termit.exception.AssetRemovalException; @@ -42,7 +43,7 @@ import cz.cvut.kbss.termit.util.Configuration; import cz.cvut.kbss.termit.util.Constants; import cz.cvut.kbss.termit.util.Utils; -import cz.cvut.kbss.termit.util.throttle.CacheableFuture; +import cz.cvut.kbss.termit.util.throttle.ThrottledFuture; import cz.cvut.kbss.termit.workspace.EditableVocabularies; import jakarta.annotation.Nonnull; import jakarta.validation.Validator; @@ -228,8 +229,8 @@ public List getChangesOfContent(Vocabulary vocabulary) { * @return List of change records, ordered by date in descending order */ @Transactional(readOnly = true) - public List getDetailedHistoryOfContent(Vocabulary vocabulary, Pageable pageReq) { - return vocabularyDao.getDetailedHistoryOfContent(vocabulary, pageReq); + public List getDetailedHistoryOfContent(Vocabulary vocabulary, ChangeRecordFilterDto filter, Pageable pageReq) { + return vocabularyDao.getDetailedHistoryOfContent(vocabulary, filter, pageReq); } @CacheEvict(allEntries = true) @@ -244,7 +245,7 @@ public Vocabulary importVocabulary(boolean rename, MultipartFile file) { } catch (VocabularyImportException e) { throw e; } catch (Exception e) { - throw new VocabularyImportException("Unable to import vocabulary, because of: " + 
e.getMessage()); + throw new VocabularyImportException("Unable to import vocabulary. Cause: " + e.getMessage()); } } @@ -258,6 +259,7 @@ private static String resolveContentType(MultipartFile file) throws IOException @CacheEvict(allEntries = true) @Transactional public Vocabulary importVocabulary(URI vocabularyIri, MultipartFile file) { + Objects.requireNonNull(vocabularyIri); Objects.requireNonNull(file); try { String contentType = resolveContentType(file); @@ -267,7 +269,21 @@ public Vocabulary importVocabulary(URI vocabularyIri, MultipartFile file) { } catch (VocabularyImportException e) { throw e; } catch (Exception e) { - throw new VocabularyImportException("Unable to import vocabulary, because of: " + e.getMessage(), e); + throw new VocabularyImportException("Unable to import vocabulary. Cause: " + e.getMessage(), e); + } + } + + @Transactional + public Vocabulary importTermTranslations(URI vocabularyIri, MultipartFile file) { + Objects.requireNonNull(vocabularyIri); + Objects.requireNonNull(file); + try { + String contentType = resolveContentType(file); + return importers.importTermTranslations(vocabularyIri, new VocabularyImporter.ImportInput(contentType, file.getInputStream())); + } catch (VocabularyImportException e) { + throw e; + } catch (Exception e) { + throw new VocabularyImportException("Unable to import vocabulary. Cause: " + e.getMessage(), e); } } @@ -334,7 +350,7 @@ private void ensureNoTermRelationsExists(Vocabulary vocabulary) throws AssetRemo } } - public CacheableFuture> validateContents(URI vocabulary) { + public ThrottledFuture> validateContents(URI vocabulary) { return vocabularyDao.validateContents(vocabulary); } @@ -372,4 +388,15 @@ public Vocabulary findVersionValidAt(Vocabulary vocabulary, Instant at) { public PrefixDeclaration resolvePrefix(URI vocabularyUri) { return vocabularyDao.resolvePrefix(vocabularyUri); } + + /** + * Returns the list of all distinct languages (language tags) used by terms in the specified vocabulary. 
+ * + * @param vocabularyUri Vocabulary identifier + * @return List of distinct languages + */ + @Transactional(readOnly = true) + public List getLanguages(URI vocabularyUri) { + return vocabularyDao.getLanguages(vocabularyUri); + } } diff --git a/src/main/java/cz/cvut/kbss/termit/service/security/authorization/ResourceAuthorizationService.java b/src/main/java/cz/cvut/kbss/termit/service/security/authorization/ResourceAuthorizationService.java index 631790307..f0152280a 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/security/authorization/ResourceAuthorizationService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/security/authorization/ResourceAuthorizationService.java @@ -49,11 +49,10 @@ public boolean canModify(Resource asset) { } private Optional resolveVocabulary(Resource resource) { - if (resource instanceof Document) { - final URI vocIri = ((Document) resource).getVocabulary(); + if (resource instanceof Document document) { + final URI vocIri = document.getVocabulary(); return vocIri != null ? Optional.of(new Vocabulary(vocIri)) : Optional.empty(); - } else if (resource instanceof File) { - final File f = (File) resource; + } else if (resource instanceof File f) { return f.getDocument() != null ? 
getDocumentVocabulary(f.getDocument()) : Optional.empty(); } return Optional.empty(); diff --git a/src/main/java/cz/cvut/kbss/termit/service/security/authorization/TermOccurrenceAuthorizationService.java b/src/main/java/cz/cvut/kbss/termit/service/security/authorization/TermOccurrenceAuthorizationService.java new file mode 100644 index 000000000..f3c063cd0 --- /dev/null +++ b/src/main/java/cz/cvut/kbss/termit/service/security/authorization/TermOccurrenceAuthorizationService.java @@ -0,0 +1,61 @@ +package cz.cvut.kbss.termit.service.security.authorization; + +import cz.cvut.kbss.termit.model.Vocabulary; +import cz.cvut.kbss.termit.model.assignment.TermDefinitionalOccurrence; +import cz.cvut.kbss.termit.model.assignment.TermFileOccurrence; +import cz.cvut.kbss.termit.model.assignment.TermOccurrence; +import cz.cvut.kbss.termit.model.resource.Resource; +import cz.cvut.kbss.termit.persistence.dao.TermOccurrenceDao; +import cz.cvut.kbss.termit.service.repository.ResourceRepositoryService; +import cz.cvut.kbss.termit.service.repository.TermRepositoryService; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import java.net.URI; +import java.util.Objects; +import java.util.Optional; + +@Service +public class TermOccurrenceAuthorizationService { + + private final TermOccurrenceDao dao; + + private final TermRepositoryService termService; + + private final ResourceRepositoryService resourceService; + + private final VocabularyAuthorizationService vocabularyAuthorizationService; + + private final ResourceAuthorizationService resourceAuthorizationService; + + public TermOccurrenceAuthorizationService(TermOccurrenceDao dao, TermRepositoryService termService, + ResourceRepositoryService resourceService, + VocabularyAuthorizationService vocabularyAuthorizationService, + ResourceAuthorizationService resourceAuthorizationService) { + this.dao = dao; + this.termService = termService; + this.resourceService = 
resourceService; + this.vocabularyAuthorizationService = vocabularyAuthorizationService; + this.resourceAuthorizationService = resourceAuthorizationService; + } + + @Transactional(readOnly = true) + public boolean canModify(TermOccurrence occurrence) { + Objects.requireNonNull(occurrence); + if (occurrence instanceof TermDefinitionalOccurrence definitionalOccurrence) { + final Optional vocabularyUri = termService.findTermVocabulary( + definitionalOccurrence.getTarget().getSource()); + return vocabularyUri.map(vUri -> vocabularyAuthorizationService.canModify(new Vocabulary(vUri))) + .orElse(false); + } else { + final TermFileOccurrence fo = (TermFileOccurrence) occurrence; + final Optional file = resourceService.find(fo.getTarget().getSource()); + return file.map(resourceAuthorizationService::canModify).orElse(false); + } + } + + @Transactional(readOnly = true) + public boolean canModify(URI occurrenceId) { + return dao.find(occurrenceId).map(this::canModify).orElse(false); + } +} diff --git a/src/main/java/cz/cvut/kbss/termit/service/security/authorization/VocabularyAuthorizationService.java b/src/main/java/cz/cvut/kbss/termit/service/security/authorization/VocabularyAuthorizationService.java index 37f99ff3d..777f7413c 100644 --- a/src/main/java/cz/cvut/kbss/termit/service/security/authorization/VocabularyAuthorizationService.java +++ b/src/main/java/cz/cvut/kbss/termit/service/security/authorization/VocabularyAuthorizationService.java @@ -165,6 +165,12 @@ public boolean canRead(VocabularyDto dto) { return canRead(new Vocabulary(dto.getUri())); } + public boolean canModify(URI vocabularyIri) { + Objects.requireNonNull(vocabularyIri); + final Vocabulary vocabulary = new Vocabulary(vocabularyIri); + return canModify(vocabulary); + } + @Override public boolean canModify(Vocabulary asset) { Objects.requireNonNull(asset); diff --git a/src/main/java/cz/cvut/kbss/termit/util/Configuration.java b/src/main/java/cz/cvut/kbss/termit/util/Configuration.java index 
8a655df59..4785f9eb6 100644 --- a/src/main/java/cz/cvut/kbss/termit/util/Configuration.java +++ b/src/main/java/cz/cvut/kbss/termit/util/Configuration.java @@ -673,6 +673,11 @@ public static class TextAnalysis { */ private String url; + /** + * URL of the endpoint providing list of languages supported by the text analysis service. + */ + private String languagesUrl; + /** * Score threshold for a term occurrence for it to be saved into the repository. */ @@ -693,6 +698,14 @@ public void setUrl(String url) { this.url = url; } + public String getLanguagesUrl() { + return languagesUrl; + } + + public void setLanguagesUrl(String languagesUrl) { + this.languagesUrl = languagesUrl; + } + public String getTermOccurrenceMinScore() { return termOccurrenceMinScore; } diff --git a/src/main/java/cz/cvut/kbss/termit/util/Constants.java b/src/main/java/cz/cvut/kbss/termit/util/Constants.java index 5d7ead6a9..7cb925992 100644 --- a/src/main/java/cz/cvut/kbss/termit/util/Constants.java +++ b/src/main/java/cz/cvut/kbss/termit/util/Constants.java @@ -153,6 +153,23 @@ public class Constants { "Notation", "Example", "References") ); + + /** + * the maximum amount of data to buffer when sending messages to a WebSocket session + */ + public static final int WEBSOCKET_SEND_BUFFER_SIZE_LIMIT = Integer.MAX_VALUE; + + /** + * Set the maximum time allowed in milliseconds after the WebSocket connection is established + * and before the first sub-protocol message is received. + */ + public static final int WEBSOCKET_TIME_TO_FIRST_MESSAGE = 15 * 1000 /* 15s */; + + /** + * Development Spring profile. 
+ */ + public static final String DEVELOPMENT_PROFILE = "development"; + private Constants() { throw new AssertionError(); } @@ -247,32 +264,4 @@ private QueryParams() { throw new AssertionError(); } } - - public static final class DebouncingGroups { - - /** - * Text analysis of all terms in specific vocabulary - */ - public static final String TEXT_ANALYSIS_VOCABULARY_TERMS_ALL_DEFINITIONS = "TEXT_ANALYSIS_VOCABULARY_TERMS_ALL_DEFINITIONS"; - - /** - * Text analysis of all vocabularies - */ - public static final String TEXT_ANALYSIS_VOCABULARY = "TEXT_ANALYSIS_VOCABULARY"; - - private DebouncingGroups() { - throw new AssertionError(); - } - } - - /** - * the maximum amount of data to buffer when sending messages to a WebSocket session - */ - public static final int WEBSOCKET_SEND_BUFFER_SIZE_LIMIT = Integer.MAX_VALUE; - - /** - * Set the maximum time allowed in milliseconds after the WebSocket connection is established - * and before the first sub-protocol message is received. - */ - public static final int WEBSOCKET_TIME_TO_FIRST_MESSAGE = 15 * 1000 /* 15s */; } diff --git a/src/main/java/cz/cvut/kbss/termit/util/Utils.java b/src/main/java/cz/cvut/kbss/termit/util/Utils.java index f8857028d..7adf76742 100644 --- a/src/main/java/cz/cvut/kbss/termit/util/Utils.java +++ b/src/main/java/cz/cvut/kbss/termit/util/Utils.java @@ -44,6 +44,7 @@ import java.time.Instant; import java.time.temporal.ChronoUnit; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; @@ -194,13 +195,20 @@ public static String getVocabularyIri(final Set conceptUris, String term if (conceptUris.isEmpty()) { throw new IllegalArgumentException("No namespace candidate."); } - final Iterator i = conceptUris.iterator(); - final String conceptUri = i.next(); + final String namespace = extractNamespace(termSeparator, conceptUri); + for (final String s : conceptUris) { + if (!s.startsWith(namespace)) { + throw new 
IllegalArgumentException( + "Not all Concept IRIs have the same namespace: " + conceptUri + " vs. " + namespace); + } + } + return namespace; + } + private static String extractNamespace(String termSeparator, String conceptUri) { final String separator; - if (conceptUri.lastIndexOf(termSeparator) > 0) { separator = termSeparator; } else if (conceptUri.lastIndexOf("#") > 0) { @@ -210,16 +218,7 @@ public static String getVocabularyIri(final Set conceptUris, String term } else { throw new IllegalArgumentException("The IRI does not have a proper format: " + conceptUri); } - - final String namespace = conceptUri.substring(0, conceptUri.lastIndexOf(separator)); - - for (final String s : conceptUris) { - if (!s.startsWith(namespace)) { - throw new IllegalArgumentException( - "Not all Concept IRIs have the same namespace: " + conceptUri + " vs. " + namespace); - } - } - return namespace; + return conceptUri.substring(0, conceptUri.lastIndexOf(separator)); } /** @@ -402,15 +401,25 @@ public static void pruneBlankTranslations(MultilingualString str) { /** * Converts the map into a string - * @return Empty string when the map is {@code null}, otherwise the String in format - * {@code {key=value, key=value}} + * + * @return Empty string when the map is {@code null}, otherwise the String in format {@code {key=value, key=value}} */ public static String mapToString(Map map) { if (map == null) { return ""; } return map.keySet().stream() - .map(key -> key + "=" + map.get(key)) - .collect(Collectors.joining(", ", "{", "}")); + .map(key -> key + "=" + map.get(key)) + .collect(Collectors.joining(", ", "{", "}")); + } + + /** + * Checks whether the {@code development} profile is active. 
+ * + * @param activeProfiles Array of active profiles + * @return {@code true} if the {@code development} profile is active, {@code false} otherwise + */ + public static boolean isDevelopmentProfile(String[] activeProfiles) { + return Arrays.binarySearch(activeProfiles, Constants.DEVELOPMENT_PROFILE) != -1; } } diff --git a/src/main/java/cz/cvut/kbss/termit/util/throttle/CacheableFuture.java b/src/main/java/cz/cvut/kbss/termit/util/throttle/CacheableFuture.java index f1dd254a5..b6afe3872 100644 --- a/src/main/java/cz/cvut/kbss/termit/util/throttle/CacheableFuture.java +++ b/src/main/java/cz/cvut/kbss/termit/util/throttle/CacheableFuture.java @@ -11,7 +11,7 @@ * A future which can provide a cached result before its completion. * @see Future */ -public interface CacheableFuture extends ChainableFuture { +public interface CacheableFuture extends Future { /** * @return the cached result when available diff --git a/src/main/java/cz/cvut/kbss/termit/util/throttle/ChainableFuture.java b/src/main/java/cz/cvut/kbss/termit/util/throttle/ChainableFuture.java index 0d8b63d6c..831f00d52 100644 --- a/src/main/java/cz/cvut/kbss/termit/util/throttle/ChainableFuture.java +++ b/src/main/java/cz/cvut/kbss/termit/util/throttle/ChainableFuture.java @@ -3,14 +3,17 @@ import java.util.concurrent.Future; import java.util.function.Consumer; -public interface ChainableFuture extends Future { +public interface ChainableFuture> extends Future { /** - * Executes this action once the future is completed normally. - * Action is not executed on exceptional completion. + * Executes this action once the future is completed. + * Action is executed no matter if the future is completed successfully, exceptionally or cancelled. *

- * If the future is already completed, action is executed synchronously. - * @param action action to be executed + * If the future is already completed, it is executed synchronously. + *

+ * Note that you must use the future passed as the parameter and not the original future object. + * @param action action receiving this future after completion + * @return this future */ - ChainableFuture then(Consumer action); + ChainableFuture then(Consumer action); } diff --git a/src/main/java/cz/cvut/kbss/termit/util/throttle/ThrottledFuture.java b/src/main/java/cz/cvut/kbss/termit/util/throttle/ThrottledFuture.java index e32f8ef40..045d06cdf 100644 --- a/src/main/java/cz/cvut/kbss/termit/util/throttle/ThrottledFuture.java +++ b/src/main/java/cz/cvut/kbss/termit/util/throttle/ThrottledFuture.java @@ -1,6 +1,5 @@ package cz.cvut.kbss.termit.util.throttle; -import cz.cvut.kbss.termit.exception.TermItException; import cz.cvut.kbss.termit.util.Utils; import cz.cvut.kbss.termit.util.longrunning.LongRunningTask; import jakarta.annotation.Nonnull; @@ -20,7 +19,7 @@ import java.util.function.Consumer; import java.util.function.Supplier; -public class ThrottledFuture implements CacheableFuture, LongRunningTask { +public class ThrottledFuture implements CacheableFuture, ChainableFuture>, LongRunningTask { private final ReentrantLock lock = new ReentrantLock(); private final ReentrantLock callbackLock = new ReentrantLock(); @@ -33,7 +32,7 @@ public class ThrottledFuture implements CacheableFuture, LongRunningTask { private @Nullable Supplier task; - private final List> onCompletion = new ArrayList<>(); + private final List>> onCompletion = new ArrayList<>(); private final AtomicReference startedAt = new AtomicReference<>(null); @@ -90,7 +89,17 @@ public ThrottledFuture setCachedResult(@Nullable final T cachedResult) { @Override public boolean cancel(boolean mayInterruptIfRunning) { - return future.cancel(mayInterruptIfRunning); + final boolean wasCanceled = isCancelled(); + if(!future.cancel(mayInterruptIfRunning)) { + return false; + } + + if (!wasCanceled && task != null) { + callbackLock.lock(); + onCompletion.forEach(c -> c.accept(this)); + callbackLock.unlock(); + 
} + return true; } @Override @@ -124,7 +133,7 @@ public T get(long timeout, @Nonnull TimeUnit unit) * @return If the current task is already running, was canceled or already completed, returns a new future for the given task. * Otherwise, replaces the current task and returns self. */ - protected ThrottledFuture update(Supplier task, @Nonnull List> onCompletion) { + protected ThrottledFuture update(Supplier task, @Nonnull List>> onCompletion) { boolean locked = false; try { locked = lock.tryLock(); @@ -201,14 +210,16 @@ protected void run(@Nullable Consumer> startedCallback) { T result = null; if (task != null) { result = task.get(); - final T finalResult = result; - callbackLock.lock(); - onCompletion.forEach(c -> c.accept(finalResult)); - callbackLock.unlock(); } future.complete(result); } catch (Exception e) { future.completeExceptionally(e); + } finally { + if (task != null) { + callbackLock.lock(); + onCompletion.forEach(c -> c.accept(this)); + callbackLock.unlock(); + } } } finally { if (locked) { @@ -242,18 +253,11 @@ public boolean isRunning() { } @Override - public ThrottledFuture then(Consumer action) { + public ThrottledFuture then(Consumer> action) { try { callbackLock.lock(); - if (future.isDone() && !future.isCancelled()) { - try { - action.accept(future.get()); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw new TermItException(e); - } catch (ExecutionException e) { - throw new TermItException(e); - } + if (future.isDone()) { + action.accept(this); } else { onCompletion.add(action); } @@ -262,4 +266,12 @@ public ThrottledFuture then(Consumer action) { } return this; } + + /** + * @return {@code true} if this future completed + * exceptionally or was cancelled. 
+ */ + public boolean isCompletedExceptionally() { + return future.isCompletedExceptionally(); + } } diff --git a/src/main/java/cz/cvut/kbss/termit/websocket/VocabularySocketController.java b/src/main/java/cz/cvut/kbss/termit/websocket/VocabularySocketController.java index 00c2e8b83..f244358ed 100644 --- a/src/main/java/cz/cvut/kbss/termit/websocket/VocabularySocketController.java +++ b/src/main/java/cz/cvut/kbss/termit/websocket/VocabularySocketController.java @@ -11,7 +11,7 @@ import cz.cvut.kbss.termit.service.business.VocabularyService; import cz.cvut.kbss.termit.util.Configuration; import cz.cvut.kbss.termit.util.Constants; -import cz.cvut.kbss.termit.util.throttle.CacheableFuture; +import cz.cvut.kbss.termit.util.throttle.ThrottledFuture; import jakarta.annotation.Nonnull; import org.springframework.context.event.EventListener; import org.springframework.messaging.MessageHeaders; @@ -53,7 +53,7 @@ public void validateVocabulary(@DestinationVariable String localName, final URI identifier = resolveIdentifier(namespace.orElse(config.getNamespace().getVocabulary()), localName); final Vocabulary vocabulary = vocabularyService.getReference(identifier); - final CacheableFuture> future = vocabularyService.validateContents(vocabulary.getUri()); + final ThrottledFuture> future = vocabularyService.validateContents(vocabulary.getUri()); future.getNow().ifPresentOrElse(validationResults -> // if there is a result present (returned from cache), send it @@ -66,14 +66,15 @@ public void validateVocabulary(@DestinationVariable String localName, messageHeaders ), () -> // otherwise reply will be sent once the future is resolved - future.then(results -> + future.then(completedFuture -> + completedFuture.getNow().ifPresent(results -> sendToSession( WebSocketDestinations.VOCABULARIES_VALIDATION, results, getHeaders(identifier, Map.of("cached", false)), messageHeaders - )) + ))) ); } diff --git a/src/main/java/cz/cvut/kbss/termit/websocket/handler/WebSocketExceptionHandler.java 
b/src/main/java/cz/cvut/kbss/termit/websocket/handler/WebSocketExceptionHandler.java index c5869701b..c6042bb9a 100644 --- a/src/main/java/cz/cvut/kbss/termit/websocket/handler/WebSocketExceptionHandler.java +++ b/src/main/java/cz/cvut/kbss/termit/websocket/handler/WebSocketExceptionHandler.java @@ -19,6 +19,7 @@ import cz.cvut.kbss.termit.exception.TermItException; import cz.cvut.kbss.termit.exception.UnsupportedOperationException; import cz.cvut.kbss.termit.exception.UnsupportedSearchFacetException; +import cz.cvut.kbss.termit.exception.UnsupportedTextAnalysisLanguageException; import cz.cvut.kbss.termit.exception.ValidationException; import cz.cvut.kbss.termit.exception.WebServiceIntegrationException; import cz.cvut.kbss.termit.exception.importing.UnsupportedImportMediaTypeException; @@ -87,7 +88,8 @@ private static ErrorInfo errorInfo(Message message, Throwable e) { } private static ErrorInfo errorInfo(Message message, TermItException e) { - return ErrorInfo.createParametrizedWithMessage(e.getMessage(), e.getMessageId(), destination(message), e.getParameters()); + return ErrorInfo.createParametrizedWithMessage(e.getMessage(), e.getMessageId(), destination(message), + e.getParameters()); } @MessageExceptionHandler @@ -95,7 +97,7 @@ public void messageDeliveryException(Message message, MessageDeliveryExceptio // messages without destination will be logged only on trace (hasDestination(message) ? 
LOG.atError() : LOG.atTrace()) .setMessage("Failed to send message with destination {}: {}") - .addArgument(()-> destination(message)) + .addArgument(() -> destination(message)) .addArgument(e.getMessage()) .setCause(e.getCause()) .log(); @@ -226,7 +228,8 @@ public ErrorInfo invalidParameter(Message message, InvalidParameterException @MessageExceptionHandler public ErrorInfo maxUploadSizeExceededException(Message message, MaxUploadSizeExceededException e) { logException(e, message); - return ErrorInfo.createWithMessageAndMessageId(e.getMessage(), "error.file.maxUploadSizeExceeded", destination(message)); + return ErrorInfo.createWithMessageAndMessageId(e.getMessage(), "error.file.maxUploadSizeExceeded", + destination(message)); } @MessageExceptionHandler @@ -271,4 +274,11 @@ public ErrorInfo uriSyntaxException(Message message, URISyntaxException e) { logException(e, message); return errorInfo(message, e); } + + @MessageExceptionHandler + public ErrorInfo unsupportedTextAnalysisLanguageException(Message message, + UnsupportedTextAnalysisLanguageException e) { + logException(e, message); + return errorInfo(message, e); + } } diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml index 8d9cae801..655043d51 100644 --- a/src/main/resources/application.yml +++ b/src/main/resources/application.yml @@ -66,8 +66,6 @@ termit: separator: /verze file: storage: /tmp/termit - textAnalysis: - url: http://localhost:8081/annotace/annotate changetracking: context: extension: /zmeny diff --git a/src/main/resources/template/termit-translations-import.xlsx b/src/main/resources/template/termit-translations-import.xlsx new file mode 100644 index 000000000..5688ee389 Binary files /dev/null and b/src/main/resources/template/termit-translations-import.xlsx differ diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/TextAnalysisRecordDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/TextAnalysisRecordDaoTest.java index 7eb5a23e8..cc0c320d3 
100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/TextAnalysisRecordDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/TextAnalysisRecordDaoTest.java @@ -63,9 +63,10 @@ void setUp() { @Test void findLatestGetsLatestTextAnalysisRecordForResource() { final URI vocabulary = Generator.generateUri(); - final TextAnalysisRecord old = new TextAnalysisRecord(Instant.ofEpochMilli(System.currentTimeMillis() - 10000), resource); + final TextAnalysisRecord old = new TextAnalysisRecord(Instant.ofEpochMilli(System.currentTimeMillis() - 10000), + resource, Environment.LANGUAGE); old.setVocabularies(Collections.singleton(vocabulary)); - final TextAnalysisRecord latest = new TextAnalysisRecord(Utils.timestamp(), resource); + final TextAnalysisRecord latest = new TextAnalysisRecord(Utils.timestamp(), resource, Environment.LANGUAGE); latest.setVocabularies(Collections.singleton(vocabulary)); transactional(() -> { sut.persist(old); diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/context/DescriptorFactoryTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/context/DescriptorFactoryTest.java index c22fc8a49..621c8d823 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/context/DescriptorFactoryTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/context/DescriptorFactoryTest.java @@ -98,7 +98,7 @@ void termDescriptorCreatesDescriptorWithExactMatchesContextSetToDefaultToAllowEx } @Test - void fileDescriptorContainsAlsoDescriptorForDocument() { + void fileDescriptorContainsAlsoDescriptorForDocument() throws Exception { final File file = Generator.generateFileWithId("test.html"); final Document doc = Generator.generateDocumentWithId(); doc.addFile(file); @@ -106,7 +106,7 @@ void fileDescriptorContainsAlsoDescriptorForDocument() { doc.setVocabulary(Generator.generateUri()); final Descriptor result = sut.fileDescriptor(doc.getVocabulary()); final FieldSpecification docFieldSpec = mock(FieldSpecification.class); - 
when(docFieldSpec.getJavaField()).thenReturn(File.getDocumentField()); + when(docFieldSpec.getJavaField()).thenReturn(File.class.getDeclaredField("document")); final Descriptor docDescriptor = result.getAttributeDescriptor(docFieldSpec); assertNotNull(docDescriptor); } diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/dao/TermDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/dao/TermDaoTest.java index 036c8bcf4..75ac29892 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/dao/TermDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/dao/TermDaoTest.java @@ -82,6 +82,7 @@ import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertInstanceOf; @@ -1394,4 +1395,12 @@ void findByIdLoadsTermFromVocabularyContextOnly() { assertTrue(result.isPresent()); assertFalse(result.get().getProperties().containsKey(property)); } + + @Test + void findByIdReturnsOptionalEmptyWhenTermDoesNotExists() { + final Term term = Generator.generateTermWithId(vocabulary.getUri()); + // trying to find a non-existing term + final Optional empty = assertDoesNotThrow(()-> sut.find(term.getUri())); + assertTrue(empty.isEmpty()); + } } diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java index 23b72777c..e2277c8b9 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/dao/VocabularyDaoTest.java @@ -25,11 +25,13 @@ import cz.cvut.kbss.termit.dto.PrefixDeclaration; import cz.cvut.kbss.termit.dto.RdfsStatement; import cz.cvut.kbss.termit.dto.Snapshot; +import 
cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.environment.Environment; import cz.cvut.kbss.termit.environment.Generator; import cz.cvut.kbss.termit.event.AssetPersistEvent; import cz.cvut.kbss.termit.event.AssetUpdateEvent; import cz.cvut.kbss.termit.event.RefreshLastModifiedEvent; +import cz.cvut.kbss.termit.event.VocabularyEvent; import cz.cvut.kbss.termit.event.VocabularyWillBeRemovedEvent; import cz.cvut.kbss.termit.model.Glossary; import cz.cvut.kbss.termit.model.Model; @@ -43,6 +45,7 @@ import cz.cvut.kbss.termit.model.resource.File; import cz.cvut.kbss.termit.model.util.EntityToOwlClassMapper; import cz.cvut.kbss.termit.persistence.context.DescriptorFactory; +import cz.cvut.kbss.termit.persistence.dao.changetracking.ChangeRecordDao; import cz.cvut.kbss.termit.util.Constants; import org.eclipse.rdf4j.model.IRI; import org.eclipse.rdf4j.model.ValueFactory; @@ -57,8 +60,10 @@ import org.mockito.ArgumentCaptor; import org.mockito.Spy; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.mock.mockito.SpyBean; import org.springframework.context.ApplicationEvent; import org.springframework.context.ApplicationEventPublisher; +import org.springframework.data.domain.Pageable; import org.springframework.test.annotation.DirtiesContext; import java.net.URI; @@ -83,6 +88,7 @@ import static cz.cvut.kbss.termit.environment.util.ContainsSameEntities.containsSameEntities; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; @@ -90,6 +96,7 @@ import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.atLeastOnce; +import static 
org.mockito.Mockito.doReturn; import static org.mockito.Mockito.verify; @DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_EACH_TEST_METHOD) @@ -108,6 +115,9 @@ class VocabularyDaoTest extends BaseDaoTestRunner { @Autowired private VocabularyDao sut; + @SpyBean + private ChangeRecordDao changeRecordDao; + private User author; @BeforeEach @@ -761,10 +771,14 @@ void removePublishesEventAndDropsGraph() { transactional(() -> sut.remove(vocabulary)); - ArgumentCaptor eventCaptor = ArgumentCaptor.forClass(VocabularyWillBeRemovedEvent.class); - verify(eventPublisher).publishEvent(eventCaptor.capture()); + ArgumentCaptor eventCaptor = ArgumentCaptor.forClass(VocabularyWillBeRemovedEvent.class); + verify(eventPublisher, atLeastOnce()).publishEvent(eventCaptor.capture()); - VocabularyWillBeRemovedEvent event = eventCaptor.getValue(); + VocabularyWillBeRemovedEvent event = (VocabularyWillBeRemovedEvent) eventCaptor + .getAllValues().stream() + .filter(e -> e instanceof VocabularyWillBeRemovedEvent) + .findAny().orElseThrow(); + assertNotNull(event); assertEquals(event.getVocabularyIri(), vocabulary.getUri()); @@ -927,4 +941,39 @@ void getAnyExternalRelationsReturnsTermsWithBothRelations(URI termRelation) { } }); } + + @Test + void getDetailedHistoryOfContentCallsChangeRecordDaoWithFilter() { + final Vocabulary vocabulary = Generator.generateVocabularyWithId(); + final List records = List.of(); + final URI skosConcept = URI.create(SKOS.CONCEPT); + final Pageable unpaged = Pageable.unpaged(); + final ChangeRecordFilterDto filterDto = new ChangeRecordFilterDto(); + filterDto.setAuthorName("Name of the author"); + + doReturn(records).when(changeRecordDao).findAllRelatedToType(vocabulary, filterDto, skosConcept, unpaged); + + sut.getDetailedHistoryOfContent(vocabulary, filterDto, unpaged); + + verify(changeRecordDao).findAllRelatedToType(vocabulary, filterDto, skosConcept, unpaged); + } + + @Test + void getLanguagesReturnsDistinctLanguagesUsedByVocabularyTerms() { + final 
Vocabulary vocabulary = Generator.generateVocabularyWithId(); + final Term term = Generator.generateTermWithId(vocabulary.getUri()); + final Term term2 = Generator.generateTermWithId(vocabulary.getUri()); + term2.getLabel().set("cs", "Název v češtině"); + transactional(() -> { + em.persist(vocabulary, descriptorFor(vocabulary)); + em.persist(term, descriptorFactory.termDescriptor(term)); + em.persist(term2, descriptorFactory.termDescriptor(term2)); + Generator.addTermInVocabularyRelationship(term, vocabulary.getUri(), em); + Generator.addTermInVocabularyRelationship(term2, vocabulary.getUri(), em); + }); + + final List languages = sut.getLanguages(vocabulary.getUri()); + assertEquals(2, languages.size()); + assertThat(languages, hasItems(Environment.LANGUAGE, "cs")); + } } diff --git a/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java b/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java index 5f43ef096..2aaffee88 100644 --- a/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java +++ b/src/test/java/cz/cvut/kbss/termit/persistence/dao/changetracking/ChangeRecordDaoTest.java @@ -19,34 +19,46 @@ import cz.cvut.kbss.jopa.model.EntityManager; import cz.cvut.kbss.jopa.model.MultilingualString; +import cz.cvut.kbss.jopa.model.annotations.OWLClass; import cz.cvut.kbss.jopa.model.descriptors.Descriptor; import cz.cvut.kbss.jopa.model.descriptors.EntityDescriptor; +import cz.cvut.kbss.jopa.vocabulary.RDFS; import cz.cvut.kbss.jopa.vocabulary.SKOS; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; +import cz.cvut.kbss.termit.environment.Environment; import cz.cvut.kbss.termit.environment.Generator; import cz.cvut.kbss.termit.model.Term; import cz.cvut.kbss.termit.model.User; import cz.cvut.kbss.termit.model.Vocabulary; import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; +import 
cz.cvut.kbss.termit.model.changetracking.DeleteChangeRecord; import cz.cvut.kbss.termit.model.changetracking.PersistChangeRecord; import cz.cvut.kbss.termit.model.changetracking.UpdateChangeRecord; import cz.cvut.kbss.termit.persistence.dao.BaseDaoTestRunner; import cz.cvut.kbss.termit.util.Utils; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.Pageable; import org.springframework.test.annotation.DirtiesContext; import java.net.URI; import java.time.Instant; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.List; import java.util.Optional; +import java.util.Random; import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import java.util.stream.IntStream; +import java.util.stream.Stream; import static org.hamcrest.CoreMatchers.anyOf; import static org.hamcrest.CoreMatchers.instanceOf; @@ -59,6 +71,7 @@ @DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_EACH_TEST_METHOD) class ChangeRecordDaoTest extends BaseDaoTestRunner { + private static final URI SKOS_CONCEPT = URI.create(SKOS.CONCEPT); @Autowired private ChangeTrackingContextResolver contextResolver; @@ -113,11 +126,16 @@ private UpdateChangeRecord generateUpdateRecord(Instant timestamp, URI changedOb @Test void findAllRetrievesChangeRecordsRelatedToSpecifiedAsset() { enableRdfsInference(em); - final Term asset = Generator.generateTermWithId(); + final Term asset = Generator.generateTermWithId(vocabulary.getUri()); + transactional(() -> { + em.persist(vocabulary); + em.persist(asset, persistDescriptor(vocabulary.getUri())); + }); final List records = IntStream.range(0, 5).mapToObj( - i -> 
generateUpdateRecord(Instant.ofEpochMilli(System.currentTimeMillis() - i * 10000L), + i -> generateUpdateRecord(Utils.timestamp().minusSeconds(i * 10L), asset.getUri())).collect(Collectors.toList()); - transactional(() -> records.forEach(r -> em.persist(r, persistDescriptor(vocabulary.getUri())))); + final URI changeContext = contextResolver.resolveChangeTrackingContext(vocabulary); + transactional(() -> records.forEach(r -> em.persist(r, persistDescriptor(changeContext)))); final List result = sut.findAll(asset); assertEquals(records.size(), result.size()); @@ -134,11 +152,16 @@ private Descriptor persistDescriptor(URI context) { @Test void findAllReturnsChangeRecordsOrderedByTimestampDescending() { enableRdfsInference(em); - final Term asset = Generator.generateTermWithId(); + final Term asset = Generator.generateTermWithId(vocabulary.getUri()); final List records = IntStream.range(0, 5).mapToObj( - i -> generateUpdateRecord(Instant.ofEpochMilli(System.currentTimeMillis() + i * 10000L), + i -> generateUpdateRecord(Utils.timestamp().plusSeconds(i * 10L), asset.getUri())).collect(Collectors.toList()); - transactional(() -> records.forEach(r -> em.persist(r, persistDescriptor(vocabulary.getUri())))); + final URI changeContext = contextResolver.resolveChangeTrackingContext(vocabulary); + transactional(() -> { + em.persist(vocabulary); + em.persist(asset, persistDescriptor(vocabulary.getUri())); + records.forEach(r -> em.persist(r, persistDescriptor(changeContext))); + }); final List result = sut.findAll(asset); records.sort(Comparator.comparing(AbstractChangeRecord::getTimestamp).reversed()); @@ -148,15 +171,18 @@ void findAllReturnsChangeRecordsOrderedByTimestampDescending() { @Test void findAllReturnsChangeRecordsOrderedByTimestampDescendingAndChangedAttributeId() { enableRdfsInference(em); - final Term asset = Generator.generateTermWithId(); + final Term asset = Generator.generateTermWithId(vocabulary.getUri()); final Instant now = Utils.timestamp(); final 
UpdateChangeRecord rOne = generateUpdateRecord(now, asset.getUri()); rOne.setChangedAttribute(URI.create(SKOS.PREF_LABEL)); final UpdateChangeRecord rTwo = generateUpdateRecord(now, asset.getUri()); rTwo.setChangedAttribute(URI.create(SKOS.DEFINITION)); + final Descriptor changeContextDescriptor = persistDescriptor(contextResolver.resolveChangeTrackingContext(vocabulary)); transactional(() -> { - em.persist(rOne, persistDescriptor(vocabulary.getUri())); - em.persist(rTwo, persistDescriptor(vocabulary.getUri())); + em.persist(vocabulary); + em.persist(asset, persistDescriptor(vocabulary.getUri())); + em.persist(rOne, changeContextDescriptor); + em.persist(rTwo, changeContextDescriptor); }); final List result = sut.findAll(asset); @@ -260,4 +286,266 @@ void getAuthorsRetrievesUsersAssociatedWithPersistChangeRecordsOfSpecifiedAsset( final Set result = sut.getAuthors(asset); assertEquals(Collections.singleton(author), result); } + + @Test + void findAllRelatedToTypeReturnsChangeRecordsWithoutVocabularyChanges() { + enableRdfsInference(em); + + final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); + final Term secondTerm = Generator.generateTermWithId(vocabulary.getUri()); + + final List firstChanges = Generator.generateChangeRecords(firstTerm, author); + final List secondChanges = Generator.generateChangeRecords(secondTerm, author); + + final List vocabularyChanges = Generator.generateChangeRecords(vocabulary, author); + + final Descriptor changeContextDescriptor = persistDescriptor(contextResolver.resolveChangeTrackingContext(vocabulary)); + final Descriptor vocabularyDescriptor = persistDescriptor(vocabulary.getUri()); + + transactional(() -> { + em.persist(vocabulary, vocabularyDescriptor); + em.persist(firstTerm, vocabularyDescriptor); + em.persist(secondTerm, vocabularyDescriptor); + + Stream.of(firstChanges, secondChanges, vocabularyChanges) + .flatMap(Collection::stream) + .forEach(r -> em.persist(r, changeContextDescriptor)); + }); + + final 
ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); + + final int recordsCount = firstChanges.size() + secondChanges.size(); + final Pageable pageable = Pageable.unpaged(); + + final List contentChanges = sut.findAllRelatedToType(vocabulary, filter, SKOS_CONCEPT, pageable); + + assertEquals(recordsCount, contentChanges.size()); + final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); + final long updatesCount = contentChanges.stream().filter(ch -> ch instanceof UpdateChangeRecord).count(); + final long deleteCount = contentChanges.stream().filter(ch -> ch instanceof DeleteChangeRecord).count(); + // check that all changes are related to the first or the second term + assertTrue(contentChanges.stream() + .allMatch(ch -> firstTerm.getUri().equals(ch.getChangedEntity()) || + secondTerm.getUri().equals(ch.getChangedEntity()))); + assertEquals(2, persistCount); + assertEquals(recordsCount - 2, updatesCount); // -2 persist records + assertEquals(0, deleteCount); + } + + @Test + void findAllRelatedToTypeReturnsRecordsOfExistingTermFilteredByTermName() { + enableRdfsInference(em); + + final String needle = "needle"; + final String haystack = "A label that contains needle somewhere"; + final String mud = "The n3edle is not here"; + + // needle is inside the label of first and the second term + final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); + firstTerm.getLabel().set(Environment.LANGUAGE, haystack); + final Term secondTerm = Generator.generateTermWithId(vocabulary.getUri()); + secondTerm.getLabel().set(mud + needle); + final Term thirdTerm = Generator.generateTermWithId(vocabulary.getUri()); + thirdTerm.getLabel().set(Environment.LANGUAGE, mud); + + final List firstChanges = Generator.generateChangeRecords(firstTerm, author); + final List secondChanges = Generator.generateChangeRecords(secondTerm, author); + final List thirdChanges = Generator.generateChangeRecords(thirdTerm, author); + + 
final Descriptor changeContextDescriptor = persistDescriptor(contextResolver.resolveChangeTrackingContext(vocabulary)); + final Descriptor vocabularyDescriptor = persistDescriptor(vocabulary.getUri()); + + transactional(() -> { + em.persist(vocabulary, vocabularyDescriptor); + + em.persist(firstTerm, vocabularyDescriptor); + em.persist(secondTerm, vocabularyDescriptor); + em.persist(thirdTerm, vocabularyDescriptor); + + Stream.of(firstChanges, secondChanges, thirdChanges) + .flatMap(Collection::stream) + .forEach(r -> em.persist(r, changeContextDescriptor)); + }); + + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); + filter.setAssetLabel(needle); + + // needle is inside the label of first and the second term + final int recordsCount = firstChanges.size() + secondChanges.size(); + final Pageable pageable = Pageable.ofSize(recordsCount * 2); + + final List contentChanges = sut.findAllRelatedToType(vocabulary, filter, SKOS_CONCEPT, pageable); + + assertEquals(recordsCount, contentChanges.size()); + final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); + final long updatesCount = contentChanges.stream().filter(ch -> ch instanceof UpdateChangeRecord).count(); + final long deleteCount = contentChanges.stream().filter(ch -> ch instanceof DeleteChangeRecord).count(); + assertEquals(2, persistCount); + assertEquals(recordsCount - 2, updatesCount); // -2 persist records + assertEquals(0, deleteCount); + } + + + @Test + void findAllRelatedToTypeReturnsRecordsOfExistingTermFilteredByChangedAttributeName() { + enableRdfsInference(em); + + final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); + final Term secondTerm = Generator.generateTermWithId(vocabulary.getUri()); + + final List firstChanges = Generator.generateChangeRecords(firstTerm, author); + final List secondChanges = Generator.generateChangeRecords(secondTerm, author); + + final Random random = new Random(); + final AtomicInteger 
recordCount = new AtomicInteger(0); + final URI changedAttribute = URI.create(SKOS.DEFINITION); + final URI anotherChangedAttribute = URI.create(RDFS.LABEL); + final String changedAttributeName = "definition"; + + final Descriptor changeContextDescriptor = persistDescriptor(contextResolver.resolveChangeTrackingContext(vocabulary)); + final Descriptor vocabularyDescriptor = persistDescriptor(vocabulary.getUri()); + + // randomize changed attributes + Stream.of(firstChanges, secondChanges).flatMap(Collection::stream) + .filter(r -> r instanceof UpdateChangeRecord) + .map(r -> (UpdateChangeRecord) r) + .forEach(r -> { + // ensuring at least one has the "changedAttribute" + if(random.nextBoolean() || recordCount.get() == 0) { + r.setChangedAttribute(changedAttribute); + recordCount.incrementAndGet(); + } else { + r.setChangedAttribute(anotherChangedAttribute); + } + }); + + transactional(() -> { + em.persist(vocabulary); + + em.persist(firstTerm, vocabularyDescriptor); + em.persist(secondTerm, vocabularyDescriptor); + + Stream.of(firstChanges, secondChanges) + .flatMap(Collection::stream) + .forEach(r -> em.persist(r, changeContextDescriptor)); + }); + + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); + filter.setChangedAttributeName(changedAttributeName); + + final Pageable pageable = Pageable.unpaged(); + + final List contentChanges = sut.findAllRelatedToType(vocabulary, filter, SKOS_CONCEPT, pageable); + + assertEquals(recordCount.get(), contentChanges.size()); + final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); + final long updatesCount = contentChanges.stream().filter(ch -> ch instanceof UpdateChangeRecord).count(); + final long deleteCount = contentChanges.stream().filter(ch -> ch instanceof DeleteChangeRecord).count(); + assertEquals(0, persistCount); + assertEquals(recordCount.get(), updatesCount); + assertEquals(0, deleteCount); + } + + @Test + void 
findAllRelatedToTypeReturnsRecordsOfExistingTermFilteredByAuthorName() { + enableRdfsInference(em); + + final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); + final Term secondTerm = Generator.generateTermWithId(vocabulary.getUri()); + + final List firstChanges = Generator.generateChangeRecords(firstTerm, author); + final List secondChanges = Generator.generateChangeRecords(secondTerm, author); + + // make new author + final User anotherAuthor = Generator.generateUserWithId(); + anotherAuthor.setFirstName("Karel"); + anotherAuthor.setLastName("Novák"); + transactional(() -> em.persist(anotherAuthor)); + Environment.setCurrentUser(anotherAuthor); + + final int recordCount = 2; + // author is this.author (Environment current user) + firstChanges.add(Generator.generateUpdateChange(firstTerm)); + secondChanges.add(Generator.generateUpdateChange(secondTerm)); + + final Descriptor changeContextDescriptor = persistDescriptor(contextResolver.resolveChangeTrackingContext(vocabulary)); + final Descriptor vocabularyDescriptor = persistDescriptor(vocabulary.getUri()); + + transactional(() -> { + em.persist(vocabulary); + + em.persist(firstTerm, vocabularyDescriptor); + em.persist(secondTerm, vocabularyDescriptor); + + Stream.of(firstChanges, secondChanges) + .flatMap(Collection::stream) + .forEach(r -> em.persist(r, changeContextDescriptor)); + }); + + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); + // full name without first two and last two characters + filter.setAuthorName(anotherAuthor.getFullName().substring(2, anotherAuthor.getFullName().length() - 2)); + + final Pageable pageable = Pageable.unpaged(); + + final List contentChanges = sut.findAllRelatedToType(vocabulary, filter, SKOS_CONCEPT, pageable); + + assertEquals(recordCount, contentChanges.size()); + final long persistCount = contentChanges.stream().filter(ch -> ch instanceof PersistChangeRecord).count(); + final long updatesCount = contentChanges.stream().filter(ch -> ch 
instanceof UpdateChangeRecord).count(); + final long deleteCount = contentChanges.stream().filter(ch -> ch instanceof DeleteChangeRecord).count(); + assertEquals(0, persistCount); + assertEquals(recordCount, updatesCount); + assertEquals(0, deleteCount); + } + + @ParameterizedTest + @ValueSource(classes = { + UpdateChangeRecord.class, + PersistChangeRecord.class, + DeleteChangeRecord.class + }) + void findAllRelatedToTypeReturnsRecordsOfExistingTermFilteredByChangeType(Class typeClass) { + enableRdfsInference(em); + final URI typeUri = URI.create(typeClass.getAnnotation(OWLClass.class).iri()); + + final Term firstTerm = Generator.generateTermWithId(vocabulary.getUri()); + final Term secondTerm = Generator.generateTermWithId(vocabulary.getUri()); + + final List firstChanges = Generator.generateChangeRecords(firstTerm, author); + final List secondChanges = Generator.generateChangeRecords(secondTerm, author); + final DeleteChangeRecord deleteChangeRecord = new DeleteChangeRecord(); + deleteChangeRecord.setChangedEntity(secondTerm.getUri()); + deleteChangeRecord.setTimestamp(Utils.timestamp()); + deleteChangeRecord.setAuthor(author); + deleteChangeRecord.setLabel(secondTerm.getLabel()); + + final int recordCount = (int) Stream.of(firstChanges, secondChanges, List.of(deleteChangeRecord)).flatMap(List::stream).filter(typeClass::isInstance).count(); + + final Descriptor changeContextDescriptor = persistDescriptor(contextResolver.resolveChangeTrackingContext(vocabulary)); + final Descriptor vocabularyDescriptor = persistDescriptor(vocabulary.getUri()); + + transactional(() -> { + em.persist(vocabulary); + + em.persist(firstTerm, vocabularyDescriptor); + em.persist(secondTerm, vocabularyDescriptor); + + Stream.of(firstChanges, secondChanges, List.of(deleteChangeRecord)) + .flatMap(Collection::stream) + .forEach(r -> em.persist(r, changeContextDescriptor)); + }); + + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); + // filter by the type of the change record
+ filter.setChangeType(typeUri); + + final Pageable pageable = Pageable.unpaged(); + + final List contentChanges = sut.findAllRelatedToType(vocabulary, filter, SKOS_CONCEPT, pageable); + + assertEquals(recordCount, contentChanges.size()); + assertTrue(contentChanges.stream().allMatch(typeClass::isInstance)); + } + } diff --git a/src/test/java/cz/cvut/kbss/termit/rest/ResourceControllerTest.java b/src/test/java/cz/cvut/kbss/termit/rest/ResourceControllerTest.java index bd50b7258..0062417ab 100644 --- a/src/test/java/cz/cvut/kbss/termit/rest/ResourceControllerTest.java +++ b/src/test/java/cz/cvut/kbss/termit/rest/ResourceControllerTest.java @@ -19,6 +19,7 @@ import com.fasterxml.jackson.core.type.TypeReference; import cz.cvut.kbss.jsonld.JsonLd; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.environment.Environment; import cz.cvut.kbss.termit.environment.Generator; import cz.cvut.kbss.termit.exception.NotFoundException; @@ -326,7 +327,7 @@ void getLatestTextAnalysisRecordRetrievesAnalysisRecordFromService() throws Exce final File file = generateFile(); when(identifierResolverMock.resolveIdentifier(RESOURCE_NAMESPACE, FILE_NAME)).thenReturn(file.getUri()); when(resourceServiceMock.findRequired(file.getUri())).thenReturn(file); - final TextAnalysisRecord record = new TextAnalysisRecord(Utils.timestamp(), file); + final TextAnalysisRecord record = new TextAnalysisRecord(Utils.timestamp(), file, Environment.LANGUAGE); record.setVocabularies(Collections.singleton(Generator.generateUri())); when(resourceServiceMock.findLatestTextAnalysisRecord(file)).thenReturn(record); final MvcResult mvcResult = mockMvc.perform(get(PATH + "/" + FILE_NAME + "/text-analysis/records/latest") @@ -396,7 +397,8 @@ void getHistoryReturnsListOfChangeRecordsForSpecifiedVocabulary() throws Excepti when(identifierResolverMock.resolveIdentifier(RESOURCE_NAMESPACE, RESOURCE_NAME)).thenReturn(resource.getUri());
when(resourceServiceMock.getReference(RESOURCE_URI)).thenReturn(resource); final List records = Collections.singletonList(Generator.generatePersistChange(resource)); - when(resourceServiceMock.getChanges(resource)).thenReturn(records); + final ChangeRecordFilterDto emptyFilter = new ChangeRecordFilterDto(); + when(resourceServiceMock.getChanges(resource, emptyFilter)).thenReturn(records); final MvcResult mvcResult = mockMvc .perform(get(PATH + "/" + RESOURCE_NAME + "/history").param(QueryParams.NAMESPACE, RESOURCE_NAMESPACE)) @@ -406,7 +408,7 @@ void getHistoryReturnsListOfChangeRecordsForSpecifiedVocabulary() throws Excepti }); assertNotNull(result); assertEquals(records, result); - verify(resourceServiceMock).getChanges(resource); + verify(resourceServiceMock).getChanges(resource, emptyFilter); } @Test diff --git a/src/test/java/cz/cvut/kbss/termit/rest/TermControllerTest.java b/src/test/java/cz/cvut/kbss/termit/rest/TermControllerTest.java index cedfb8e06..55554e910 100644 --- a/src/test/java/cz/cvut/kbss/termit/rest/TermControllerTest.java +++ b/src/test/java/cz/cvut/kbss/termit/rest/TermControllerTest.java @@ -23,6 +23,7 @@ import cz.cvut.kbss.jopa.vocabulary.SKOS; import cz.cvut.kbss.jsonld.JsonLd; import cz.cvut.kbss.termit.dto.Snapshot; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.dto.listing.TermDto; import cz.cvut.kbss.termit.environment.Environment; import cz.cvut.kbss.termit.environment.Generator; @@ -803,7 +804,8 @@ void getHistoryReturnsListOfChangeRecordsForSpecifiedTerm() throws Exception { term.setUri(termUri); when(termServiceMock.findRequired(term.getUri())).thenReturn(term); final List records = generateChangeRecords(term); - when(termServiceMock.getChanges(term)).thenReturn(records); + final ChangeRecordFilterDto emptyFilter = new ChangeRecordFilterDto(); + when(termServiceMock.getChanges(term, emptyFilter)).thenReturn(records); final MvcResult mvcResult = mockMvc .perform(get(PATH + VOCABULARY_NAME + 
"/terms/" + TERM_NAME + "/history")) @@ -812,6 +814,7 @@ void getHistoryReturnsListOfChangeRecordsForSpecifiedTerm() throws Exception { }); assertNotNull(result); assertEquals(records, result); + verify(termServiceMock).getChanges(term, emptyFilter); } private List generateChangeRecords(Term term) { @@ -833,7 +836,8 @@ void getHistoryStandaloneReturnsListOfChangeRecordsForSpecifiedTerm() throws Exc when(idResolverMock.resolveIdentifier(NAMESPACE, TERM_NAME)).thenReturn(termUri); when(termServiceMock.findRequired(termUri)).thenReturn(term); final List records = generateChangeRecords(term); - when(termServiceMock.getChanges(term)).thenReturn(records); + final ChangeRecordFilterDto emptyFilter = new ChangeRecordFilterDto(); + when(termServiceMock.getChanges(term, emptyFilter)).thenReturn(records); final MvcResult mvcResult = mockMvc .perform(get("/terms/" + TERM_NAME + "/history").param(QueryParams.NAMESPACE, NAMESPACE)) @@ -843,6 +847,7 @@ void getHistoryStandaloneReturnsListOfChangeRecordsForSpecifiedTerm() throws Exc }); assertNotNull(result); assertEquals(records, result); + verify(termServiceMock).getChanges(term, emptyFilter); } @Test diff --git a/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java b/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java index 0d1c7444d..8c03e5d03 100644 --- a/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java +++ b/src/test/java/cz/cvut/kbss/termit/rest/VocabularyControllerTest.java @@ -22,11 +22,13 @@ import cz.cvut.kbss.termit.dto.AggregatedChangeInfo; import cz.cvut.kbss.termit.dto.Snapshot; import cz.cvut.kbss.termit.dto.acl.AccessControlListDto; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.dto.listing.VocabularyDto; import cz.cvut.kbss.termit.environment.Environment; import cz.cvut.kbss.termit.environment.Generator; import cz.cvut.kbss.termit.exception.AssetRemovalException; import 
cz.cvut.kbss.termit.exception.importing.VocabularyImportException; +import cz.cvut.kbss.termit.model.Term; import cz.cvut.kbss.termit.model.User; import cz.cvut.kbss.termit.model.Vocabulary; import cz.cvut.kbss.termit.model.acl.AccessControlList; @@ -50,6 +52,7 @@ import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.domain.Pageable; import org.springframework.http.HttpHeaders; import org.springframework.http.MediaType; import org.springframework.mock.web.MockMultipartFile; @@ -79,6 +82,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.any; import static org.mockito.Mockito.anyBoolean; +import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; @@ -128,7 +132,7 @@ void getAllReturnsAllExistingVocabularies() throws Exception { final MvcResult mvcResult = mockMvc.perform(get(PATH)).andExpect(status().isOk()).andReturn(); - final List result = readValue(mvcResult, new TypeReference>() { + final List result = readValue(mvcResult, new TypeReference<>() { }); assertThat(result, containsSameEntities(vocabularies)); } @@ -382,7 +386,7 @@ void getTransitiveImportsReturnsCollectionOfImportIdentifiersRetrievedFromServic final MvcResult mvcResult = mockMvc.perform(get(PATH + "/" + FRAGMENT + "/imports")).andExpect(status().isOk()) .andReturn(); - final Set result = readValue(mvcResult, new TypeReference>() { + final Set result = readValue(mvcResult, new TypeReference<>() { }); assertEquals(imports, result); verify(serviceMock).getReference(VOCABULARY_URI); @@ -399,7 +403,7 @@ void getTransitiveImportsReturnsEmptyCollectionWhenNoImportsAreFoundForVocabular final MvcResult mvcResult = mockMvc.perform(get(PATH + "/" + FRAGMENT + "/imports")).andExpect(status().isOk()) .andReturn(); - final Set result = readValue(mvcResult, new TypeReference>() { + 
final Set result = readValue(mvcResult, new TypeReference<>() { }); assertNotNull(result); assertTrue(result.isEmpty()); @@ -427,17 +431,18 @@ void getHistoryReturnsListOfChangeRecordsForSpecifiedVocabulary() throws Excepti final Vocabulary vocabulary = generateVocabularyAndInitReferenceResolution(); final List records = Generator.generateChangeRecords(vocabulary, user); - when(serviceMock.getChanges(vocabulary)).thenReturn(records); + final ChangeRecordFilterDto emptyFilter = new ChangeRecordFilterDto(); + when(serviceMock.getChanges(vocabulary, emptyFilter)).thenReturn(records); final MvcResult mvcResult = mockMvc.perform(get(PATH + "/" + FRAGMENT + "/history")).andExpect(status().isOk()) .andReturn(); final List result = - readValue(mvcResult, new TypeReference>() { + readValue(mvcResult, new TypeReference<>() { }); assertNotNull(result); assertEquals(records, result); - verify(serviceMock).getChanges(vocabulary); + verify(serviceMock).getChanges(vocabulary, emptyFilter); } @Test @@ -458,7 +463,7 @@ void getHistoryOfContentReturnsListOfAggregatedChangeObjectsForTermsInSpecifiedV .andExpect(status().isOk()) .andReturn(); final List result = - readValue(mvcResult, new TypeReference>() { + readValue(mvcResult, new TypeReference<>() { }); assertNotNull(result); assertEquals(changes, result); @@ -510,7 +515,7 @@ void getSnapshotsReturnsListOfVocabularySnapshotsWhenFilterInstantIsNotProvided( get(PATH + "/" + FRAGMENT + "/versions").accept(MediaType.APPLICATION_JSON_VALUE)) .andExpect(status().isOk()) .andReturn(); - final List result = readValue(mvcResult, new TypeReference>() { + final List result = readValue(mvcResult, new TypeReference<>() { }); assertThat(result, containsSameEntities(snapshots)); verify(serviceMock).findSnapshots(vocabulary); @@ -633,7 +638,7 @@ void getAccessLevelRetrievesAccessLevelToSpecifiedVocabulary() throws Exception @Test void getExcelTemplateFileReturnsExcelTemplateFileRetrievedFromServiceAsAttachment() throws Exception { - 
when(serviceMock.getExcelTemplateFile()).thenReturn(new TypeAwareFileSystemResource( + when(serviceMock.getExcelImportTemplateFile()).thenReturn(new TypeAwareFileSystemResource( new File(getClass().getClassLoader().getResource("template/termit-import.xlsx").toURI()), Constants.MediaType.EXCEL)); @@ -641,5 +646,79 @@ void getExcelTemplateFileReturnsExcelTemplateFileRetrievedFromServiceAsAttachmen assertThat(mvcResult.getResponse().getHeader(HttpHeaders.CONTENT_DISPOSITION), containsString("attachment")); assertThat(mvcResult.getResponse().getHeader(HttpHeaders.CONTENT_DISPOSITION), containsString("filename=\"termit-import.xlsx\"")); + verify(serviceMock).getExcelImportTemplateFile(); + } + + @Test + void getExcelTemplateFileReturnsExcelTermTranslationsTemplateFileRetrievedFromServiceAsAttachment() + throws Exception { + when(serviceMock.getExcelTranslationsImportTemplateFile()).thenReturn(new TypeAwareFileSystemResource( + new File(getClass().getClassLoader().getResource("template/termit-translations-import.xlsx").toURI()), + Constants.MediaType.EXCEL)); + + final MvcResult mvcResult = mockMvc.perform( + get(PATH + "/import/template").queryParam("translationsOnly", Boolean.toString(true))).andReturn(); + assertThat(mvcResult.getResponse().getHeader(HttpHeaders.CONTENT_DISPOSITION), containsString("attachment")); + assertThat(mvcResult.getResponse().getHeader(HttpHeaders.CONTENT_DISPOSITION), + containsString("filename=\"termit-translations-import.xlsx\"")); + verify(serviceMock).getExcelTranslationsImportTemplateFile(); + } + + @Test + void getDetailedHistoryOfContentReturnsListOfChangeRecordsWhenNoFilterIsSpecified() throws Exception { + final int pageSize = Integer.parseInt(VocabularyController.DEFAULT_PAGE_SIZE); + final Vocabulary vocabulary = generateVocabularyAndInitReferenceResolution(); + final Term term = Generator.generateTermWithId(); + final List changeRecords = IntStream.range(0, 5).mapToObj( + i -> Generator.generateChangeRecords(term, 
user)).flatMap(List::stream).toList(); + final ChangeRecordFilterDto filter = new ChangeRecordFilterDto(); + final Pageable pageable = Pageable.ofSize(pageSize); + + doReturn(changeRecords).when(serviceMock).getDetailedHistoryOfContent(vocabulary, filter, pageable); + + final MvcResult mvcResult = mockMvc.perform(get(PATH + "/" + FRAGMENT + "/history-of-content/detail")) + .andExpect(status().isOk()).andReturn(); + final List result = + readValue(mvcResult, new TypeReference<>() { + }); + assertNotNull(result); + assertEquals(changeRecords, result); + verify(serviceMock).getDetailedHistoryOfContent(vocabulary, filter, pageable); + } + + @Test + void getLanguagesRetrievesAndReturnsListOfLanguagesUsedInVocabulary() throws Exception { + when(idResolverMock.resolveIdentifier(NAMESPACE, FRAGMENT)).thenReturn(VOCABULARY_URI); + final List languages = List.of(Environment.LANGUAGE, "cs", "de"); + when(serviceMock.getLanguages(VOCABULARY_URI)).thenReturn(languages); + + final MvcResult mvcResult = mockMvc.perform( + get(PATH + "/" + FRAGMENT + "/languages").queryParam(QueryParams.NAMESPACE, NAMESPACE)).andReturn(); + final List result = readValue(mvcResult, new TypeReference<>() { + }); + assertEquals(languages, result); + verify(serviceMock).getLanguages(VOCABULARY_URI); + } + + @Test + void reImportVocabularyRunsTermTranslationsImportForUploadedFileWhenTranslationsOnlyIsSpecified() throws Exception { + when(configMock.getNamespace().getVocabulary()).thenReturn(NAMESPACE); + final Vocabulary vocabulary = Generator.generateVocabulary(); + vocabulary.setUri(URI.create(NAMESPACE + FRAGMENT)); + when(idResolverMock.resolveIdentifier(NAMESPACE, FRAGMENT)).thenReturn(vocabulary.getUri()); + when(serviceMock.importTermTranslations(any(URI.class), any())).thenReturn(vocabulary); + final MockMultipartFile upload = new MockMultipartFile("file", "vocabulary.xlsx", + Constants.MediaType.EXCEL, + Environment.loadFile("data/import-simple-en-cs.xlsx")); + final MvcResult mvcResult = 
mockMvc.perform(multipart(PATH + "/" + FRAGMENT + "/import").file(upload) + .queryParam( + "translationsOnly", + "true")) + .andExpect(status().isCreated()) + .andReturn(); + verifyLocationEquals(PATH + "/" + FRAGMENT, mvcResult); + assertThat(mvcResult.getResponse().getHeader(HttpHeaders.LOCATION), + containsString(QueryParams.NAMESPACE + "=" + NAMESPACE)); + verify(serviceMock).importTermTranslations(vocabulary.getUri(), upload); } } diff --git a/src/test/java/cz/cvut/kbss/termit/service/VocabularyRepositoryServiceImportTest.java b/src/test/java/cz/cvut/kbss/termit/service/VocabularyRepositoryServiceImportTest.java index 276a146a9..3abc0872b 100644 --- a/src/test/java/cz/cvut/kbss/termit/service/VocabularyRepositoryServiceImportTest.java +++ b/src/test/java/cz/cvut/kbss/termit/service/VocabularyRepositoryServiceImportTest.java @@ -34,10 +34,12 @@ import org.springframework.web.multipart.MultipartFile; import java.io.IOException; +import java.net.URI; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -65,4 +67,20 @@ void passesInputStreamFromProvidedInputFileToImporter() throws IOException { assertNotNull(captor.getValue()); assertEquals(vocabulary, result); } + + @Test + void importTermTranslationsInvokesImporterWithProvidedData() throws IOException { + final MultipartFile input = new MockMultipartFile("vocabulary.xlsx", "vocabulary.xlsx", + Constants.MediaType.EXCEL, + Environment.loadFile("data/import-simple-en-cs.xlsx")); + final Vocabulary vocabulary = Generator.generateVocabularyWithId(); + when(importer.importTermTranslations(any(URI.class), any(VocabularyImporter.ImportInput.class))).thenReturn( + vocabulary); + final Vocabulary result = sut.importTermTranslations(vocabulary.getUri(), input); + final 
ArgumentCaptor captor = ArgumentCaptor.forClass( + VocabularyImporter.ImportInput.class); + verify(importer).importTermTranslations(eq(vocabulary.getUri()), captor.capture()); + assertNotNull(captor.getValue()); + assertEquals(vocabulary, result); + } } diff --git a/src/test/java/cz/cvut/kbss/termit/service/business/ResourceServiceTest.java b/src/test/java/cz/cvut/kbss/termit/service/business/ResourceServiceTest.java index 6119b0f90..d80f772db 100644 --- a/src/test/java/cz/cvut/kbss/termit/service/business/ResourceServiceTest.java +++ b/src/test/java/cz/cvut/kbss/termit/service/business/ResourceServiceTest.java @@ -17,6 +17,7 @@ */ package cz.cvut.kbss.termit.service.business; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.environment.Environment; import cz.cvut.kbss.termit.environment.Generator; import cz.cvut.kbss.termit.event.DocumentRenameEvent; @@ -24,6 +25,7 @@ import cz.cvut.kbss.termit.exception.NotFoundException; import cz.cvut.kbss.termit.exception.TermItException; import cz.cvut.kbss.termit.exception.UnsupportedAssetOperationException; +import cz.cvut.kbss.termit.exception.UnsupportedTextAnalysisLanguageException; import cz.cvut.kbss.termit.model.TextAnalysisRecord; import cz.cvut.kbss.termit.model.Vocabulary; import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; @@ -35,6 +37,7 @@ import cz.cvut.kbss.termit.service.document.TextAnalysisService; import cz.cvut.kbss.termit.service.repository.ChangeRecordService; import cz.cvut.kbss.termit.service.repository.ResourceRepositoryService; +import cz.cvut.kbss.termit.util.Configuration; import cz.cvut.kbss.termit.util.TypeAwareByteArrayResource; import cz.cvut.kbss.termit.util.TypeAwareResource; import cz.cvut.kbss.termit.util.Utils; @@ -47,6 +50,7 @@ import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; +import org.mockito.Spy; import org.mockito.junit.jupiter.MockitoExtension; import 
org.springframework.context.ApplicationEventPublisher; import org.springframework.http.MediaType; @@ -96,6 +100,9 @@ class ResourceServiceTest { @Mock private ApplicationEventPublisher eventPublisher; + @Spy + private Configuration config = new Configuration(); + @InjectMocks private ResourceService sut; @@ -197,6 +204,7 @@ void runTextAnalysisInvokesTextAnalysisWithVocabularyRelatedToFilesDocument() { file.setDocument(Generator.generateDocumentWithId()); final Vocabulary vocabulary = Generator.generateVocabularyWithId(); file.getDocument().setVocabulary(vocabulary.getUri()); + when(textAnalysisService.supportsLanguage(file)).thenReturn(true); sut.runTextAnalysis(file, Collections.emptySet()); verify(textAnalysisService).analyzeFile(file, Collections.singleton(vocabulary.getUri())); } @@ -212,6 +220,7 @@ void runTextAnalysisThrowsUnsupportedAssetOperationWhenResourceIsNotFile() { @Test void runTextAnalysisThrowsUnsupportedAssetOperationWhenFileHasNoVocabularyAndNoVocabulariesAreSpecifiedEither() { final File file = Generator.generateFileWithId("test.html"); + when(textAnalysisService.supportsLanguage(file)).thenReturn(true); assertThrows(UnsupportedAssetOperationException.class, () -> sut.runTextAnalysis(file, Collections.emptySet())); verify(textAnalysisService, never()).analyzeFile(any(), anySet()); @@ -221,6 +230,7 @@ void runTextAnalysisThrowsUnsupportedAssetOperationWhenFileHasNoVocabularyAndNoV void runTextAnalysisInvokesAnalysisWithCustomVocabulariesWhenSpecified() { final File file = Generator.generateFileWithId("test.html"); final Set vocabularies = new HashSet<>(Arrays.asList(Generator.generateUri(), Generator.generateUri())); + when(textAnalysisService.supportsLanguage(file)).thenReturn(true); sut.runTextAnalysis(file, vocabularies); verify(textAnalysisService).analyzeFile(file, vocabularies); } @@ -234,6 +244,7 @@ void runTextAnalysisInvokesAnalysisAlsoWithImportedVocabulariesOfVocabularyRElat final Set imported = new 
HashSet<>(Arrays.asList(Generator.generateUri(), Generator.generateUri())); when(vocabularyService.getReference(vocabulary.getUri())).thenReturn(vocabulary); when(vocabularyService.getTransitivelyImportedVocabularies(vocabulary)).thenReturn(imported); + when(textAnalysisService.supportsLanguage(file)).thenReturn(true); sut.runTextAnalysis(file, Collections.emptySet()); final Set expected = new HashSet<>(imported); @@ -253,6 +264,7 @@ void runTextAnalysisInvokesAnalysisWithProvidedVocabulariesAndTheirImports() { when(vocabularyService.getTransitivelyImportedVocabularies(vOne)).thenReturn(vOneImports); when(vocabularyService.getReference(vTwo.getUri())).thenReturn(vTwo); when(vocabularyService.getTransitivelyImportedVocabularies(vTwo)).thenReturn(vTwoImports); + when(textAnalysisService.supportsLanguage(file)).thenReturn(true); sut.runTextAnalysis(file, new HashSet<>(Arrays.asList(vOne.getUri(), vTwo.getUri()))); final Set expected = new HashSet<>(vOneImports); @@ -376,7 +388,7 @@ void removeFileThrowsTermItExceptionWhenFileIsNotLinkedToDocument() { @Test void findLatestTextAnalysisRecordRetrievesLatestTextAnalysisRecordForResource() { final File file = Generator.generateFileWithId("test.html"); - final TextAnalysisRecord record = new TextAnalysisRecord(Utils.timestamp(), file); + final TextAnalysisRecord record = new TextAnalysisRecord(Utils.timestamp(), file, Environment.LANGUAGE); when(textAnalysisService.findLatestAnalysisRecord(file)).thenReturn(Optional.of(record)); final TextAnalysisRecord result = sut.findLatestTextAnalysisRecord(file); @@ -426,9 +438,10 @@ void getLastModifiedReturnsValueFromRepositoryService() { void getChangesLoadsChangeRecordsForSpecifiedAssetFromChangeRecordService() { final Resource resource = Generator.generateResourceWithId(); final List records = Collections.singletonList(Generator.generatePersistChange(resource)); - when(changeRecordService.getChanges(resource)).thenReturn(records); + final ChangeRecordFilterDto filterDto = new 
ChangeRecordFilterDto(); + when(changeRecordService.getChanges(resource, filterDto)).thenReturn(records); assertEquals(records, sut.getChanges(resource)); - verify(changeRecordService).getChanges(resource); + verify(changeRecordService).getChanges(resource, filterDto); } @Test @@ -515,4 +528,49 @@ void getContentWithoutUnconfirmedOccurrencesRemovesUnconfirmedOccurrencesFromFil final org.jsoup.nodes.Document doc = Jsoup.parse(result.getInputStream(), StandardCharsets.UTF_8.name(), ""); assertTrue(doc.select("span[score]").isEmpty()); } + + @Test + void addFileToDocumentSetsFileLanguageToDefaultConfiguredWhenNotProvided() { + config.getPersistence().setLanguage(Environment.LANGUAGE); + final Vocabulary vocabulary = Generator.generateVocabularyWithId(); + final Document document = Generator.generateDocumentWithId(); + document.setVocabulary(vocabulary.getUri()); + final File file = Generator.generateFileWithId("test.hml"); + when(resourceRepositoryService.exists(document.getUri())).thenReturn(true); + when(resourceRepositoryService.findRequired(document.getUri())).thenReturn(document); + when(vocabularyService.getReference(vocabulary.getUri())).thenReturn(vocabulary); + + sut.addFileToDocument(document, file); + verify(resourceRepositoryService).persist(file, vocabulary); + assertEquals(config.getPersistence().getLanguage(), file.getLanguage()); + } + + @Test + void addFileToDocumentDoesNotModifyLanguageWhenItIsAlreadySet() { + config.getPersistence().setLanguage(Environment.LANGUAGE); + final Vocabulary vocabulary = Generator.generateVocabularyWithId(); + final Document document = Generator.generateDocumentWithId(); + document.setVocabulary(vocabulary.getUri()); + final File file = Generator.generateFileWithId("test.hml"); + file.setLanguage("cs"); + when(resourceRepositoryService.exists(document.getUri())).thenReturn(true); + when(resourceRepositoryService.findRequired(document.getUri())).thenReturn(document); + 
when(vocabularyService.getReference(vocabulary.getUri())).thenReturn(vocabulary); + + sut.addFileToDocument(document, file); + verify(resourceRepositoryService).persist(file, vocabulary); + assertEquals("cs", file.getLanguage()); + } + + @Test + void runTextAnalysisThrowsUnsupportedTextAnalysisExceptionWhenTextAnalysisServiceDoesNotSupportFileLanguage() { + final File file = Generator.generateFileWithId("test.html"); + file.setDocument(Generator.generateDocumentWithId()); + final Vocabulary vocabulary = Generator.generateVocabularyWithId(); + file.getDocument().setVocabulary(vocabulary.getUri()); + file.setLanguage("sk"); + when(textAnalysisService.supportsLanguage(file)).thenReturn(false); + assertThrows(UnsupportedTextAnalysisLanguageException.class, () -> sut.runTextAnalysis(file, Set.of(vocabulary.getUri()))); + verify(textAnalysisService).supportsLanguage(file); + } } diff --git a/src/test/java/cz/cvut/kbss/termit/service/business/TermServiceTest.java b/src/test/java/cz/cvut/kbss/termit/service/business/TermServiceTest.java index 5ea15780f..4a222635f 100644 --- a/src/test/java/cz/cvut/kbss/termit/service/business/TermServiceTest.java +++ b/src/test/java/cz/cvut/kbss/termit/service/business/TermServiceTest.java @@ -21,6 +21,7 @@ import cz.cvut.kbss.termit.dto.RdfsResource; import cz.cvut.kbss.termit.dto.TermInfo; import cz.cvut.kbss.termit.dto.assignment.TermOccurrences; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.dto.listing.TermDto; import cz.cvut.kbss.termit.environment.Environment; import cz.cvut.kbss.termit.environment.Generator; @@ -396,7 +397,7 @@ void setTermDefinitionReplacesExistingTermDefinition() { void getChangesRetrievesChangeRecordsFromChangeRecordService() { final Term asset = Generator.generateTermWithId(); sut.getChanges(asset); - verify(changeRecordService).getChanges(asset); + verify(changeRecordService).getChanges(asset, new ChangeRecordFilterDto()); } @Test diff --git 
a/src/test/java/cz/cvut/kbss/termit/service/business/VocabularyServiceTest.java b/src/test/java/cz/cvut/kbss/termit/service/business/VocabularyServiceTest.java index 6cc2d505d..9ad04a33a 100644 --- a/src/test/java/cz/cvut/kbss/termit/service/business/VocabularyServiceTest.java +++ b/src/test/java/cz/cvut/kbss/termit/service/business/VocabularyServiceTest.java @@ -19,6 +19,7 @@ import cz.cvut.kbss.termit.dto.Snapshot; import cz.cvut.kbss.termit.dto.acl.AccessControlListDto; +import cz.cvut.kbss.termit.dto.filter.ChangeRecordFilterDto; import cz.cvut.kbss.termit.dto.listing.TermDto; import cz.cvut.kbss.termit.dto.listing.VocabularyDto; import cz.cvut.kbss.termit.environment.Environment; @@ -184,10 +185,11 @@ void getChangesRetrievesChangesForVocabulary() { final Vocabulary vocabulary = Generator.generateVocabularyWithId(); final List records = Generator.generateChangeRecords(vocabulary, Generator.generateUserWithId()); - when(changeRecordService.getChanges(vocabulary)).thenReturn(records); + final ChangeRecordFilterDto emptyFilter = new ChangeRecordFilterDto(); + when(changeRecordService.getChanges(vocabulary, emptyFilter)).thenReturn(records); final List result = sut.getChanges(vocabulary); assertEquals(records, result); - verify(changeRecordService).getChanges(vocabulary); + verify(changeRecordService).getChanges(vocabulary, emptyFilter); } @Test @@ -381,9 +383,9 @@ void importNewVocabularyPublishesVocabularyCreatedEvent() { } @Test - void getExcelTemplateFileReturnsResourceRepresentingExcelTemplateFile() throws Exception { + void getExcelTemplateFileReturnsResourceRepresentingExcelImportTemplateFile() throws Exception { when(appContext.getBean(Configuration.class)).thenReturn(new Configuration()); - final TypeAwareResource result = sut.getExcelTemplateFile(); + final TypeAwareResource result = sut.getExcelImportTemplateFile(); assertTrue(result.getFileExtension().isPresent()); assertEquals(ExportFormat.EXCEL.getFileExtension(), result.getFileExtension().get()); 
assertTrue(result.getMediaType().isPresent()); diff --git a/src/test/java/cz/cvut/kbss/termit/service/changetracking/ChangeTrackingTest.java b/src/test/java/cz/cvut/kbss/termit/service/changetracking/ChangeTrackingTest.java index 05069c385..d570a1f85 100644 --- a/src/test/java/cz/cvut/kbss/termit/service/changetracking/ChangeTrackingTest.java +++ b/src/test/java/cz/cvut/kbss/termit/service/changetracking/ChangeTrackingTest.java @@ -27,6 +27,7 @@ import cz.cvut.kbss.termit.model.User; import cz.cvut.kbss.termit.model.Vocabulary; import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; +import cz.cvut.kbss.termit.model.changetracking.DeleteChangeRecord; import cz.cvut.kbss.termit.model.changetracking.PersistChangeRecord; import cz.cvut.kbss.termit.model.changetracking.UpdateChangeRecord; import cz.cvut.kbss.termit.model.resource.File; @@ -52,6 +53,8 @@ import static org.hamcrest.Matchers.nullValue; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertNotNull; public class ChangeTrackingTest extends BaseServiceTestRunner { @@ -141,7 +144,7 @@ void updatingVocabularyReferenceAndLiteralAttributesCreatesTwoUpdateRecords() { assertEquals(vocabulary.getUri(), chr.getChangedEntity()); assertThat(result.get(0), instanceOf(UpdateChangeRecord.class)); assertThat(((UpdateChangeRecord) chr).getChangedAttribute().toString(), anyOf(equalTo(DC.Terms.TITLE), - equalTo(cz.cvut.kbss.termit.util.Vocabulary.s_p_importuje_slovnik))); + equalTo(cz.cvut.kbss.termit.util.Vocabulary.s_p_importuje_slovnik))); }); } @@ -214,7 +217,7 @@ void updatingTermLiteralAttributesCreatesChangeRecordWithOriginalAndNewValue() { final List result = changeRecordDao.findAll(term); assertEquals(1, result.size()); assertEquals(Collections.singleton(originalDefinition), - ((UpdateChangeRecord) 
result.get(0)).getOriginalValue()); + ((UpdateChangeRecord) result.get(0)).getOriginalValue()); assertEquals(Collections.singleton(newDefinition), ((UpdateChangeRecord) result.get(0)).getNewValue()); } @@ -271,4 +274,24 @@ void updatingTermStateCreatesUpdateChangeRecord() { assertEquals(URI.create(cz.cvut.kbss.termit.util.Vocabulary.s_p_ma_stav_pojmu), ((UpdateChangeRecord) result.get(0)).getChangedAttribute()); } + + @Test + void deletingTermCreatesDeleteChangeRecord() { + enableRdfsInference(em); + final Term term = Generator.generateTermWithId(vocabulary.getUri()); + transactional(()-> { + em.persist(vocabulary, descriptorFactory.vocabularyDescriptor(vocabulary)); + term.setGlossary(vocabulary.getGlossary().getUri()); + em.persist(term, descriptorFactory.termDescriptor(vocabulary)); + Generator.addTermInVocabularyRelationship(term, vocabulary.getUri(), em); + }); + + termService.remove(term); + final List result = changeRecordDao.findAll(term); + assertEquals(1, result.size()); + final DeleteChangeRecord record = assertInstanceOf(DeleteChangeRecord.class, result.get(0)); + assertEquals(term.getUri(), record.getChangedEntity()); + assertNotNull(record.getLabel()); + assertEquals(term.getLabel(), record.getLabel()); + } } diff --git a/src/test/java/cz/cvut/kbss/termit/service/document/TextAnalysisServiceTest.java b/src/test/java/cz/cvut/kbss/termit/service/document/TextAnalysisServiceTest.java index aa431671e..9a049a40a 100644 --- a/src/test/java/cz/cvut/kbss/termit/service/document/TextAnalysisServiceTest.java +++ b/src/test/java/cz/cvut/kbss/termit/service/document/TextAnalysisServiceTest.java @@ -27,12 +27,14 @@ import cz.cvut.kbss.termit.event.FileTextAnalysisFinishedEvent; import cz.cvut.kbss.termit.event.TermDefinitionTextAnalysisFinishedEvent; import cz.cvut.kbss.termit.exception.NotFoundException; +import cz.cvut.kbss.termit.exception.UnsupportedTextAnalysisLanguageException; import cz.cvut.kbss.termit.exception.WebServiceIntegrationException; import 
cz.cvut.kbss.termit.model.Term; import cz.cvut.kbss.termit.model.TextAnalysisRecord; import cz.cvut.kbss.termit.model.Vocabulary; import cz.cvut.kbss.termit.model.resource.File; import cz.cvut.kbss.termit.persistence.dao.TextAnalysisRecordDao; +import cz.cvut.kbss.termit.rest.handler.ErrorInfo; import cz.cvut.kbss.termit.service.BaseServiceTestRunner; import cz.cvut.kbss.termit.util.Configuration; import cz.cvut.kbss.termit.util.Utils; @@ -70,6 +72,7 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -84,8 +87,10 @@ import static org.mockito.Mockito.when; import static org.springframework.test.web.client.match.MockRestRequestMatchers.content; import static org.springframework.test.web.client.match.MockRestRequestMatchers.header; +import static org.springframework.test.web.client.match.MockRestRequestMatchers.jsonPath; import static org.springframework.test.web.client.match.MockRestRequestMatchers.method; import static org.springframework.test.web.client.match.MockRestRequestMatchers.requestTo; +import static org.springframework.test.web.client.response.MockRestResponseCreators.withRequestConflict; import static org.springframework.test.web.client.response.MockRestResponseCreators.withServerError; import static org.springframework.test.web.client.response.MockRestResponseCreators.withSuccess; @@ -143,14 +148,14 @@ void setUp() throws Exception { doCallRealMethod().when(documentManagerSpy).loadFileContent(any()); doNothing().when(documentManagerSpy).createBackup(any()); this.sut = new TextAnalysisService(restTemplate, config, documentManagerSpy, annotationGeneratorMock, - textAnalysisRecordDao, 
eventPublisher); + textAnalysisRecordDao, eventPublisher); } @Test void analyzeFileInvokesTextAnalysisServiceWithDocumentContent() { mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) - .andExpect(method(HttpMethod.POST)).andExpect(content().string(containsString(CONTENT))) - .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); + .andExpect(method(HttpMethod.POST)).andExpect(content().string(containsString(CONTENT))) + .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); sut.analyzeFile(file, Collections.singleton(vocabulary.getUri())); mockServer.verify(); } @@ -159,7 +164,8 @@ private void generateFile() throws IOException { final java.io.File dir = Files.createTempDirectory("termit").toFile(); dir.deleteOnExit(); config.getFile().setStorage(dir.getAbsolutePath()); - final java.io.File docDir = new java.io.File(dir.getAbsolutePath() + java.io.File.separator + file.getDirectoryName()); + final java.io.File docDir = new java.io.File( + dir.getAbsolutePath() + java.io.File.separator + file.getDirectoryName()); Files.createDirectory(docDir.toPath()); docDir.deleteOnExit(); final java.io.File content = new java.io.File( @@ -172,9 +178,9 @@ private void generateFile() throws IOException { void analyzeFilePassesRepositoryAndVocabularyContextToService() throws Exception { final TextAnalysisInput input = textAnalysisInput(); mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) - .andExpect(method(HttpMethod.POST)) - .andExpect(content().string(objectMapper.writeValueAsString(input))) - .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); + .andExpect(method(HttpMethod.POST)) + .andExpect(content().string(objectMapper.writeValueAsString(input))) + .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); sut.analyzeFile(file, Collections.singleton(vocabulary.getUri())); mockServer.verify(); } @@ -184,8 +190,8 @@ private TextAnalysisInput textAnalysisInput() { input.setContent(CONTENT); 
input.addVocabularyContext(vocabulary.getUri()); URI repositoryUrl = URI.create( - config.getRepository().getPublicUrl() - .orElse(config.getRepository().getUrl()) + config.getRepository().getPublicUrl() + .orElse(config.getRepository().getUrl()) ); input.setVocabularyRepository(repositoryUrl); input.setLanguage(config.getPersistence().getLanguage()); @@ -198,11 +204,11 @@ private TextAnalysisInput textAnalysisInput() { void analyzeFilePassesContentTypeAndAcceptHeadersToService() throws Exception { final TextAnalysisInput input = textAnalysisInput(); mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) - .andExpect(method(HttpMethod.POST)) - .andExpect(content().string(objectMapper.writeValueAsString(input))) - .andExpect(header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE)) - .andExpect(header(HttpHeaders.ACCEPT, MediaType.APPLICATION_XML_VALUE)) - .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); + .andExpect(method(HttpMethod.POST)) + .andExpect(content().string(objectMapper.writeValueAsString(input))) + .andExpect(header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE)) + .andExpect(header(HttpHeaders.ACCEPT,MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE)) + .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); sut.analyzeFile(file, Collections.singleton(vocabulary.getUri())); mockServer.verify(); } @@ -228,11 +234,11 @@ void analyzeFilePassesRepositoryUsernameAndPasswordToServiceWhenProvided() throw void analyzeFileThrowsWebServiceIntegrationExceptionOnError() throws Exception { final TextAnalysisInput input = textAnalysisInput(); mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) - .andExpect(method(HttpMethod.POST)) - .andExpect(content().string(objectMapper.writeValueAsString(input))) - .andRespond(withServerError()); + .andExpect(method(HttpMethod.POST)) + .andExpect(content().string(objectMapper.writeValueAsString(input))) + .andRespond(withServerError()); 
assertThrows(WebServiceIntegrationException.class, - () -> sut.analyzeFile(file, Collections.singleton(vocabulary.getUri()))); + () -> sut.analyzeFile(file, Collections.singleton(vocabulary.getUri()))); mockServer.verify(); } @@ -256,7 +262,8 @@ void analyzeFileInvokesAnnotationGeneratorWithResultFromTextAnalysisService() th void analyzeFileThrowsNotFoundExceptionWhenFileCannotBeFound() { file.setLabel("unknown.html"); final NotFoundException result = assertThrows(NotFoundException.class, - () -> sut.analyzeFile(file, Collections.singleton(vocabulary.getUri()))); + () -> sut.analyzeFile(file, Collections.singleton( + vocabulary.getUri()))); assertThat(result.getMessage(), containsString("not found on file system")); } @@ -264,11 +271,12 @@ void analyzeFileThrowsNotFoundExceptionWhenFileCannotBeFound() { void analyzeFileThrowsWebServiceIntegrationExceptionWhenRemoteServiceReturnsEmptyBody() throws Exception { final TextAnalysisInput input = textAnalysisInput(); mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) - .andExpect(method(HttpMethod.POST)) - .andExpect(content().string(objectMapper.writeValueAsString(input))) - .andRespond(withSuccess()); + .andExpect(method(HttpMethod.POST)) + .andExpect(content().string(objectMapper.writeValueAsString(input))) + .andRespond(withSuccess()); final WebServiceIntegrationException result = assertThrows(WebServiceIntegrationException.class, - () -> sut.analyzeFile(file, Collections.singleton(vocabulary.getUri()))); + () -> sut.analyzeFile(file, Collections.singleton( + vocabulary.getUri()))); assertThat(result.getMessage(), containsString("empty response")); mockServer.verify(); } @@ -290,13 +298,13 @@ void analyzeFileCreatesFileBackupBeforeInvokingAnnotationGenerator() throws Exce @Test void analyzeFilePassesRepositoryAndSpecifiedVocabularyContextsToService() throws Exception { final Set vocabs = IntStream.range(0, 5).mapToObj(i -> Generator.generateUri()) - .collect(Collectors.toSet()); + 
.collect(Collectors.toSet()); final TextAnalysisInput expected = textAnalysisInput(); expected.setVocabularyContexts(vocabs); mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) - .andExpect(method(HttpMethod.POST)) - .andExpect(content().string(objectMapper.writeValueAsString(expected))) - .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); + .andExpect(method(HttpMethod.POST)) + .andExpect(content().string(objectMapper.writeValueAsString(expected))) + .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); sut.analyzeFile(file, vocabs); mockServer.verify(); } @@ -305,9 +313,9 @@ void analyzeFilePassesRepositoryAndSpecifiedVocabularyContextsToService() throws void analyzeFileBacksUpFileContentBeforeSavingNewAnalyzedContent() throws Exception { final TextAnalysisInput input = textAnalysisInput(); mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) - .andExpect(method(HttpMethod.POST)) - .andExpect(content().string(objectMapper.writeValueAsString(input))) - .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); + .andExpect(method(HttpMethod.POST)) + .andExpect(content().string(objectMapper.writeValueAsString(input))) + .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); sut.analyzeFile(file, Collections.singleton(vocabulary.getUri())); mockServer.verify(); final InOrder inOrder = Mockito.inOrder(documentManagerSpy, annotationGeneratorMock); @@ -317,19 +325,21 @@ void analyzeFileBacksUpFileContentBeforeSavingNewAnalyzedContent() throws Except @Test void analyzeFileCreatesTextAnalysisRecord() { + file.setLanguage("cs"); mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) - .andExpect(method(HttpMethod.POST)).andExpect(content().string(containsString(CONTENT))) - .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); + .andExpect(method(HttpMethod.POST)).andExpect(content().string(containsString(CONTENT))) + .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); sut.analyzeFile(file, 
Collections.singleton(vocabulary.getUri())); final ArgumentCaptor captor = ArgumentCaptor.forClass(TextAnalysisRecord.class); verify(textAnalysisRecordDao).persist(captor.capture()); assertEquals(file, captor.getValue().getAnalyzedResource()); assertEquals(Collections.singleton(vocabulary.getUri()), captor.getValue().getVocabularies()); + assertEquals(file.getLanguage(), captor.getValue().getLanguage()); } @Test void findLatestAnalysisRecordFindsLatestTextAnalysisRecordForResource() { - final TextAnalysisRecord record = new TextAnalysisRecord(Utils.timestamp(), file); + final TextAnalysisRecord record = new TextAnalysisRecord(Utils.timestamp(), file, Environment.LANGUAGE); record.setVocabularies(Collections.singleton(vocabulary.getUri())); when(textAnalysisRecordDao.findLatest(file)).thenReturn(Optional.of(record)); @@ -424,7 +434,8 @@ void analyzeFilePublishesAnalysisFinishedEvent() { .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); sut.analyzeFile(file, Collections.singleton(vocabulary.getUri())); - ArgumentCaptor eventCaptor = ArgumentCaptor.forClass(FileTextAnalysisFinishedEvent.class); + ArgumentCaptor eventCaptor = ArgumentCaptor.forClass( + FileTextAnalysisFinishedEvent.class); verify(eventPublisher).publishEvent(eventCaptor.capture()); assertNotNull(eventCaptor.getValue()); assertEquals(file.getUri(), eventCaptor.getValue().getFileUri()); @@ -444,10 +455,80 @@ void analyzeTermDefinitionPublishesAnalysisFinishedEvent() throws JsonProcessing sut.analyzeTermDefinition(term, vocabulary.getUri()); - ArgumentCaptor eventCaptor = ArgumentCaptor.forClass(TermDefinitionTextAnalysisFinishedEvent.class); + ArgumentCaptor eventCaptor = ArgumentCaptor.forClass( + TermDefinitionTextAnalysisFinishedEvent.class); verify(eventPublisher).publishEvent(eventCaptor.capture()); assertNotNull(eventCaptor.getValue()); assertEquals(term.getUri(), eventCaptor.getValue().getTermUri()); assertEquals(vocabulary.getUri(), eventCaptor.getValue().getVocabularyIri()); } + + 
@Test + void analyzeFileSetsFileLanguageInTextAnalysisInvocationInput() { + file.setLanguage("cs"); + mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) + .andExpect(method(HttpMethod.POST)) + .andExpect(jsonPath("$.language").value("cs")) + .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); + sut.analyzeFile(file, Collections.singleton(vocabulary.getUri())); + mockServer.verify(); + } + + @Test + void analyzeFileUsesConfiguredPersistenceLanguageInTextAnalysisInvocationInputWhenFileLanguageIsNotSet() { + file.setLanguage(null); + mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) + .andExpect(method(HttpMethod.POST)) + .andExpect(jsonPath("$.language").value(Environment.LANGUAGE)) + .andRespond(withSuccess(CONTENT, MediaType.APPLICATION_XML)); + sut.analyzeFile(file, Collections.singleton(vocabulary.getUri())); + mockServer.verify(); + } + + @Test + void analyzeFileThrowsUnsupportedLanguageExceptionWhenTextAnalysisInvocationReturnsConflictWithUnsupportedLanguageError() + throws Exception { + file.setLanguage("de"); + final ErrorInfo respBody = ErrorInfo.createWithMessage("No taggers for language 'de' available.", + "/annotace/annotate"); + mockServer.expect(requestTo(config.getTextAnalysis().getUrl())) + .andExpect(method(HttpMethod.POST)) + .andRespond(withRequestConflict().body(objectMapper.writeValueAsString(respBody)) + .contentType(MediaType.APPLICATION_JSON)); + + final UnsupportedTextAnalysisLanguageException ex = assertThrows(UnsupportedTextAnalysisLanguageException.class, + () -> sut.analyzeFile(file, + Collections.singleton( + vocabulary.getUri()))); + assertEquals("error.annotation.file.unsupportedLanguage", ex.getMessageId()); + } + + @Test + void supportsLanguageGetsListOfSupportedLanguagesFromTextAnalysisServiceAndChecksIfFileLanguageIsAmongThem() { + file.setLanguage("cs"); + mockServer.expect(requestTo(config.getTextAnalysis().getLanguagesUrl())) + .andExpect(method(HttpMethod.GET)) + 
.andRespond(withSuccess("[\"cs\", \"en\"]", MediaType.APPLICATION_JSON)); + assertTrue(sut.supportsLanguage(file)); + mockServer.verify(); + + file.setLanguage("de"); + assertFalse(sut.supportsLanguage(file)); + } + + @Test + void supportsLanguageReturnsTrueWhenTextAnalysisServiceLanguagesEndpointUrlIsNotConfigured() { + String endpointUrl = config.getTextAnalysis().getLanguagesUrl(); + file.setLanguage(Environment.LANGUAGE); + config.getTextAnalysis().setLanguagesUrl(null); + assertTrue(sut.supportsLanguage(file)); + // Reset configuration state + config.getTextAnalysis().setLanguagesUrl(endpointUrl); + } + + @Test + void supportsLanguageReturnsTrueWhenFileHasNoLanguageSet() { + file.setLanguage(null); + assertTrue(sut.supportsLanguage(file)); + } } diff --git a/src/test/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporterTest.java b/src/test/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporterTest.java index 5804ca6e8..eb682d4ae 100644 --- a/src/test/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporterTest.java +++ b/src/test/java/cz/cvut/kbss/termit/service/importer/excel/ExcelImporterTest.java @@ -38,6 +38,7 @@ import java.io.ByteArrayOutputStream; import java.net.URI; import java.util.Collection; +import java.util.HashSet; import java.util.List; import java.util.Objects; import java.util.Optional; @@ -86,7 +87,7 @@ class ExcelImporterTest { @SuppressWarnings("unused") @Spy - private IdentifierResolver idResolver = new IdentifierResolver(new Configuration()); + private IdentifierResolver idResolver = new IdentifierResolver(config); @InjectMocks private ExcelImporter sut; @@ -97,6 +98,7 @@ class ExcelImporterTest { void setUp() { this.vocabulary = Generator.generateVocabularyWithId(); config.getNamespace().getTerm().setSeparator("/terms"); + config.getPersistence().setLanguage(Environment.LANGUAGE); } @ParameterizedTest @@ -350,9 +352,7 @@ void importFallsBackToEnglishColumnLabelsForUnknownLanguages() { @Test void 
importSupportsTermIdentifiers() { - vocabulary.setUri(URI.create("http://example.com")); - when(vocabularyDao.exists(vocabulary.getUri())).thenReturn(true); - when(vocabularyDao.find(vocabulary.getUri())).thenReturn(Optional.of(vocabulary)); + initVocabularyResolution(); final Vocabulary result = sut.importVocabulary( new VocabularyImporter.ImportConfiguration(false, vocabulary.getUri(), prePersist), @@ -378,11 +378,15 @@ void importSupportsTermIdentifiers() { building.get().getUri(), vocabulary.getUri())), quadsCaptor.getValue()); } - @Test - void importSupportsPrefixedTermIdentifiers() { + private void initVocabularyResolution() { vocabulary.setUri(URI.create("http://example.com")); when(vocabularyDao.exists(vocabulary.getUri())).thenReturn(true); when(vocabularyDao.find(vocabulary.getUri())).thenReturn(Optional.of(vocabulary)); + } + + @Test + void importSupportsPrefixedTermIdentifiers() { + initVocabularyResolution(); final Vocabulary result = sut.importVocabulary( new VocabularyImporter.ImportConfiguration(false, vocabulary.getUri(), prePersist), @@ -431,9 +435,7 @@ void importAdjustsTermIdentifiersToUseExistingVocabularyIdentifierAndSeparatorAs @Test void importRemovesExistingInstanceWhenImportedTermAlreadyExists() { - vocabulary.setUri(URI.create("http://example.com")); - when(vocabularyDao.exists(vocabulary.getUri())).thenReturn(true); - when(vocabularyDao.find(vocabulary.getUri())).thenReturn(Optional.of(vocabulary)); + initVocabularyResolution(); final Term existingBuilding = Generator.generateTermWithId(); existingBuilding.setUri(URI.create("http://example.com/terms/building")); final Term existingConstruction = Generator.generateTermWithId(); @@ -457,9 +459,7 @@ void importRemovesExistingInstanceWhenImportedTermAlreadyExists() { @Test void importSupportsReferencesToOtherVocabulariesViaTermIdentifiersWhenReferencedTermsExist() { - vocabulary.setUri(URI.create("http://example.com")); - when(vocabularyDao.exists(vocabulary.getUri())).thenReturn(true); - 
when(vocabularyDao.find(vocabulary.getUri())).thenReturn(Optional.of(vocabulary)); + initVocabularyResolution(); when(termService.exists(any())).thenReturn(false); when(termService.exists(URI.create("http://example.com/another-vocabulary/terms/relatedMatch"))).thenReturn( true); @@ -568,9 +568,7 @@ void importThrowsVocabularyImportExceptionWhenSheetContainsDuplicateLabels() thr @Test void importThrowsVocabularyImportExceptionWhenSheetContainsDuplicateIdentifiers() throws Exception { - vocabulary.setUri(URI.create("http://example.com")); - when(vocabularyDao.exists(vocabulary.getUri())).thenReturn(true); - when(vocabularyDao.find(vocabulary.getUri())).thenReturn(Optional.of(vocabulary)); + initVocabularyResolution(); final Workbook input = new XSSFWorkbook(Environment.loadFile("template/termit-import.xlsx")); final Sheet sheet = input.getSheet("English"); sheet.shiftColumns(0, 12, 1); @@ -597,9 +595,7 @@ void importThrowsVocabularyImportExceptionWhenSheetContainsDuplicateIdentifiers( @Test void importSupportsSpecifyingStateAndTypeOnlyInOneSheet() throws Exception { - vocabulary.setUri(URI.create("http://example.com")); - when(vocabularyDao.exists(vocabulary.getUri())).thenReturn(true); - when(vocabularyDao.find(vocabulary.getUri())).thenReturn(Optional.of(vocabulary)); + initVocabularyResolution(); final Workbook input = new XSSFWorkbook(Environment.loadFile("template/termit-import.xlsx")); final Sheet englishSheet = input.getSheet("English"); englishSheet.getRow(1).createCell(0).setCellValue("Construction"); @@ -651,9 +647,7 @@ void importThrowsVocabularyImportExceptionWhenVocabularyAlreadyContainsTermWithS @Test void importSupportsMultipleTypesDeclaredForTerm() throws Exception { - vocabulary.setUri(URI.create("http://example.com")); - when(vocabularyDao.exists(vocabulary.getUri())).thenReturn(true); - when(vocabularyDao.find(vocabulary.getUri())).thenReturn(Optional.of(vocabulary)); + initVocabularyResolution(); final Workbook input = new 
XSSFWorkbook(Environment.loadFile("template/termit-import.xlsx")); final Sheet englishSheet = input.getSheet("English"); englishSheet.getRow(1).createCell(0).setCellValue("Construction"); @@ -678,4 +672,100 @@ void importSupportsMultipleTypesDeclaredForTerm() throws Exception { assertThat(captor.getValue().getTypes(), hasItems(objectType.getUri().toString(), eventType.getUri().toString())); } + + @Test + void importTermTranslationsFromExcelWithIdentifiersUpdatesExistingTerms() { + vocabulary.setUri(URI.create("http://example.com")); + when(vocabularyDao.find(vocabulary.getUri())).thenReturn(Optional.of(vocabulary)); + final Term building = initTermBuilding(); + final Term construction = initTermConstruction(); + + final Vocabulary result = sut.importTermTranslations(vocabulary.getUri(), new VocabularyImporter.ImportInput( + Constants.MediaType.EXCEL, + Environment.loadFile("data/import-with-identifiers-en-cs.xlsx"))); + assertEquals(vocabulary, result); + assertEquals("Budova", building.getLabel().get("cs")); + List.of("Barák", "Dům").forEach(t -> assertTrue( + building.getAltLabels().stream().anyMatch(mls -> mls.contains("cs") && mls.get("cs").equals(t)))); + assertEquals("Definice pojmu budova", building.getDefinition().get("cs")); + assertEquals("Doplňující poznámka pojmu budova", building.getDescription().get("cs")); + assertEquals("Stavba", construction.getLabel().get("cs")); + assertEquals("Proces výstavby budovy", construction.getDefinition().get("cs")); + assertTrue(construction.getAltLabels().stream() + .anyMatch(mls -> mls.contains("cs") && mls.get("cs").equals("Staveniště"))); + verify(termService).update(building); + verify(termService).update(construction); + } + + private Term initTermBuilding() { + final Term building = new Term(URI.create("http://example.com/terms/budova")); + building.setLabel(MultilingualString.create("Building", "en")); + building.setAltLabels(new HashSet<>(Set.of(MultilingualString.create("Complex", "en")))); + 
building.setDefinition(MultilingualString.create("Definition of term Building", "en")); + building.setDescription(MultilingualString.create("Building scope note", "en")); + building.setHiddenLabels(new HashSet<>()); + building.setExamples(new HashSet<>()); + building.setVocabulary(vocabulary.getUri()); + when(termService.find(building.getUri())).thenReturn(Optional.of(building)); + return building; + } + + private Term initTermConstruction() { + final Term construction = new Term(URI.create("http://example.com/terms/stavba")); + construction.setLabel(MultilingualString.create("Construction", "en")); + construction.setAltLabels(new HashSet<>(Set.of(MultilingualString.create("Construction site", "en")))); + construction.setDefinition(MultilingualString.create("The process of building a building", "en")); + construction.setHiddenLabels(new HashSet<>()); + construction.setExamples(new HashSet<>()); + construction.setVocabulary(vocabulary.getUri()); + when(termService.find(construction.getUri())).thenReturn(Optional.of(construction)); + return construction; + } + + @Test + void importTermTranslationsPreservesExistingValues() { + vocabulary.setUri(URI.create("http://example.com")); + when(vocabularyDao.find(vocabulary.getUri())).thenReturn(Optional.of(vocabulary)); + final Term building = initTermBuilding(); + + final Vocabulary result = sut.importTermTranslations(vocabulary.getUri(), new VocabularyImporter.ImportInput( + Constants.MediaType.EXCEL, + Environment.loadFile("data/import-with-identifiers-en-cs.xlsx"))); + assertEquals(vocabulary, result); + assertEquals("Building", building.getLabel().get("en")); + assertEquals("Definition of term Building", building.getDefinition().get("en")); + assertTrue(building.getAltLabels().stream() + .anyMatch(mls -> mls.contains("en") && mls.get("en").equals("Complex"))); + } + + @Test + void importTermTranslationsUsesTermLabelToResolveIdentifierWhenExcelDoesNotContainIdentifiers() { + 
vocabulary.setUri(URI.create("http://example.com")); + when(vocabularyDao.find(vocabulary.getUri())).thenReturn(Optional.of(vocabulary)); + config.getPersistence().setLanguage("cs"); + final Term building = initTermBuilding(); + + sut.importTermTranslations(vocabulary.getUri(), new VocabularyImporter.ImportInput( + Constants.MediaType.EXCEL, + Environment.loadFile("data/import-simple-en-cs.xlsx"))); + verify(termService).find(building.getUri()); + assertEquals("Budova", building.getLabel().get("cs")); + verify(termService).update(any(Term.class)); + } + + @Test + void importTermTranslationsThrowsVocabularyImportExceptionWhenExcelDoesNotContainIdentifierAndSheetWithLabelsInPrimaryLanguage() { + vocabulary.setUri(URI.create("http://example.com")); + when(vocabularyDao.find(vocabulary.getUri())).thenReturn(Optional.of(vocabulary)); + + VocabularyImportException ex = assertThrows(VocabularyImportException.class, + () -> sut.importTermTranslations(vocabulary.getUri(), + new VocabularyImporter.ImportInput( + Constants.MediaType.EXCEL, + Environment.loadFile( + "data/import-simple-de.xlsx")) + )); + assertEquals("error.vocabulary.import.excel.missingIdentifierOrLabel", ex.getMessageId()); + verify(termService, never()).update(any()); + } } diff --git a/src/test/java/cz/cvut/kbss/termit/service/repository/ChangeRecordServiceTest.java b/src/test/java/cz/cvut/kbss/termit/service/repository/ChangeRecordServiceTest.java index 3d2b135a5..24b1f313f 100644 --- a/src/test/java/cz/cvut/kbss/termit/service/repository/ChangeRecordServiceTest.java +++ b/src/test/java/cz/cvut/kbss/termit/service/repository/ChangeRecordServiceTest.java @@ -26,6 +26,7 @@ import cz.cvut.kbss.termit.model.changetracking.AbstractChangeRecord; import cz.cvut.kbss.termit.model.changetracking.UpdateChangeRecord; import cz.cvut.kbss.termit.persistence.context.DescriptorFactory; +import cz.cvut.kbss.termit.persistence.dao.changetracking.ChangeRecordDao; import cz.cvut.kbss.termit.service.BaseServiceTestRunner; 
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -51,6 +52,9 @@ class ChangeRecordServiceTest extends BaseServiceTestRunner { @Autowired private ChangeRecordService sut; + @Autowired + private ChangeRecordDao dao; + private User author; private Vocabulary asset; @@ -84,7 +88,9 @@ private List generateChanges() { r.setTimestamp(Instant.ofEpochMilli(System.currentTimeMillis() - i * 10000L)); return r; }).collect(Collectors.toList()); - transactional(() -> records.forEach(em::persist)); + transactional(() -> { + records.forEach(r -> dao.persist(r, asset)); + }); return records; } } diff --git a/src/test/java/cz/cvut/kbss/termit/service/security/authorization/TermOccurrenceAuthorizationServiceTest.java b/src/test/java/cz/cvut/kbss/termit/service/security/authorization/TermOccurrenceAuthorizationServiceTest.java new file mode 100644 index 000000000..360f73ff2 --- /dev/null +++ b/src/test/java/cz/cvut/kbss/termit/service/security/authorization/TermOccurrenceAuthorizationServiceTest.java @@ -0,0 +1,86 @@ +package cz.cvut.kbss.termit.service.security.authorization; + +import cz.cvut.kbss.termit.environment.Generator; +import cz.cvut.kbss.termit.model.Vocabulary; +import cz.cvut.kbss.termit.model.assignment.DefinitionalOccurrenceTarget; +import cz.cvut.kbss.termit.model.assignment.FileOccurrenceTarget; +import cz.cvut.kbss.termit.model.assignment.TermDefinitionalOccurrence; +import cz.cvut.kbss.termit.model.assignment.TermFileOccurrence; +import cz.cvut.kbss.termit.model.assignment.TermOccurrence; +import cz.cvut.kbss.termit.model.resource.File; +import cz.cvut.kbss.termit.persistence.dao.TermOccurrenceDao; +import cz.cvut.kbss.termit.service.repository.ResourceRepositoryService; +import cz.cvut.kbss.termit.service.repository.TermRepositoryService; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; 
+ +import java.net.URI; +import java.util.Optional; + +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +@ExtendWith(MockitoExtension.class) +class TermOccurrenceAuthorizationServiceTest { + + @Mock + private TermOccurrenceDao toDao; + + @Mock + private TermRepositoryService termService; + + @Mock + private ResourceRepositoryService resourceService; + + @Mock + private VocabularyAuthorizationService vocabularyAuthorizationService; + + @Mock + private ResourceAuthorizationService resourceAuthorizationService; + + @InjectMocks + private TermOccurrenceAuthorizationService sut; + + @Test + void canModifyResolvesTermVocabularyAndChecksIfUserCanModifyItWhenTermOccurrenceIsDefinitional() { + final URI vocabularyUri = Generator.generateUri(); + final TermOccurrence to = new TermDefinitionalOccurrence(Generator.generateUri(), + new DefinitionalOccurrenceTarget( + Generator.generateTermWithId(vocabularyUri))); + to.setUri(Generator.generateUri()); + when(termService.findTermVocabulary(to.getTarget().getSource())).thenReturn(Optional.of(vocabularyUri)); + when(vocabularyAuthorizationService.canModify(new Vocabulary(vocabularyUri))).thenReturn(true); + when(toDao.find(to.getUri())).thenReturn(Optional.of(to)); + + assertTrue(sut.canModify(to.getUri())); + verify(vocabularyAuthorizationService).canModify(new Vocabulary(vocabularyUri)); + } + + @Test + void canModifyResolvesResourceVocabularyAndChecksIfUserCanModifyItWhenTermOccurrenceIsFileOccurrence() { + final URI vocabularyUri = Generator.generateUri(); + final File file = Generator.generateFileWithId("test.html"); + file.setDocument(Generator.generateDocumentWithId()); + file.getDocument().setVocabulary(vocabularyUri); + final TermOccurrence to = new TermFileOccurrence(Generator.generateUri(), new 
FileOccurrenceTarget(file)); + to.setUri(Generator.generateUri()); + when(resourceService.find(file.getUri())).thenReturn(Optional.of(file)); + when(resourceAuthorizationService.canModify(file)).thenReturn(true); + when(toDao.find(to.getUri())).thenReturn(Optional.of(to)); + + assertTrue(sut.canModify(to.getUri())); + verify(resourceAuthorizationService).canModify(file); + } + + @Test + void canModifyReturnsFalseWhenTermOccurrenceDoesNotExist() { + when(toDao.find(any())).thenReturn(Optional.empty()); + assertFalse(sut.canModify(Generator.generateUri())); + } +} diff --git a/src/test/java/cz/cvut/kbss/termit/util/VocabularyTest.java b/src/test/java/cz/cvut/kbss/termit/util/VocabularyTest.java index a35fd6534..7c4d6aac9 100644 --- a/src/test/java/cz/cvut/kbss/termit/util/VocabularyTest.java +++ b/src/test/java/cz/cvut/kbss/termit/util/VocabularyTest.java @@ -23,7 +23,6 @@ public class VocabularyTest { @Test - // @todo until https://github.com/kbss-cvut/jopa/issues/85 is resolved public void ensureContentHasCorrectUrl() { Assert.equals("http://rdfs.org/sioc/ns#content", Vocabulary.s_p_sioc_content); } diff --git a/src/test/java/cz/cvut/kbss/termit/util/throttle/ThrottledFutureTest.java b/src/test/java/cz/cvut/kbss/termit/util/throttle/ThrottledFutureTest.java index bf8f4f4e0..b051471ab 100644 --- a/src/test/java/cz/cvut/kbss/termit/util/throttle/ThrottledFutureTest.java +++ b/src/test/java/cz/cvut/kbss/termit/util/throttle/ThrottledFutureTest.java @@ -85,8 +85,7 @@ void getNowReturnsEmptyWhenCacheIsNull() { @Test void thenActionIsExecutedSynchronouslyWhenFutureIsAlreadyDoneAndNotCanceled() { - final Object result = new Object(); - final ThrottledFuture future = ThrottledFuture.of(() -> result); + final ThrottledFuture future = ThrottledFuture.of(() -> null); final AtomicBoolean completed = new AtomicBoolean(false); final AtomicReference futureResult = new AtomicReference<>(null); future.run(null); @@ -97,25 +96,24 @@ void 
thenActionIsExecutedSynchronouslyWhenFutureIsAlreadyDoneAndNotCanceled() { futureResult.set(fResult); }); assertTrue(completed.get()); - assertEquals(result, futureResult.get()); + assertEquals(future, futureResult.get()); } @Test - void thenActionIsNotExecutedWhenFutureIsAlreadyCancelled() { + void thenActionIsExecutedWhenFutureIsAlreadyCancelled() { final ThrottledFuture future = ThrottledFuture.of(Object::new); final AtomicBoolean completed = new AtomicBoolean(false); future.cancel(false); assertTrue(future.isCancelled()); future.then(result -> completed.set(true)); - assertFalse(completed.get()); + assertTrue(completed.get()); } @Test void thenActionIsExecutedOnceFutureIsRun() { - final Object result = new Object(); final AtomicBoolean completed = new AtomicBoolean(false); final AtomicReference fResult = new AtomicReference<>(null); - final ThrottledFuture future = ThrottledFuture.of(() -> result); + final ThrottledFuture future = ThrottledFuture.of(() -> null); future.then(futureResult -> { completed.set(true); fResult.set(futureResult); @@ -124,18 +122,82 @@ void thenActionIsExecutedOnceFutureIsRun() { assertFalse(completed.get()); // action was not executed yet future.run(null); assertTrue(completed.get()); - assertEquals(result, fResult.get()); + assertEquals(future, fResult.get()); } @Test - void thenActionIsNotExecutedOnceFutureIsCancelled() { + void thenActionIsExecutedOnceFutureIsCancelled() { final Object result = new Object(); final AtomicBoolean completed = new AtomicBoolean(false); final ThrottledFuture future = ThrottledFuture.of(() -> result); future.then(futureResult -> completed.set(true)); assertFalse(completed.get()); // action was not executed yet future.cancel(false); + assertTrue(completed.get()); + } + + @Test + void thenActionIsExecutedOnlyOnceWhenFutureIsCancelled() { + final AtomicInteger executionCount = new AtomicInteger(0); + final ThrottledFuture future = ThrottledFuture.of(() -> null); + future.then(f -> 
executionCount.incrementAndGet()); + assertEquals(0, executionCount.get()); + future.cancel(false); + assertEquals(1, executionCount.get()); + future.cancel(false); + future.cancel(true); + assertEquals(1, executionCount.get()); + } + + @Test + void thenActionIsExecutedWhenFutureCompletesExceptionally() { + final AtomicBoolean completed = new AtomicBoolean(false); + final ThrottledFuture future = ThrottledFuture.of(() -> { + throw new RuntimeException(); + }); + future.then(futureResult -> completed.set(true)); assertFalse(completed.get()); + future.run(null); + assertTrue(completed.get()); + } + + @Test + void isCompletedExceptionallyReturnsTrueWhenFutureCompletesExceptionally() { + final ThrottledFuture future = ThrottledFuture.of(() -> { + throw new RuntimeException(); + }); + future.run(null); + assertTrue(future.isCompletedExceptionally()); + } + + @Test + void isCompletedExceptionallyReturnsFalseWhenFutureCompletesNormally() { + final ThrottledFuture future = ThrottledFuture.of(() -> null); + future.run(null); + assertFalse(future.isCompletedExceptionally()); + assertFalse(future.isCancelled()); + assertTrue(future.isDone()); + } + + @Test + void isCompletedExceptionallyReturnsTrueWhenFutureIsCancelled() { + final ThrottledFuture future = ThrottledFuture.of(() -> null); + future.cancel(false); + assertTrue(future.isCompletedExceptionally()); + assertTrue(future.isCancelled()); + assertTrue(future.isDone()); + } + + @Test + void thenActionIsExecutedWhenFutureIsAlreadyCompletedExceptionally() { + final AtomicBoolean completed = new AtomicBoolean(false); + final ThrottledFuture future = ThrottledFuture.of(() -> { + throw new RuntimeException(); + }); + future.run(null); + assertFalse(completed.get()); + future.then(futureResult -> completed.set(true)); + assertTrue(completed.get()); } @Test @@ -287,8 +349,8 @@ void transferUpdatesSecondFutureWithTask() { @Test void transferUpdatesSecondFutureWithCallbacks() { - final Consumer firstCallback = (result) -> {}; - 
final Consumer secondCallback = (result) -> {}; + final Consumer> firstCallback = (result) -> {}; + final Consumer> secondCallback = (result) -> {}; final ThrottledFuture firstFuture = ThrottledFuture.of(()->"").then(firstCallback); final ThrottledFuture secondFuture = ThrottledFuture.of(()->"").then(secondCallback); final ThrottledFuture mocked = mock(ThrottledFuture.class); @@ -311,14 +373,14 @@ void transferUpdatesSecondFutureWithCallbacks() { @Test void callbacksAreClearedAfterTransferring() { - final Consumer firstCallback = (result) -> {}; - final Consumer secondCallback = (result) -> {}; + final Consumer> firstCallback = (result) -> {}; + final Consumer> secondCallback = (result) -> {}; final ThrottledFuture future = ThrottledFuture.of(()->"").then(firstCallback).then(secondCallback); final ThrottledFuture mocked = mock(ThrottledFuture.class); future.transfer(mocked); - final ArgumentCaptor>> captor = ArgumentCaptor.forClass(List.class); + final ArgumentCaptor>>> captor = ArgumentCaptor.forClass(List.class); verify(mocked).update(notNull(), captor.capture()); // captor takes the original list from the future @@ -374,8 +436,8 @@ void updateSetsTask() { @Test void updateAddsCallbacksToTheCurrentOnes() { - final Consumer callback = result -> {}; - final Consumer originalCallback = result -> {}; + final Consumer> callback = result -> {}; + final Consumer> originalCallback = result -> {}; final ThrottledFuture future = ThrottledFuture.of(() -> "").then(originalCallback); future.update(()->"", List.of(callback)); diff --git a/src/test/resources/application.yml b/src/test/resources/application.yml index 56f473bd6..258bfa622 100644 --- a/src/test/resources/application.yml +++ b/src/test/resources/application.yml @@ -1,3 +1,4 @@ +application.version: DEV spring: servlet: multipart: @@ -29,7 +30,8 @@ termit: file: storage: /tmp/termit textAnalysis: - url: http://localhost/annotace + url: http://localhost/annotace/annotate + languagesUrl: 
http://localhost/annotace/languages termOccurrenceMinScore: 0.49 comments: context: http://onto.fel.cvut.cz/ontologies/komentare diff --git a/src/test/resources/data/import-with-identifiers-en-cs.xlsx b/src/test/resources/data/import-with-identifiers-en-cs.xlsx new file mode 100644 index 000000000..c5309e834 Binary files /dev/null and b/src/test/resources/data/import-with-identifiers-en-cs.xlsx differ diff --git a/src/test/resources/ontologies/popis-dat-model.ttl b/src/test/resources/ontologies/popis-dat-model.ttl index 180b9a5a1..04152b693 100644 --- a/src/test/resources/ontologies/popis-dat-model.ttl +++ b/src/test/resources/ontologies/popis-dat-model.ttl @@ -536,3 +536,7 @@ a-popis-dat-pojem:má-původní-hodnotu a ; rdfs:domain a-popis-dat-pojem:úprava-entity ; rdfs:subPropertyOf . + +a-popis-dat-pojem:smazání-entity + a , owl:Class ; + rdfs:subClassOf a-popis-dat-pojem:změna .