SAK-50880 Improve site import performance using item-level caching
stetsche committed Jan 16, 2025
1 parent ad7823d commit b9ccb3e
Showing 5 changed files with 147 additions and 56 deletions.
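In outline, the commit first computes which item content hashes occur on more than one item in the imported assessments and then memoizes the link-migration result per hash, so identical duplicated items are only rewritten once. A minimal sketch of that idea, not the actual implementation (migrateLinks here merely stands in for the real LinkMigrationHelper call):

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

class ItemLevelCacheSketch {

    // Hashes that appear on more than one item in the imported assessments.
    private final Set<String> duplicateHashes;
    // Memoized migrated text, keyed by item hash.
    private final Map<String, String> migratedTextByHash = new HashMap<>();

    ItemLevelCacheSketch(Set<String> duplicateHashes) {
        this.duplicateHashes = duplicateHashes;
    }

    String migrate(String hash, String text) {
        // Reuse an earlier result for an identical item.
        if (duplicateHashes.contains(hash) && migratedTextByHash.containsKey(hash)) {
            return migratedTextByHash.get(hash);
        }
        // Otherwise do the expensive link rewriting once.
        String migrated = migrateLinks(text);
        // Only cache when the hash is known to repeat, so the map stays small.
        if (duplicateHashes.contains(hash)) {
            migratedTextByHash.put(hash, migrated);
        }
        return migrated;
    }

    // Placeholder for LinkMigrationHelper.migrateAllLinks(entrySet, text).
    private String migrateLinks(String text) {
        return text;
    }
}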
@@ -24,6 +24,7 @@
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
@@ -35,9 +36,15 @@
import java.util.Set;
import java.util.stream.Collectors;

import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Join;
import javax.persistence.criteria.Root;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.collections4.CollectionUtils;
import org.hibernate.Hibernate;
import org.hibernate.Session;
import org.hibernate.query.Query;
import org.sakaiproject.component.cover.ServerConfigurationService;
import org.sakaiproject.content.api.ContentResource;
@@ -2500,5 +2507,26 @@ public void restoreAssessment(Long assessmentId) {
}
}
}

public Set<String> getDuplicateItemHashesForAssessmentIds(Collection<Long> assessmentIds) {
if (assessmentIds.isEmpty()) {
return Collections.emptySet();
}

Session session = currentSession();
CriteriaBuilder cb = session.getCriteriaBuilder();
CriteriaQuery<String> cq = cb.createQuery(String.class);
Root<ItemData> root = cq.from(ItemData.class);
Join<ItemData, SectionData> sectionJoin = root.join("section");
Join<SectionData, AssessmentData> assessmentJoin = sectionJoin.join("assessment");

cq.select(root.get("hash"))
.where(assessmentJoin.get("assessmentBaseId").in(assessmentIds))
.groupBy(root.get("hash"))
// Item count with the same hash must be greater than one
.having(cb.gt(cb.count(root), 1));

return session.createQuery(cq).getResultStream().collect(Collectors.toSet());
}

}
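For comparison, a roughly equivalent HQL form of the criteria query above; this is only a sketch (the method name is hypothetical), reusing the same entity and property names (ItemData.hash, section, assessment, assessmentBaseId):

public Set<String> getDuplicateItemHashesForAssessmentIdsHql(Collection<Long> assessmentIds) {
    if (assessmentIds.isEmpty()) {
        return Collections.emptySet();
    }

    // Same grouping/having logic as the criteria query: keep hashes shared by more than one item.
    return currentSession().createQuery(
                "select i.hash from ItemData i "
                + "join i.section s "
                + "join s.assessment a "
                + "where a.assessmentBaseId in (:ids) "
                + "group by i.hash "
                + "having count(i) > 1", String.class)
            .setParameterList("ids", assessmentIds)
            .getResultStream()
            .collect(Collectors.toSet());
}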
@@ -21,6 +21,7 @@

package org.sakaiproject.tool.assessment.facade;

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.List;
@@ -248,4 +249,6 @@ public Set prepareItemAttachmentSet(ItemData newItem,
public List<AssessmentData> getDeletedAssessments(String siteId);

public void restoreAssessment(Long assessmentId);

public Set<String> getDuplicateItemHashesForAssessmentIds(Collection<Long> assessmentIds);
}
@@ -25,8 +25,12 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.Set;
import java.util.Stack;
import java.util.TreeSet;
@@ -49,6 +53,8 @@
import org.sakaiproject.component.api.ServerConfigurationService;
import org.sakaiproject.content.api.ContentHostingService;
import org.sakaiproject.content.api.ContentResource;
import org.apache.commons.lang3.StringUtils;
import org.sakaiproject.component.cover.ComponentManager;
import org.sakaiproject.entity.api.Entity;
import org.sakaiproject.entity.api.EntityProducer;
import org.sakaiproject.entity.api.EntityTransferrer;
@@ -66,16 +72,19 @@
import org.sakaiproject.tool.assessment.data.dao.assessment.AssessmentData;
import org.sakaiproject.tool.assessment.data.dao.assessment.ItemData;
import org.sakaiproject.tool.assessment.data.dao.assessment.ItemText;
import org.sakaiproject.tool.assessment.data.ifc.assessment.AnswerIfc;
import org.sakaiproject.tool.assessment.data.ifc.assessment.AssessmentIfc;
import org.sakaiproject.tool.assessment.data.ifc.assessment.AssessmentMetaDataIfc;
import org.sakaiproject.tool.assessment.data.ifc.questionpool.QuestionPoolDataIfc;
import org.sakaiproject.tool.assessment.data.dao.assessment.*;
import org.sakaiproject.tool.assessment.data.dao.questionpool.QuestionPoolItemData;
import org.sakaiproject.tool.assessment.data.ifc.assessment.ItemTextIfc;
import org.sakaiproject.tool.assessment.facade.AssessmentFacade;
import org.sakaiproject.tool.assessment.facade.PublishedAssessmentFacade;
import org.sakaiproject.tool.assessment.facade.PublishedAssessmentFacadeQueriesAPI;
import org.sakaiproject.tool.assessment.facade.SectionFacade;
import org.sakaiproject.tool.assessment.shared.api.questionpool.QuestionPoolServiceAPI;
import org.sakaiproject.util.api.LinkMigrationHelper;
import org.sakaiproject.tool.assessment.shared.api.qti.QTIServiceAPI;
import org.sakaiproject.user.api.UserDirectoryService;
import org.sakaiproject.user.api.UserNotDefinedException;
@@ -104,6 +113,8 @@ public class AssessmentEntityProducer implements EntityTransferrer, EntityProduc
@Getter @Setter protected UserDirectoryService userDirectoryService;
@Getter @Setter protected PublishedAssessmentFacadeQueriesAPI publishedAssessmentFacadeQueries;

private final LinkMigrationHelper linkMigrationHelper = ComponentManager.get(LinkMigrationHelper.class);

public void init() {
log.info("init()");
try {
@@ -396,14 +407,25 @@ public void updateEntityReferences(String toContext, Map<String, String> transve

AssessmentService service = new AssessmentService();

List<AssessmentData> assessmentList = service.getAllActiveAssessmentsbyAgent(toContext);

Set<Long> assessmentIds = assessmentList.stream()
.map(AssessmentData::getAssessmentBaseId)
.filter(Objects::nonNull)
.collect(Collectors.toSet());

Set<String> duplicateHashes = service.getDuplicateItemHashesForAssessmentIds(assessmentIds);

Map<String, Boolean> needToUpdateCache = new HashMap<>();
Map<String, String> itemContentCache = new HashMap<>();

for (AssessmentData assessment : assessmentList) {
//get initialized assessment
AssessmentFacade assessmentFacade = (AssessmentFacade) service.getAssessment(assessment.getAssessmentId());
boolean needToUpdate = false;

String assessmentDesc = assessmentFacade.getDescription();
if(assessmentDesc != null){
if(StringUtils.isNotBlank(assessmentDesc)){
assessmentDesc = org.sakaiproject.util.cover.LinkMigrationHelper.migrateAllLinks(entrySet, assessmentDesc);
if(!assessmentDesc.equals(assessmentFacade.getDescription())){
//need to save since a ref has been updated:
@@ -416,7 +438,7 @@
for(int i = 0; i < sectionList.size(); i++){
SectionFacade section = (SectionFacade) sectionList.get(i);
String sectionDesc = section.getDescription();
if(sectionDesc != null){
if(StringUtils.isNotBlank(sectionDesc)){
sectionDesc = org.sakaiproject.util.cover.LinkMigrationHelper.migrateAllLinks(entrySet, sectionDesc);
if(!sectionDesc.equals(section.getDescription())){
//need to save since a ref has been updated:
@@ -425,68 +447,51 @@
}
}

List itemList = section.getItemArray();
for(int j = 0; j < itemList.size(); j++){
ItemData item = (ItemData) itemList.get(j);
String itemIntr = item.getInstruction();
if(itemIntr != null){
itemIntr = org.sakaiproject.util.cover.LinkMigrationHelper.migrateAllLinks(entrySet, itemIntr);
if(!itemIntr.equals(item.getInstruction())){
//need to save since a ref has been updated:
needToUpdate = true;
item.setInstruction(itemIntr);
}
}
List<ItemData> itemList = section.getItemArray();
for (ItemData item : itemList) {
String itemHash = item.getHash();
boolean hasDuplicates = StringUtils.isNotEmpty(itemHash) && duplicateHashes.contains(itemHash);
boolean hasCaches = hasDuplicates && needToUpdateCache.containsKey(itemHash);

String itemDesc = item.getDescription();
if(itemDesc != null){
itemDesc = org.sakaiproject.util.cover.LinkMigrationHelper.migrateAllLinks(entrySet, itemDesc);
if(!itemDesc.equals(item.getDescription())){
//need to save since a ref has been updated:
needToUpdate = true;
item.setDescription(itemDesc);
}
// If we have already cached that an item with this hash does not need an update, we can skip this item
if (hasCaches && !needToUpdateCache.get(itemHash)) {
continue;
}

List itemTextList = item.getItemTextArray();
if(itemTextList != null){
for(int k = 0; k < itemTextList.size(); k++){
ItemText itemText = (ItemText) itemTextList.get(k);
String text = itemText.getText();
if(text != null){
// Transfer all of the attachments to the new site
text = service.copyContentHostingAttachments(text, toContext);

text = org.sakaiproject.util.cover.LinkMigrationHelper.migrateAllLinks(entrySet, text);
if(!text.equals(itemText.getText())){
//need to save since a ref has been updated:
needToUpdate = true;
itemText.setText(text);
}
}
boolean instructionChanged = migrateText(service, toContext, item, itemHash, hasCaches, hasDuplicates, false,
"inst", itemContentCache, entrySet, ItemData::getInstruction, ItemData::setInstruction);

List answerSetList = itemText.getAnswerArray();
if (answerSetList != null) {
for (int l = 0; l < answerSetList.size(); l++) {
Answer answer = (Answer) answerSetList.get(l);
String answerText = answer.getText();
boolean descriptionChanged = migrateText(service, toContext, item, itemHash, hasCaches, hasDuplicates, false,
"desc", itemContentCache, entrySet, ItemData::getDescription, ItemData::setDescription);

if (answerText != null) {
// Transfer all of the attachments embedded in the answer text
answerText = service.copyContentHostingAttachments(answerText, toContext);
boolean itemTextsChanged = false;
List<ItemTextIfc> itemTexts = item.getItemTextArray();
if (itemTexts != null) {
for (ItemTextIfc itemText : itemTexts) {
boolean itemTextChanged = migrateText(service, toContext, itemText, itemHash, hasCaches, hasDuplicates, true,
"it-" + itemText.getSequence(), itemContentCache, entrySet, ItemTextIfc::getText, ItemTextIfc::setText);

// Now rewrite the answerText with links to the new site
answerText = org.sakaiproject.util.cover.LinkMigrationHelper.migrateAllLinks(entrySet, answerText);
boolean answersChanged = false;
List<AnswerIfc> answers = itemText.getAnswerArray();
if (answers != null) {
for (AnswerIfc answer : answers) {
boolean answerChanged = migrateText(service, toContext, answer, itemHash, hasCaches, hasDuplicates, true,
"at-" + itemText.getSequence() + "-"+ answer.getSequence() , itemContentCache, entrySet, AnswerIfc::getText, AnswerIfc::setText);

if (!answerText.equals(answer.getText())) {
needToUpdate = true;
answer.setText(answerText);
}
}
answersChanged = answersChanged || answerChanged;
}
}

itemTextsChanged = itemTextsChanged || itemTextChanged || answersChanged;
}
}

needToUpdate = needToUpdate
|| instructionChanged
|| descriptionChanged
|| itemTextsChanged;

needToUpdateCache.put(itemHash, needToUpdate);
}
}

@@ -773,4 +778,41 @@ private List<String> getAttachmentResourceIds(NodeList list) {
}
return result;
}

private <T> boolean migrateText(AssessmentService assessmentService, String toContext, T item, String itemHash,
boolean hasCaches, boolean hasDuplicates, boolean copyAttachments, String cacheCode, Map<String, String> textCache,
Set<Entry<String, String>> entrySet, Function<T, String> getter, BiConsumer<T, String> setter) {

String cacheKey = itemHash + "-" + cacheCode;

if (hasCaches && textCache.containsKey(cacheKey)) {
// The item's text has already been cached, let's get it from the cache
setter.accept(item, textCache.get(cacheKey));
return true;
} else {
// The item's text has not been cached yet, let's try migrating it
String itemText = StringUtils.trimToEmpty(getter.apply(item));
String migratedText;
if (copyAttachments) {
migratedText = assessmentService.copyContentHostingAttachments(itemText, toContext);
} else {
migratedText = itemText;
}

migratedText = linkMigrationHelper.migrateAllLinks(entrySet, migratedText);

// Check if there has been a change
if (!StringUtils.equals(itemText, migratedText)) {
setter.accept(item, migratedText);

if (hasDuplicates) {
textCache.put(cacheKey, migratedText);
}

return true;
}
}

return false;
}
}
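The helper above builds its cache key as "<itemHash>-<cacheCode>", matching the codes used in updateEntityReferences ("inst", "desc", "it-<itemTextSequence>", "at-<itemTextSequence>-<answerSequence>"). If another per-item text field ever needed the same treatment, a call inside the item loop would follow the same shape; a purely hypothetical sketch (ItemData has no getHint/setHint accessors and the "hint" cache code is invented for illustration):

// Hypothetical illustration only: getHint/setHint and the "hint" cache code do not exist in ItemData.
boolean hintChanged = migrateText(service, toContext, item, itemHash, hasCaches, hasDuplicates, false,
        "hint", itemContentCache, entrySet, ItemData::getHint, ItemData::setHint);
needToUpdate = needToUpdate || hintChanged;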
@@ -25,6 +25,7 @@
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
@@ -34,6 +35,7 @@
import java.util.TreeSet;
import java.util.Set;

import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;

@@ -1198,7 +1200,11 @@ public String copyContentHostingAttachments(String text, String toContext) {
Set<String> attachments = new HashSet<String>();
for (String source : sources) {
String theHref = StringUtils.substringBefore(source, "\"");
if (StringUtils.contains(theHref, "/access/content/")) {

if (!StringUtils.startsWith(theHref, "data:")
&& StringUtils.contains(theHref, "/access/content/")
// Skip attachments that live in a user's workspace
&& !StringUtils.contains(theHref, "/access/content/user/")) {
attachments.add(theHref);
}
}
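The attachment filter above can also be read as a single predicate: keep an href only if it is not a data: URI, points under /access/content/, and is not user workspace content. A sketch extracting it into a helper (the method name isMigratableAttachmentHref is hypothetical):

// Same three conditions as the if-statement above, extracted for readability.
private boolean isMigratableAttachmentHref(String theHref) {
    return !StringUtils.startsWith(theHref, "data:")
            && StringUtils.contains(theHref, "/access/content/")
            && !StringUtils.contains(theHref, "/access/content/user/");
}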
@@ -1584,4 +1590,16 @@ public static String renameDuplicate(String title) {

return rename;
}

public Set<String> getDuplicateItemHashesByAssessmentId(@NonNull Long assessmentId) {
return getDuplicateItemHashesForAssessmentIds(Collections.singleton(assessmentId));
}

public Set<String> getDuplicateItemHashesForAssessmentIds(@NonNull Collection<Long> assessmentIds) {
// Eliminate duplicates
Set<Long> assessmentIdSet = Set.copyOf(assessmentIds);

return PersistenceService.getInstance().getAssessmentFacadeQueries()
.getDuplicateItemHashesForAssessmentIds(assessmentIdSet);
}
}
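A short usage sketch of the two new service methods (the assessmentId and assessmentIds variables are placeholders):

AssessmentService assessmentService = new AssessmentService();

// Hashes that occur on more than one item within a single assessment.
Set<String> duplicatesInOne = assessmentService.getDuplicateItemHashesByAssessmentId(assessmentId);

// Hashes that occur on more than one item across the given assessments;
// duplicate ids in the collection are removed internally via Set.copyOf.
Set<String> duplicatesAcrossSite = assessmentService.getDuplicateItemHashesForAssessmentIds(assessmentIds);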
@@ -675,7 +675,7 @@ protected void doInTransactionWithoutResult(TransactionStatus status) {
});
}
} catch (Exception e) {
log.error("Error encountered while transferring data for producer: [{}] from: [{}] to: [{}], {}", ep.getLabel(), fromContext, toContext, e.toString());
log.error("Error encountered while transferring data for producer: [{}] from: [{}] to: [{}]", ep.getLabel(), fromContext, toContext, e);
}
}
}
