diff --git a/app/jobs/create_new_parent_job.rb b/app/jobs/create_new_parent_job.rb index 1e3bada6d..9eb395a8a 100644 --- a/app/jobs/create_new_parent_job.rb +++ b/app/jobs/create_new_parent_job.rb @@ -7,8 +7,7 @@ def default_priority -50 end - def perform(batch_process, start_index = 0) - index = batch_process.create_new_parent_csv(start_index) - CreateNewParentJob.perform_later(batch_process, index) if !index.nil? && index != -1 && index > BatchProcess::BATCH_LIMIT + def perform(batch_process) + batch_process.create_new_parent_csv end end diff --git a/app/jobs/delete_parent_objects_job.rb b/app/jobs/delete_parent_objects_job.rb index 12ebb2a17..4fe556328 100644 --- a/app/jobs/delete_parent_objects_job.rb +++ b/app/jobs/delete_parent_objects_job.rb @@ -7,8 +7,7 @@ def default_priority -50 end - def perform(batch_process, start_index = 0) - index = batch_process.delete_parent_objects(start_index) - DeleteParentObjectsJob.perform_later(batch_process, index) if !index.nil? && index != -1 && index > BatchProcess::BATCH_LIMIT + def perform(batch_process) + batch_process.delete_parent_objects end end diff --git a/app/jobs/reassociate_child_oids_job.rb b/app/jobs/reassociate_child_oids_job.rb index 17ada4214..aeac15616 100644 --- a/app/jobs/reassociate_child_oids_job.rb +++ b/app/jobs/reassociate_child_oids_job.rb @@ -7,9 +7,8 @@ def default_priority 50 end - def perform(batch_process, start_index = 0) - index = batch_process.reassociate_child_oids(start_index) - ReassociateChildOidsJob.perform_later(batch_process, index) if !index.nil? 
&& index != -1 && index > BatchProcess::BATCH_LIMIT + def perform(batch_process) + batch_process.reassociate_child_oids rescue => e batch_process.batch_processing_event("ReassociateChildOidsJob failed due to #{e.message}", "failed") end diff --git a/app/jobs/recreate_child_oid_ptiffs_job.rb b/app/jobs/recreate_child_oid_ptiffs_job.rb index f779e2da5..c786d5f85 100644 --- a/app/jobs/recreate_child_oid_ptiffs_job.rb +++ b/app/jobs/recreate_child_oid_ptiffs_job.rb @@ -7,8 +7,7 @@ def default_priority 9 end - def perform(batch_process, start_index = 0) - index = batch_process.recreate_child_oid_ptiffs(start_index) - RecreateChildOidPtiffsJob.perform_later(batch_process, index) if !index.nil? && index != -1 && index > BatchProcess::BATCH_LIMIT + def perform(batch_process) + batch_process.recreate_child_oid_ptiffs end end diff --git a/app/jobs/update_parent_objects_job.rb b/app/jobs/update_parent_objects_job.rb index 2e36d7f1a..8c87a093f 100644 --- a/app/jobs/update_parent_objects_job.rb +++ b/app/jobs/update_parent_objects_job.rb @@ -9,8 +9,7 @@ def default_priority 50 end - def perform(batch_process, start_index = 0) - index = batch_process.update_parent_objects(start_index) - UpdateParentObjectsJob.perform_later(batch_process, index) if !index.nil? 
&& index != -1 && index > BatchProcess::BATCH_LIMIT + def perform(batch_process) + batch_process.update_parent_objects end end diff --git a/app/models/batch_process.rb b/app/models/batch_process.rb index b3f8990be..6f832a254 100644 --- a/app/models/batch_process.rb +++ b/app/models/batch_process.rb @@ -32,7 +32,6 @@ class BatchProcess < ApplicationRecord # rubocop:disable Metrics/ClassLength has_many :child_objects, through: :batch_connections, source_type: "ChildObject", source: :connectable CSV_MAXIMUM_ENTRIES = 10_000 - BATCH_LIMIT = 50 # SHARED BY ALL BATCH ACTIONS: ------------------------------------------------------------------- # diff --git a/app/models/concerns/create_parent_object.rb b/app/models/concerns/create_parent_object.rb index b9989095a..e975cacd8 100644 --- a/app/models/concerns/create_parent_object.rb +++ b/app/models/concerns/create_parent_object.rb @@ -11,11 +11,10 @@ module CreateParentObject # rubocop:disable Metrics/CyclomaticComplexity # rubocop:disable Metrics/BlockLength # rubocop:disable Layout/LineLength - def create_new_parent_csv(start_index = 0) + def create_new_parent_csv self.admin_set = '' sets = admin_set parsed_csv.each_with_index do |row, index| - next if start_index > index if row['digital_object_source'].present? && row['preservica_uri'].present? && !row['preservica_uri'].blank? 
begin parent_object = CsvRowParentService.new(row, index, current_ability, user).parent_object @@ -117,9 +116,7 @@ def create_new_parent_csv(start_index = 0) rescue StandardError => e batch_processing_event("Skipping row [#{index + 2}] Unable to save parent: #{e.message}.", "Skipped Row") end - return index + 1 if index + 1 - start_index > BatchProcess::BATCH_LIMIT end - -1 end # rubocop:enable Metrics/AbcSize # rubocop:enable Metrics/MethodLength diff --git a/app/models/concerns/deletable.rb b/app/models/concerns/deletable.rb index d1a5ba0eb..abc5576c9 100644 --- a/app/models/concerns/deletable.rb +++ b/app/models/concerns/deletable.rb @@ -6,12 +6,10 @@ module Deletable # DELETE PARENT OBJECTS: ------------------------------------------------------------------------ # # DELETES PARENT OBJECTS FROM INGESTED CSV - # rubocop:disable Metrics/MethodLength - def delete_parent_objects(start_index = 0) + def delete_parent_objects self.admin_set = '' sets = admin_set parsed_csv.each_with_index do |row, index| - next if start_index > index oid = row['oid'] action = row['action'] metadata_source = row['source'] @@ -24,11 +22,8 @@ def delete_parent_objects(start_index = 0) setup_for_background_jobs(parent_object, metadata_source) parent_object.destroy! 
parent_object.processing_event("Parent #{parent_object.oid} has been deleted", 'deleted') - return index + 1 if index + 1 - start_index > BatchProcess::BATCH_LIMIT end - -1 end - # rubocop:enable Metrics/MethodLength # CHECKS TO SEE IF USER HAS ABILITY TO DELETE OBJECTS: def deletable_parent_object(oid, index) diff --git a/app/models/concerns/reassociatable.rb b/app/models/concerns/reassociatable.rb index 961fbad7e..2760946d3 100644 --- a/app/models/concerns/reassociatable.rb +++ b/app/models/concerns/reassociatable.rb @@ -7,19 +7,16 @@ module Reassociatable BLANK_VALUE = "_blank_" # triggers the reassociate process - def reassociate_child_oids(start_index = 0) + def reassociate_child_oids return unless batch_action == "reassociate child oids" - parents_needing_update, parent_destination_map, index = update_child_objects(start_index) + parents_needing_update, parent_destination_map = update_child_objects update_related_parent_objects(parents_needing_update, parent_destination_map) - index end # finds which parents are needed to update # rubocop:disable Metrics/AbcSize # rubocop:disable Metrics/MethodLength - # rubocop:disable Metrics/CyclomaticComplexity - # rubocop:disable Metrics/PerceivedComplexity - def update_child_objects(start_index) + def update_child_objects self.admin_set = '' sets = admin_set return unless batch_action == "reassociate child oids" @@ -28,7 +25,6 @@ def update_child_objects(start_index) parent_destination_map = {} parsed_csv.each_with_index do |row, index| - next if start_index > index co = load_child(index, row["child_oid"].to_i) po = load_parent(index, row["parent_oid"].to_i) next unless co.present? && po.present? 
@@ -49,14 +45,11 @@ def update_child_objects(start_index) values_to_update = check_headers(child_headers, row) update_child_values(values_to_update, co, row, index) - return [parents_needing_update, parent_destination_map, index + 1] if index + 1 - start_index > BatchProcess::BATCH_LIMIT end - [parents_needing_update, parent_destination_map, -1] + [parents_needing_update, parent_destination_map] end # rubocop:enable Metrics/AbcSize # rubocop:enable Metrics/MethodLength - # rubocop:enable Metrics/CyclomaticComplexity - # rubocop:enable Metrics/PerceivedComplexity # verifies headers are included. child headers found in csv_exportable:90 def check_headers(headers, row) diff --git a/app/models/concerns/recreate_child_ptiff.rb b/app/models/concerns/recreate_child_ptiff.rb index 10f8acdc9..d9a06ca41 100644 --- a/app/models/concerns/recreate_child_ptiff.rb +++ b/app/models/concerns/recreate_child_ptiff.rb @@ -11,12 +11,11 @@ module RecreateChildPtiff # rubocop:disable Metrics/MethodLength # rubocop:disable Metrics/CyclomaticComplexity # rubocop:disable Metrics/PerceivedComplexity - def recreate_child_oid_ptiffs(start_index = 0) + def recreate_child_oid_ptiffs parents = Set[] self.admin_set = '' sets = admin_set oids.each_with_index do |oid, index| - next if start_index > index child_object = ChildObject.find_by_oid(oid.to_i) unless child_object batch_processing_event("Skipping row [#{index + 2}] with unknown Child: #{oid}", 'Skipped Row') @@ -36,9 +35,7 @@ def recreate_child_oid_ptiffs(start_index = 0) GeneratePtiffJob.perform_later(child_object, self) if file_size <= SetupMetadataJob::FIVE_HUNDRED_MB attach_item(child_object) child_object.processing_event("Ptiff Queued", "ptiff-queued") - return index + 1 if index + 1 - start_index > BatchProcess::BATCH_LIMIT end - -1 end # rubocop:enable Metrics/AbcSize # rubocop:enable Metrics/MethodLength diff --git a/app/models/concerns/updatable.rb b/app/models/concerns/updatable.rb index 74dc0ed0e..a042f0d0b 100644 --- 
a/app/models/concerns/updatable.rb +++ b/app/models/concerns/updatable.rb @@ -64,12 +64,11 @@ def update_child_objects_caption end # rubocop:disable Metrics/BlockLength - def update_parent_objects(start_index = 0) + def update_parent_objects self.admin_set = '' sets = admin_set - return unless batch_action == 'update parent objects' + return unless batch_action == 'update parent objects' parsed_csv.each_with_index do |row, index| - next if start_index > index oid = row['oid'] unless ['oid'].nil? redirect = row['redirect_to'] unless ['redirect_to'].nil? parent_object = updatable_parent_object(oid, index) @@ -88,10 +87,10 @@ def update_parent_objects(start_index = 0) setup_for_background_jobs(parent_object, metadata_source) parent_object.admin_set = admin_set unless admin_set.nil? - if row['visibility'] == 'Open with Permission' && row['permission_set_key'].blank? + if row['visibility'] == 'Open with Permission' && row['permission_set_key'].blank? batch_processing_event("Skipping row [#{index + 2}]. Process failed. Permission Set missing from CSV.", 'Skipped Row') next - elsif row['visibility'] == 'Open with Permission' && row['permission_set_key'] != parent_object&.permission_set&.key + elsif row['visibility'] == 'Open with Permission' && row['permission_set_key'] != parent_object&.permission_set&.key permission_set = OpenWithPermission::PermissionSet.find_by(key: row['permission_set_key']) if permission_set.nil? batch_processing_event("Skipping row [#{index + 2}]. Process failed. 
Permission Set missing or nonexistent.", 'Skipped Row') @@ -114,9 +113,7 @@ def update_parent_objects(start_index = 0) sync_from_preservica if parent_object.digital_object_source == 'Preservica' processing_event_for_parent(parent_object) - return index + 1 if index + 1 - start_index > BatchProcess::BATCH_LIMIT end - -1 end # rubocop:enable Metrics/CyclomaticComplexity # rubocop:enable Metrics/PerceivedComplexity diff --git a/spec/fixtures/csv/create_many_parent_fixture_ids.csv b/spec/fixtures/csv/create_many_parent_fixture_ids.csv deleted file mode 100644 index b8a6937a8..000000000 --- a/spec/fixtures/csv/create_many_parent_fixture_ids.csv +++ /dev/null @@ -1,5 +0,0 @@ -oid,source,admin_set -2005512,ladybird,brbl -2005513,ladybird,brbl -2005514,ladybird,brbl -2005515,ladybird,brbl diff --git a/spec/fixtures/csv/delete_many_parent_fixture_ids.csv b/spec/fixtures/csv/delete_many_parent_fixture_ids.csv deleted file mode 100644 index eab9d0a35..000000000 --- a/spec/fixtures/csv/delete_many_parent_fixture_ids.csv +++ /dev/null @@ -1,5 +0,0 @@ -oid,action,source,admin_set -2005512,delete,ladybird,brbl -2005513,delete,ladybird,brbl -2005514,delete,ladybird,brbl -2005515,delete,ladybird,brbl \ No newline at end of file diff --git a/spec/fixtures/csv/reassociate_many_child_objects.csv b/spec/fixtures/csv/reassociate_many_child_objects.csv deleted file mode 100644 index b9fb31ee3..000000000 --- a/spec/fixtures/csv/reassociate_many_child_objects.csv +++ /dev/null @@ -1,5 +0,0 @@ -child_oid,parent_oid,order,parent_title,label,caption,viewing_hint -1030368,2005515,,,,, -1032318,2005514,,,,, -1030368,2002826,,,,, -1032318,2002826,,,,, diff --git a/spec/jobs/create_new_parent_job_spec.rb b/spec/jobs/create_new_parent_job_spec.rb deleted file mode 100644 index 6a948ea4d..000000000 --- a/spec/jobs/create_new_parent_job_spec.rb +++ /dev/null @@ -1,44 +0,0 @@ -# frozen_string_literal: true - -require 'rails_helper' - -RSpec.describe CreateNewParentJob, type: :job, 
prep_metadata_sources: true, prep_admin_sets: true do - let(:admin_set) { AdminSet.find_by(key: 'brbl') } - let(:user) { FactoryBot.create(:user) } - let(:create_many) { Rack::Test::UploadedFile.new(Rails.root.join(fixture_path, "csv", "create_many_parent_fixture_ids.csv")) } - let(:create_batch_process) { FactoryBot.create(:batch_process, user: user, file: create_many) } - let(:bare_create_batch_process) { FactoryBot.create(:batch_process, user: user) } - let(:total_parent_object_count) { 4 } - - before do - allow(GoodJob).to receive(:preserve_job_records).and_return(true) - ActiveJob::Base.queue_adapter = GoodJob::Adapter.new(execution_mode: :inline) - end - - it 'increments the job queue by one' do - create_parent_job = described_class.perform_later(bare_create_batch_process) - expect(create_parent_job.instance_variable_get(:@successfully_enqueued)).to eq true - end - - context 'with more than limit of batch objects' do - before do - BatchProcess::BATCH_LIMIT = 2 - expect(ParentObject.all.count).to eq 0 - user.add_role(:editor, admin_set) - login_as(:user) - expect(described_class).to receive(:perform_later).exactly(2).times.and_call_original - end - - around do |example| - perform_enqueued_jobs do - example.run - end - end - - it 'goes through all parents in batches once' do - create_batch_process.save - expect(ParentObject.all.count).to eq total_parent_object_count - expect(IngestEvent.where(reason: 'Processing has been queued').count).to eq total_parent_object_count - end - end -end diff --git a/spec/jobs/delete_parent_job_spec.rb b/spec/jobs/delete_parent_job_spec.rb deleted file mode 100644 index d2cf8dee0..000000000 --- a/spec/jobs/delete_parent_job_spec.rb +++ /dev/null @@ -1,54 +0,0 @@ -# frozen_string_literal: true - -require 'rails_helper' - -RSpec.describe DeleteParentObjectsJob, type: :job, prep_metadata_sources: true, prep_admin_sets: true do - let(:admin_set) { AdminSet.find_by(key: 'brbl') } - let(:user) { FactoryBot.create(:user) } - 
let(:create_many) { Rack::Test::UploadedFile.new(Rails.root.join(fixture_path, "csv", "create_many_parent_fixture_ids.csv")) } - let(:delete_many) { Rack::Test::UploadedFile.new(Rails.root.join(fixture_path, "csv", "delete_many_parent_fixture_ids.csv")) } - let(:create_batch_process) { FactoryBot.create(:batch_process, user: user, file: create_many) } - let(:delete_batch_process) { FactoryBot.create(:batch_process, user: user, file: delete_many, batch_action: 'delete parent objects') } - - before do - allow(GoodJob).to receive(:preserve_job_records).and_return(true) - ActiveJob::Base.queue_adapter = GoodJob::Adapter.new(execution_mode: :inline) - end - - context 'with tests active job queue' do - it 'increments the job queue by one' do - delete_parent_job = described_class.perform_later - expect(delete_parent_job.instance_variable_get(:@successfully_enqueued)).to be true - end - end - - context 'with more than limit parent objects' do - before do - BatchProcess::BATCH_LIMIT = 2 - expect(ParentObject.all.count).to eq 0 - user.add_role(:editor, admin_set) - login_as(:user) - create_batch_process.save - total_parent_object_count = 4 - expect(ParentObject.all.count).to eq total_parent_object_count - expect(described_class).to receive(:perform_later).exactly(2).times.and_call_original - end - - around do |example| - perform_enqueued_jobs do - example.run - end - end - - it 'goes through all parents in batches once' do - delete_batch_process.save - expect(IngestEvent.where(status: 'deleted').and(IngestEvent.where(reason: 'Parent 2005512 has been deleted')).count).to eq 1 - expect(IngestEvent.where(status: 'Skipped Row').and(IngestEvent.where(reason: 'Skipping row [2] with parent oid: 2005512 because it was not found in local database')).count).to eq 0 - expect(IngestEvent.where(status: 'deleted').and(IngestEvent.where(reason: 'Parent 2005513 has been deleted')).count).to eq 1 - expect(IngestEvent.where(status: 'Skipped Row').and(IngestEvent.where(reason: 'Skipping row 
[3] with parent oid: 2005513 because it was not found in local database')).count).to eq 0 - expect(IngestEvent.where(status: 'deleted').and(IngestEvent.where(reason: 'Parent 2005514 has been deleted')).count).to eq 1 - expect(IngestEvent.where(status: 'deleted').and(IngestEvent.where(reason: 'Parent 2005515 has been deleted')).count).to eq 1 - expect(ParentObject.all.count).to eq 0 - end - end -end diff --git a/spec/jobs/reassociate_child_oids_job_spec.rb b/spec/jobs/reassociate_child_oids_job_spec.rb index 9152b46cd..5670b45b7 100644 --- a/spec/jobs/reassociate_child_oids_job_spec.rb +++ b/spec/jobs/reassociate_child_oids_job_spec.rb @@ -3,14 +3,6 @@ require 'rails_helper' RSpec.describe ReassociateChildOidsJob, type: :job, prep_admin_sets: true, prep_metadata_sources: true do - let(:admin_set) { AdminSet.find_by(key: 'brbl') } - let(:user) { FactoryBot.create(:user) } - let(:create_many) { Rack::Test::UploadedFile.new(Rails.root.join(fixture_path, "csv", "create_many_parent_fixture_ids.csv")) } - let(:reassociate_many) { Rack::Test::UploadedFile.new(Rails.root.join(fixture_path, "csv", "reassociate_many_child_objects.csv")) } - let(:create_batch_process) { FactoryBot.create(:batch_process, user: user, file: create_many) } - let(:reassociate_batch_process) { FactoryBot.create(:batch_process, user: user, file: reassociate_many, batch_action: 'reassociate child oids') } - let(:parent_object_old_two) { FactoryBot.create(:parent_object, oid: 2_002_826) } - before do allow(GoodJob).to receive(:preserve_job_records).and_return(true) ActiveJob::Base.queue_adapter = GoodJob::Adapter.new(execution_mode: :inline) @@ -35,60 +27,4 @@ expect(IngestEvent.last.status).to eq "failed" end end - - context 'with more than limit of batch objects' do - before do - BatchProcess::BATCH_LIMIT = 2 - expect(ParentObject.all.count).to eq 0 - stub_metadata_cloud("2005512", "ladybird") - stub_metadata_cloud("2002826", "ladybird") - stub_ptiffs_and_manifests - user.add_role(:editor, admin_set) 
- login_as(:user) - create_batch_process.save - parent_object_old_two - total_parent_object_count = 5 - total_child_object_count = 3 - expect(ParentObject.all.count).to eq total_parent_object_count - expect(ChildObject.all.count).to eq total_child_object_count - po_one = ParentObject.find(2_005_512) - po_five = ParentObject.find(2_002_826) - expect(po_one.child_object_count).to eq 2 - expect(po_five.child_object_count).to eq 1 - expect(described_class).to receive(:perform_later).exactly(2).times.and_call_original - end - - around do |example| - perform_enqueued_jobs do - original_image_bucket = ENV['ACCESS_MASTER_MOUNT'] - ENV['ACCESS_MASTER_MOUNT'] = File.join('spec', 'fixtures', 'images', 'ptiff_images') - example.run - ENV['ACCESS_MASTER_MOUNT'] = original_image_bucket - end - end - - it 'goes through all parents in batches once' do - reassociate_batch_process.save - po_one = ParentObject.find(2_005_512) - po_two = ParentObject.find(2_005_513) - po_three = ParentObject.find(2_005_514) - po_four = ParentObject.find(2_005_515) - po_five = ParentObject.find(2_002_826) - co_one = ChildObject.find(1_030_368) - co_two = ChildObject.find(1_032_318) - co_three = ChildObject.find(1_011_398) - # four rows in csv - expect(IngestEvent.where(status: 'update-complete').count).to eq 4 - # parent create and four parent updates - expect(IngestEvent.where(status: 'manifest-saved').count).to eq 5 - expect(po_one.child_object_count).to eq(0).or be_nil - expect(po_two.child_object_count).to eq(0).or be_nil - expect(po_three.child_object_count).to eq(0).or be_nil - expect(po_four.child_object_count).to eq(0).or be_nil - expect(po_five.child_object_count).to eq 3 - expect(co_one.parent_object_oid).to eq po_five.oid - expect(co_two.parent_object_oid).to eq po_five.oid - expect(co_three.parent_object_oid).to eq po_five.oid - end - end end diff --git a/spec/jobs/recreate_child_oid_ptiffs_job_spec.rb b/spec/jobs/recreate_child_oid_ptiffs_job_spec.rb index 4636a0e3e..b71c1b1ec 100644 --- 
a/spec/jobs/recreate_child_oid_ptiffs_job_spec.rb +++ b/spec/jobs/recreate_child_oid_ptiffs_job_spec.rb @@ -3,26 +3,21 @@ require 'rails_helper' RSpec.describe RecreateChildOidPtiffsJob, type: :job, prep_metadata_sources: true, prep_admin_sets: true do + before do + allow(GoodJob).to receive(:preserve_job_records).and_return(true) + ActiveJob::Base.queue_adapter = GoodJob::Adapter.new(execution_mode: :inline) + end + let(:user) { FactoryBot.create(:user) } let(:role) { FactoryBot.create(:role, name: editor) } - let(:admin_set) { AdminSet.find_by(key: 'brbl') } - let(:recreate_many) { Rack::Test::UploadedFile.new(Rails.root.join(fixture_path, "csv", "recreate_child_ptiffs.csv")) } - let(:recreate_many_batch_process) { FactoryBot.create(:batch_process, user: user, file: recreate_many, batch_action: 'recreate child oid ptiffs') } - let(:recreate_batch_process) { FactoryBot.create(:batch_process, user: user, batch_action: 'recreate child oid ptiffs') } + let(:admin_set) { AdminSet.first } + let(:batch_process) { FactoryBot.create(:batch_process, user: user, batch_action: 'recreate child oid ptiffs') } let(:other_batch_process) { FactoryBot.create(:batch_process, user: user, batch_action: 'other recreate child oid ptiffs') } let(:parent_object) { FactoryBot.create(:parent_object, oid: 2_004_628, authoritative_metadata_source: MetadataSource.first, admin_set: admin_set) } let(:child_object) { FactoryBot.create(:child_object, oid: 456_789, parent_object: parent_object) } - let(:of_many_child_object_one) { FactoryBot.create(:child_object, oid: 1_011_398, parent_object: parent_object) } - let(:of_many_child_object_two) { FactoryBot.create(:child_object, oid: 1_126_257, parent_object: parent_object) } - let(:of_many_child_object_three) { FactoryBot.create(:child_object, oid: 16_057_784, parent_object: parent_object) } let(:recreate_child_oid_ptiffs_job) { RecreateChildOidPtiffsJob.new } let(:generate_ptiff_job) { GeneratePtiffJob.new } - before do - allow(GoodJob).to 
receive(:preserve_job_records).and_return(true) - ActiveJob::Base.queue_adapter = GoodJob::Adapter.new(execution_mode: :inline) - end - around do |example| original_image_bucket = ENV["S3_SOURCE_BUCKET_NAME"] ENV["S3_SOURCE_BUCKET_NAME"] = "not-a-real-bucket" @@ -35,23 +30,8 @@ .to_return(status: 200, body: "", headers: {}) stub_request(:head, "https://not-a-real-bucket.s3.amazonaws.com/ptiffs/89/45/67/89/456789.tif") .to_return(status: 200, body: "", headers: {}) - stub_request(:head, "https://not-a-real-bucket.s3.amazonaws.com/originals/98/10/11/39/1011398.tif") - .to_return(status: 200, body: "", headers: {}) - stub_request(:head, "https://not-a-real-bucket.s3.amazonaws.com/ptiffs/98/10/11/39/1011398.tif") - .to_return(status: 200, body: "", headers: {}) - stub_request(:head, "https://not-a-real-bucket.s3.amazonaws.com/originals/57/11/26/25/1126257.tif") - .to_return(status: 200, body: "", headers: {}) - stub_request(:head, "https://not-a-real-bucket.s3.amazonaws.com/ptiffs/57/11/26/25/1126257.tif") - .to_return(status: 200, body: "", headers: {}) - stub_request(:head, "https://not-a-real-bucket.s3.amazonaws.com/originals/84/16/05/77/84/16057784.tif") - .to_return(status: 200, body: "", headers: {}) - stub_request(:head, "https://not-a-real-bucket.s3.amazonaws.com/ptiffs/84/16/05/77/84/16057784.tif") - .to_return(status: 200, body: "", headers: {}) - allow(recreate_batch_process).to receive(:oids).and_return(['456789']) + allow(batch_process).to receive(:oids).and_return(['456789']) child_object - of_many_child_object_one - of_many_child_object_two - of_many_child_object_three end describe 'recreate ptiff job' do @@ -64,45 +44,24 @@ it 'succeeds if the user has the udpate permission' do user.add_role(:editor, admin_set) expect(GoodJob::Job.where(queue_name: 'ptiff').count).to eq(0) - recreate_job = described_class.perform_later(recreate_batch_process) + recreate_job = described_class.perform_later(batch_process) 
expect(recreate_job.instance_variable_get(:@successfully_enqueued)).to be true end it 'fails if the user does not have the udpate permission' do expect(GoodJob::Job.where(queue_name: 'ptiff').count).to eq(0) - recreate_child_oid_ptiffs_job.perform(recreate_batch_process) + recreate_child_oid_ptiffs_job.perform(batch_process) expect(GoodJob::Job.where(queue_name: 'ptiff').count).to eq(0) end # TODO: revert back to .once instead of count: 2 once need for preservica logging is no more it "with recreate batch, will force ptiff creation" do expect(child_object.pyramidal_tiff).to receive(:original_file_exists?).and_return(true, count: 2) expect(child_object.pyramidal_tiff).to receive(:generate_ptiff).and_return(true, count: 2) - generate_ptiff_job.perform(child_object, recreate_batch_process) + generate_ptiff_job.perform(child_object, batch_process) end it "another type of batch will not force ptiff creation" do expect(child_object.pyramidal_tiff).not_to receive(:original_file_exists?) expect(child_object.pyramidal_tiff).not_to receive(:generate_ptiff) generate_ptiff_job.perform(child_object, other_batch_process) end - - context 'when in batches' do - around do |example| - perform_enqueued_jobs do - example.run - end - end - - before do - stub_metadata_cloud("2002826", "ladybird") - stub_ptiffs_and_manifests - BatchProcess::BATCH_LIMIT = 2 - user.add_role(:editor, admin_set) - expect(described_class).to receive(:perform_later).exactly(2).times.and_call_original - end - - it 'can process each record once' do - recreate_many_batch_process.save - expect(IngestEvent.where(status: 'ptiff-queued').count).to eq 3 - end - end end end diff --git a/spec/jobs/update_parent_job_spec.rb b/spec/jobs/update_parent_job_spec.rb index 581bfb9c7..34d00ca0c 100644 --- a/spec/jobs/update_parent_job_spec.rb +++ b/spec/jobs/update_parent_job_spec.rb @@ -2,46 +2,16 @@ require 'rails_helper' -RSpec.describe UpdateParentObjectsJob, type: :job, prep_metadata_sources: true, prep_admin_sets: true do 
- let(:admin_set) { AdminSet.find_by(key: 'brbl') } - let(:user) { FactoryBot.create(:user) } - let(:create_many) { Rack::Test::UploadedFile.new(Rails.root.join(fixture_path, "csv", "create_many_parent_fixture_ids.csv")) } - let(:create_batch_process) { FactoryBot.create(:batch_process, user: user, file: create_many) } - let(:update_batch_process) { FactoryBot.create(:batch_process, user: user, file: create_many, batch_action: 'update parent objects') } - let(:bare_update_batch_process) { FactoryBot.create(:batch_process, batch_action: 'update parent objects', user: user) } - let(:total_parent_object_count) { 4 } - +RSpec.describe UpdateParentObjectsJob, type: :job do before do allow(GoodJob).to receive(:preserve_job_records).and_return(true) ActiveJob::Base.queue_adapter = GoodJob::Adapter.new(execution_mode: :inline) end + let(:user) { FactoryBot.create(:user) } + let(:batch_process) { FactoryBot.create(:batch_process, batch_action: 'update parent objects', user: user) } it 'increments the job queue by one' do - update_parent_job = described_class.perform_later(bare_update_batch_process) + update_parent_job = described_class.perform_later(batch_process) expect(update_parent_job.instance_variable_get(:@successfully_enqueued)).to eq true end - - context 'with more than limit of batch objects' do - before do - BatchProcess::BATCH_LIMIT = 2 - expect(ParentObject.all.count).to eq 0 - user.add_role(:editor, admin_set) - login_as(:user) - create_batch_process.save - expect(ParentObject.all.count).to eq total_parent_object_count - expect(described_class).to receive(:perform_later).exactly(2).times.and_call_original - end - - around do |example| - perform_enqueued_jobs do - example.run - end - end - - it 'goes through all parents in batches once' do - update_batch_process.save - expect(IngestEvent.where(reason: 'Processing has been queued').count).to eq total_parent_object_count - expect(IngestEvent.where(status: 'update-complete').count).to eq total_parent_object_count - 
end - end end