diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml index 9ec8b9d6..20f2d83f 100644 --- a/.github/workflows/release-drafter.yml +++ b/.github/workflows/release-drafter.yml @@ -9,7 +9,7 @@ jobs: update_release_draft: runs-on: ubuntu-latest steps: - - uses: release-drafter/release-drafter@v5 + - uses: release-drafter/release-drafter@v6 with: config-name: release-draft-template.yml env: diff --git a/.rubocop_todo.yml b/.rubocop_todo.yml index 734c395b..4a6eac57 100644 --- a/.rubocop_todo.yml +++ b/.rubocop_todo.yml @@ -1,21 +1,21 @@ # This configuration was generated by # `rubocop --auto-gen-config` -# on 2024-01-16 21:52:52 UTC using RuboCop version 1.50.2. +# on 2024-02-16 18:01:53 UTC using RuboCop version 1.50.2. # The point is for the user to remove these configuration records # one by one as the offenses are removed from the code base. # Note that changes in the inspected code, or installation of new # versions of RuboCop, may require this file to be generated again. -# Offense count: 55 +# Offense count: 63 # Configuration parameters: CountComments, CountAsOne, AllowedMethods, AllowedPatterns. # AllowedMethods: refine Metrics/BlockLength: - Max: 694 + Max: 581 -# Offense count: 2 +# Offense count: 4 # Configuration parameters: CountComments, CountAsOne. Metrics/ClassLength: - Max: 373 + Max: 421 # Offense count: 1 # Configuration parameters: Max, CountKeywordArgs. 
diff --git a/Gemfile b/Gemfile index 0261ef39..b63d82f5 100644 --- a/Gemfile +++ b/Gemfile @@ -18,5 +18,5 @@ group :development, :test do end group :development do - gem 'rubocop', '~> 1.50.1', require: false + gem 'rubocop', '~> 1.61.0', require: false end diff --git a/lib/meilisearch.rb b/lib/meilisearch.rb index 2ba15581..f7a58e49 100644 --- a/lib/meilisearch.rb +++ b/lib/meilisearch.rb @@ -2,6 +2,7 @@ require 'meilisearch/version' require 'meilisearch/utils' +require 'meilisearch/models/task' require 'meilisearch/http_request' require 'meilisearch/multi_search' require 'meilisearch/tenant_token' diff --git a/lib/meilisearch/client.rb b/lib/meilisearch/client.rb index e6b49186..1be31acc 100644 --- a/lib/meilisearch/client.rb +++ b/lib/meilisearch/client.rb @@ -16,7 +16,8 @@ def raw_indexes(options = {}) def swap_indexes(*options) mapped_array = options.map { |arr| { indexes: arr } } - http_post '/swap-indexes', mapped_array + response = http_post '/swap-indexes', mapped_array + Models::Task.new(response, task_endpoint) end def indexes(options = {}) @@ -35,14 +36,20 @@ def indexes(options = {}) def create_index(index_uid, options = {}) body = Utils.transform_attributes(options.merge(uid: index_uid)) - http_post '/indexes', body + response = http_post '/indexes', body + + Models::Task.new(response, task_endpoint) end # Synchronous version of create_index. # Waits for the task to be achieved, be careful when using it. 
def create_index!(index_uid, options = {}) - task = create_index(index_uid, options) - wait_for_task(task['taskUid']) + Utils.soft_deprecate( + 'Client#create_index!', + "client.create_index('#{index_uid}').await" + ) + + create_index(index_uid, options).await end def delete_index(index_uid) @@ -118,7 +125,8 @@ def stats ### DUMPS def create_dump - http_post '/dumps' + response = http_post '/dumps' + Models::Task.new(response, task_endpoint) end ### SNAPSHOTS diff --git a/lib/meilisearch/index.rb b/lib/meilisearch/index.rb index d0f7df0f..351b988d 100644 --- a/lib/meilisearch/index.rb +++ b/lib/meilisearch/index.rb @@ -30,13 +30,15 @@ def fetch_raw_info end def update(body) - http_patch indexes_path(id: @uid), Utils.transform_attributes(body) + response = http_patch indexes_path(id: @uid), Utils.transform_attributes(body) + Models::Task.new(response, task_endpoint) end alias update_index update def delete - http_delete indexes_path(id: @uid) + response = http_delete indexes_path(id: @uid) + Models::Task.new(response, task_endpoint) end alias delete_index delete @@ -86,28 +88,38 @@ def documents(options = {}) def add_documents(documents, primary_key = nil) documents = [documents] if documents.is_a?(Hash) - http_post "/indexes/#{@uid}/documents", documents, { primaryKey: primary_key }.compact + response = http_post "/indexes/#{@uid}/documents", documents, { primaryKey: primary_key }.compact + + Models::Task.new(response, task_endpoint) end alias replace_documents add_documents alias add_or_replace_documents add_documents def add_documents!(documents, primary_key = nil) - task = add_documents(documents, primary_key) - wait_for_task(task['taskUid']) + Utils.soft_deprecate( + 'Index#add_documents!', + 'index.add_documents(...).await' + ) + + add_documents(documents, primary_key).await end alias replace_documents! add_documents! alias add_or_replace_documents! add_documents! 
def add_documents_json(documents, primary_key = nil) options = { convert_body?: false } - http_post "/indexes/#{@uid}/documents", documents, { primaryKey: primary_key }.compact, options + response = http_post "/indexes/#{@uid}/documents", documents, { primaryKey: primary_key }.compact, options + + Models::Task.new(response, task_endpoint) end alias replace_documents_json add_documents_json alias add_or_replace_documents_json add_documents_json def add_documents_ndjson(documents, primary_key = nil) options = { headers: { 'Content-Type' => 'application/x-ndjson' }, convert_body?: false } - http_post "/indexes/#{@uid}/documents", documents, { primaryKey: primary_key }.compact, options + response = http_post "/indexes/#{@uid}/documents", documents, { primaryKey: primary_key }.compact, options + + Models::Task.new(response, task_endpoint) end alias replace_documents_ndjson add_documents_ndjson alias add_or_replace_documents_ndjson add_documents_ndjson @@ -115,58 +127,62 @@ def add_documents_ndjson(documents, primary_key = nil) def add_documents_csv(documents, primary_key = nil, delimiter = nil) options = { headers: { 'Content-Type' => 'text/csv' }, convert_body?: false } - http_post "/indexes/#{@uid}/documents", documents, { + response = http_post "/indexes/#{@uid}/documents", documents, { primaryKey: primary_key, csvDelimiter: delimiter }.compact, options + + Models::Task.new(response, task_endpoint) end alias replace_documents_csv add_documents_csv alias add_or_replace_documents_csv add_documents_csv def update_documents(documents, primary_key = nil) documents = [documents] if documents.is_a?(Hash) - http_put "/indexes/#{@uid}/documents", documents, { primaryKey: primary_key }.compact + response = http_put "/indexes/#{@uid}/documents", documents, { primaryKey: primary_key }.compact + + Models::Task.new(response, task_endpoint) end alias add_or_update_documents update_documents def update_documents!(documents, primary_key = nil) - task = update_documents(documents, 
primary_key) - wait_for_task(task['taskUid']) + Utils.soft_deprecate( + 'Index#update_documents!', + 'index.update_documents(...).await' + ) + + update_documents(documents, primary_key).await end alias add_or_update_documents! update_documents! def add_documents_in_batches(documents, batch_size = 1000, primary_key = nil) - tasks = [] - documents.each_slice(batch_size) do |batch| - tasks.append(add_documents(batch, primary_key)) + documents.each_slice(batch_size).map do |batch| + add_documents(batch, primary_key) end - tasks end def add_documents_in_batches!(documents, batch_size = 1000, primary_key = nil) - tasks = add_documents_in_batches(documents, batch_size, primary_key) - responses = [] - tasks.each do |task_obj| - responses.append(wait_for_task(task_obj['taskUid'])) - end - responses + Utils.soft_deprecate( + 'Index#add_documents_in_batches!', + 'index.add_documents_in_batches(...).each(&:await)' + ) + + add_documents_in_batches(documents, batch_size, primary_key).each(&:await) end def update_documents_in_batches(documents, batch_size = 1000, primary_key = nil) - tasks = [] - documents.each_slice(batch_size) do |batch| - tasks.append(update_documents(batch, primary_key)) + documents.each_slice(batch_size).map do |batch| + update_documents(batch, primary_key) end - tasks end def update_documents_in_batches!(documents, batch_size = 1000, primary_key = nil) - tasks = update_documents_in_batches(documents, batch_size, primary_key) - responses = [] - tasks.each do |task_obj| - responses.append(wait_for_task(task_obj['taskUid'])) - end - responses + Utils.soft_deprecate( + 'Index#update_documents_in_batches!', + 'index.update_documents_in_batches(...).each(&:await)' + ) + + update_documents_in_batches(documents, batch_size, primary_key).each(&:await) end # Public: Delete documents from an index @@ -178,44 +194,61 @@ def update_documents_in_batches!(documents, batch_size = 1000, primary_key = nil # Returns a Task object. 
def delete_documents(options = {}) Utils.version_error_handler(__method__) do - if options.is_a?(Hash) && options.key?(:filter) - http_post "/indexes/#{@uid}/documents/delete", options - else - # backwards compatibility: - # expect to be a array or/number/string to send alongside as documents_ids. - options = [options] unless options.is_a?(Array) + response = if options.is_a?(Hash) && options.key?(:filter) + http_post "/indexes/#{@uid}/documents/delete", options + else + # backwards compatibility: + # expect to be a array or/number/string to send alongside as documents_ids. + options = [options] unless options.is_a?(Array) - http_post "/indexes/#{@uid}/documents/delete-batch", options - end + http_post "/indexes/#{@uid}/documents/delete-batch", options + end + + Models::Task.new(response, task_endpoint) end end alias delete_multiple_documents delete_documents def delete_documents!(documents_ids) - task = delete_documents(documents_ids) - wait_for_task(task['taskUid']) + Utils.soft_deprecate( + 'Index#delete_documents!', + 'index.delete_documents(...).await' + ) + + delete_documents(documents_ids).await end alias delete_multiple_documents! delete_documents! def delete_document(document_id) encode_document = URI.encode_www_form_component(document_id) - http_delete "/indexes/#{@uid}/documents/#{encode_document}" + response = http_delete "/indexes/#{@uid}/documents/#{encode_document}" + + Models::Task.new(response, task_endpoint) end alias delete_one_document delete_document def delete_document!(document_id) - task = delete_document(document_id) - wait_for_task(task['taskUid']) + Utils.soft_deprecate( + 'Index#delete_document!', + 'index.delete_document(...).await' + ) + + delete_document(document_id).await end alias delete_one_document! delete_document! def delete_all_documents - http_delete "/indexes/#{@uid}/documents" + response = http_delete "/indexes/#{@uid}/documents" + Models::Task.new(response, task_endpoint) end def delete_all_documents! 
- task = delete_all_documents - wait_for_task(task['taskUid']) + Utils.soft_deprecate( + 'Index#delete_all_documents!', + 'index.delete_all_documents(...).await' + ) + + delete_all_documents.await end ### SEARCH @@ -288,12 +321,14 @@ def settings alias get_settings settings def update_settings(settings) - http_patch "/indexes/#{@uid}/settings", Utils.transform_attributes(settings) + response = http_patch "/indexes/#{@uid}/settings", Utils.transform_attributes(settings) + Models::Task.new(response, task_endpoint) end alias settings= update_settings def reset_settings - http_delete "/indexes/#{@uid}/settings" + response = http_delete "/indexes/#{@uid}/settings" + Models::Task.new(response, task_endpoint) end ### SETTINGS - RANKING RULES @@ -304,12 +339,14 @@ def ranking_rules alias get_ranking_rules ranking_rules def update_ranking_rules(ranking_rules) - http_put "/indexes/#{@uid}/settings/ranking-rules", ranking_rules + response = http_put "/indexes/#{@uid}/settings/ranking-rules", ranking_rules + Models::Task.new(response, task_endpoint) end alias ranking_rules= update_ranking_rules def reset_ranking_rules - http_delete "/indexes/#{@uid}/settings/ranking-rules" + response = http_delete "/indexes/#{@uid}/settings/ranking-rules" + Models::Task.new(response, task_endpoint) end ### SETTINGS - SYNONYMS @@ -320,12 +357,14 @@ def synonyms alias get_synonyms synonyms def update_synonyms(synonyms) - http_put "/indexes/#{@uid}/settings/synonyms", synonyms + response = http_put "/indexes/#{@uid}/settings/synonyms", synonyms + Models::Task.new(response, task_endpoint) end alias synonyms= update_synonyms def reset_synonyms - http_delete "/indexes/#{@uid}/settings/synonyms" + response = http_delete "/indexes/#{@uid}/settings/synonyms" + Models::Task.new(response, task_endpoint) end ### SETTINGS - STOP-WORDS @@ -337,12 +376,14 @@ def stop_words def update_stop_words(stop_words) body = stop_words.nil? || stop_words.is_a?(Array) ? 
stop_words : [stop_words] - http_put "/indexes/#{@uid}/settings/stop-words", body + response = http_put "/indexes/#{@uid}/settings/stop-words", body + Models::Task.new(response, task_endpoint) end alias stop_words= update_stop_words def reset_stop_words - http_delete "/indexes/#{@uid}/settings/stop-words" + response = http_delete "/indexes/#{@uid}/settings/stop-words" + Models::Task.new(response, task_endpoint) end ### SETTINGS - DINSTINCT ATTRIBUTE @@ -353,12 +394,14 @@ def distinct_attribute alias get_distinct_attribute distinct_attribute def update_distinct_attribute(distinct_attribute) - http_put "/indexes/#{@uid}/settings/distinct-attribute", distinct_attribute + response = http_put "/indexes/#{@uid}/settings/distinct-attribute", distinct_attribute + Models::Task.new(response, task_endpoint) end alias distinct_attribute= update_distinct_attribute def reset_distinct_attribute - http_delete "/indexes/#{@uid}/settings/distinct-attribute" + response = http_delete "/indexes/#{@uid}/settings/distinct-attribute" + Models::Task.new(response, task_endpoint) end ### SETTINGS - SEARCHABLE ATTRIBUTES @@ -369,12 +412,14 @@ def searchable_attributes alias get_searchable_attributes searchable_attributes def update_searchable_attributes(searchable_attributes) - http_put "/indexes/#{@uid}/settings/searchable-attributes", searchable_attributes + response = http_put "/indexes/#{@uid}/settings/searchable-attributes", searchable_attributes + Models::Task.new(response, task_endpoint) end alias searchable_attributes= update_searchable_attributes def reset_searchable_attributes - http_delete "/indexes/#{@uid}/settings/searchable-attributes" + response = http_delete "/indexes/#{@uid}/settings/searchable-attributes" + Models::Task.new(response, task_endpoint) end ### SETTINGS - DISPLAYED ATTRIBUTES @@ -385,12 +430,14 @@ def displayed_attributes alias get_displayed_attributes displayed_attributes def update_displayed_attributes(displayed_attributes) - http_put 
"/indexes/#{@uid}/settings/displayed-attributes", displayed_attributes + response = http_put "/indexes/#{@uid}/settings/displayed-attributes", displayed_attributes + Models::Task.new(response, task_endpoint) end alias displayed_attributes= update_displayed_attributes def reset_displayed_attributes - http_delete "/indexes/#{@uid}/settings/displayed-attributes" + response = http_delete "/indexes/#{@uid}/settings/displayed-attributes" + Models::Task.new(response, task_endpoint) end ### SETTINGS - FILTERABLE ATTRIBUTES @@ -401,12 +448,14 @@ def filterable_attributes alias get_filterable_attributes filterable_attributes def update_filterable_attributes(filterable_attributes) - http_put "/indexes/#{@uid}/settings/filterable-attributes", filterable_attributes + response = http_put "/indexes/#{@uid}/settings/filterable-attributes", filterable_attributes + Models::Task.new(response, task_endpoint) end alias filterable_attributes= update_filterable_attributes def reset_filterable_attributes - http_delete "/indexes/#{@uid}/settings/filterable-attributes" + response = http_delete "/indexes/#{@uid}/settings/filterable-attributes" + Models::Task.new(response, task_endpoint) end ### SETTINGS - SORTABLE ATTRIBUTES @@ -417,12 +466,14 @@ def sortable_attributes alias get_sortable_attributes sortable_attributes def update_sortable_attributes(sortable_attributes) - http_put "/indexes/#{@uid}/settings/sortable-attributes", sortable_attributes + response = http_put "/indexes/#{@uid}/settings/sortable-attributes", sortable_attributes + Models::Task.new(response, task_endpoint) end alias sortable_attributes= update_sortable_attributes def reset_sortable_attributes - http_delete "/indexes/#{@uid}/settings/sortable-attributes" + response = http_delete "/indexes/#{@uid}/settings/sortable-attributes" + Models::Task.new(response, task_endpoint) end ### SETTINGS - PAGINATION @@ -433,12 +484,14 @@ def pagination alias get_pagination pagination def update_pagination(pagination) - http_patch 
"/indexes/#{@uid}/settings/pagination", pagination + response = http_patch "/indexes/#{@uid}/settings/pagination", pagination + Models::Task.new(response, task_endpoint) end alias pagination= update_sortable_attributes def reset_pagination - http_delete "/indexes/#{@uid}/settings/pagination" + response = http_delete "/indexes/#{@uid}/settings/pagination" + Models::Task.new(response, task_endpoint) end def typo_tolerance @@ -448,12 +501,14 @@ def typo_tolerance def update_typo_tolerance(typo_tolerance_attributes) attributes = Utils.transform_attributes(typo_tolerance_attributes) - http_patch("/indexes/#{@uid}/settings/typo-tolerance", attributes) + response = http_patch("/indexes/#{@uid}/settings/typo-tolerance", attributes) + Models::Task.new(response, task_endpoint) end alias typo_tolerance= update_typo_tolerance def reset_typo_tolerance - http_delete("/indexes/#{@uid}/settings/typo-tolerance") + response = http_delete("/indexes/#{@uid}/settings/typo-tolerance") + Models::Task.new(response, task_endpoint) end def faceting @@ -463,12 +518,14 @@ def faceting def update_faceting(faceting_attributes) attributes = Utils.transform_attributes(faceting_attributes) - http_patch("/indexes/#{@uid}/settings/faceting", attributes) + response = http_patch("/indexes/#{@uid}/settings/faceting", attributes) + Models::Task.new(response, task_endpoint) end alias faceting= update_faceting def reset_faceting - http_delete("/indexes/#{@uid}/settings/faceting") + response = http_delete("/indexes/#{@uid}/settings/faceting") + Models::Task.new(response, task_endpoint) end ### SETTINGS - DICTIONARY @@ -479,11 +536,13 @@ def dictionary def update_dictionary(dictionary_attributes) attributes = Utils.transform_attributes(dictionary_attributes) - http_put("/indexes/#{@uid}/settings/dictionary", attributes) + response = http_put("/indexes/#{@uid}/settings/dictionary", attributes) + Models::Task.new(response, task_endpoint) end def reset_dictionary - 
http_delete("/indexes/#{@uid}/settings/dictionary") + response = http_delete("/indexes/#{@uid}/settings/dictionary") + Models::Task.new(response, task_endpoint) end ### SETTINGS - SEPARATOR TOKENS @@ -493,11 +552,13 @@ def separator_tokens def update_separator_tokens(separator_tokens_attributes) attributes = Utils.transform_attributes(separator_tokens_attributes) - http_put("/indexes/#{@uid}/settings/separator-tokens", attributes) + response = http_put("/indexes/#{@uid}/settings/separator-tokens", attributes) + Models::Task.new(response, task_endpoint) end def reset_separator_tokens - http_delete("/indexes/#{@uid}/settings/separator-tokens") + response = http_delete("/indexes/#{@uid}/settings/separator-tokens") + Models::Task.new(response, task_endpoint) end ### SETTINGS - NON SEPARATOR TOKENS @@ -508,11 +569,13 @@ def non_separator_tokens def update_non_separator_tokens(non_separator_tokens_attributes) attributes = Utils.transform_attributes(non_separator_tokens_attributes) - http_put("/indexes/#{@uid}/settings/non-separator-tokens", attributes) + response = http_put("/indexes/#{@uid}/settings/non-separator-tokens", attributes) + Models::Task.new(response, task_endpoint) end def reset_non_separator_tokens - http_delete("/indexes/#{@uid}/settings/non-separator-tokens") + response = http_delete("/indexes/#{@uid}/settings/non-separator-tokens") + Models::Task.new(response, task_endpoint) end ### SETTINGS - PROXIMITY PRECISION diff --git a/lib/meilisearch/models/task.rb b/lib/meilisearch/models/task.rb new file mode 100644 index 00000000..2aab0837 --- /dev/null +++ b/lib/meilisearch/models/task.rb @@ -0,0 +1,160 @@ +# frozen_string_literal: true + +require 'forwardable' + +module MeiliSearch + module Models + class Task + extend Forwardable + + # Maintain backwards compatibility with task hash return type + def_delegators :metadata, :[], :dig, :keys, :key?, :has_key? 
+ + attr_reader :metadata + + def initialize(metadata_hash, task_endpoint) + self.metadata = metadata_hash + validate_required_fields! metadata + + @task_endpoint = task_endpoint + end + + def uid + @metadata['taskUid'] + end + + def type + @metadata['type'] + end + + def status + @metadata['status'] + end + + def enqueued? + refresh if status_enqueued? + + status_enqueued? + end + + def processing? + refresh if status_processing? || status_enqueued? + + status_processing? + end + + def unfinished? + refresh if status_processing? || status_enqueued? + + status_processing? || status_enqueued? + end + alias waiting? unfinished? + + def finished? + !unfinished? + end + + def succeeded? + Utils.warn_on_unfinished_task(uid) if unfinished? + + status == 'succeeded' + end + alias has_succeeded? succeeded? + + def failed? + Utils.warn_on_unfinished_task(uid) if unfinished? + + status == 'failed' + end + alias has_failed? failed? + + def cancelled? + Utils.warn_on_unfinished_task(uid) if unfinished? + + status_cancelled? + end + + def deleted? + refresh unless @deleted + + !!@deleted + end + + def error + @metadata['error'] + end + + def refresh(with: nil) + self.metadata = with || @task_endpoint.task(uid) + + self + rescue MeiliSearch::ApiError => e + raise e unless e.http_code == 404 + + @deleted = true + + self + end + + def await(timeout_in_ms = 5000, interval_in_ms = 50) + refresh with: @task_endpoint.wait_for_task(uid, timeout_in_ms, interval_in_ms) unless finished? + + self + end + + def cancel + return true if status_cancelled? + return false if status_finished? + + @task_endpoint.cancel_tasks(uids: [uid]).await + + cancelled? + end + + def delete + return false unless status_finished? + + @task_endpoint.delete_tasks(uids: [uid]).await + + deleted?
+ end + + def to_h + @metadata + end + alias to_hash to_h + + private + + def validate_required_fields!(task_hash) + raise ArgumentError, 'Cannot instantiate a task without an ID' unless task_hash['taskUid'] + raise ArgumentError, 'Cannot instantiate a task without a type' unless task_hash['type'] + raise ArgumentError, 'Cannot instantiate a task without a status' unless task_hash['status'] + end + + def status_enqueued? + status == 'enqueued' + end + + def status_processing? + status == 'processing' + end + + def status_finished? + ['succeeded', 'failed', 'cancelled'].include? status + end + + def status_cancelled? + status == 'cancelled' + end + + def metadata=(metadata) + @metadata = metadata + + uid = @metadata['taskUid'] || @metadata['uid'] + @metadata['uid'] = uid + @metadata['taskUid'] = uid + end + end + end +end diff --git a/lib/meilisearch/utils.rb b/lib/meilisearch/utils.rb index 83adfac0..b6652b27 100644 --- a/lib/meilisearch/utils.rb +++ b/lib/meilisearch/utils.rb @@ -1,73 +1,98 @@ # frozen_string_literal: true +require 'logger' + module MeiliSearch module Utils SNAKE_CASE = /[^a-zA-Z0-9]+(.)/ - def self.transform_attributes(body) - case body - when Array - body.map { |item| transform_attributes(item) } - when Hash - warn_on_non_conforming_attribute_names(body) - parse(body) - else - body + class << self + attr_writer :logger + + def logger + @logger ||= Logger.new($stdout) + end + + def soft_deprecate(subject, replacement) + logger.warn("[meilisearch-ruby] #{subject} is DEPRECATED, please use #{replacement} instead.") end - end - def self.parse(body) - body - .transform_keys(&:to_s) - .transform_keys do |key| - key.include?('_') ? key.downcase.gsub(SNAKE_CASE, &:upcase).gsub('_', '') : key + def warn_on_unfinished_task(task_uid) + message = <<~UNFINISHED_TASK_WARNING + [meilisearch-ruby] Task #{task_uid}'s finished state (succeeded?/failed?/cancelled?) is being checked before finishing. 
+ [meilisearch-ruby] Tasks in meilisearch are processed in the background asynchronously. + [meilisearch-ruby] Please use the #finished? method to check if the task is finished or the #await method to wait for the task to finish. + UNFINISHED_TASK_WARNING + + message.lines.each do |line| + logger.warn(line) end - end + end - def self.filter(original_options, allowed_params = []) - original_options.transform_keys(&:to_sym).slice(*allowed_params) - end + def transform_attributes(body) + case body + when Array + body.map { |item| transform_attributes(item) } + when Hash + warn_on_non_conforming_attribute_names(body) + parse(body) + else + body + end + end - def self.parse_query(original_options, allowed_params = []) - only_allowed_params = filter(original_options, allowed_params) + def filter(original_options, allowed_params = []) + original_options.transform_keys(&:to_sym).slice(*allowed_params) + end + + def parse_query(original_options, allowed_params = []) + only_allowed_params = filter(original_options, allowed_params) - Utils.transform_attributes(only_allowed_params).then do |body| - body.transform_values do |v| - v.respond_to?(:join) ? v.join(',') : v.to_s + Utils.transform_attributes(only_allowed_params).then do |body| + body.transform_values do |v| + v.respond_to?(:join) ? v.join(',') : v.to_s + end end end - end - def self.message_builder(current_message, method_name) - "#{current_message}\nHint: It might not be working because maybe you're not up " \ - "to date with the Meilisearch version that `#{method_name}` call requires." - end + def version_error_handler(method_name) + yield if block_given? + rescue MeiliSearch::ApiError => e + message = message_builder(e.http_message, method_name) - def self.version_error_handler(method_name) - yield if block_given? 
- rescue MeiliSearch::ApiError => e - message = message_builder(e.http_message, method_name) + raise MeiliSearch::ApiError.new(e.http_code, message, e.http_body) + rescue StandardError => e + raise e.class, message_builder(e.message, method_name) + end - raise MeiliSearch::ApiError.new(e.http_code, message, e.http_body) - rescue StandardError => e - raise e.class, message_builder(e.message, method_name) - end + def warn_on_non_conforming_attribute_names(body) + return if body.nil? - def self.warn_on_non_conforming_attribute_names(body) - return if body.nil? + non_snake_case = body.keys.grep_v(/^[a-z0-9_]+$/) + return if non_snake_case.empty? - non_snake_case = body.keys.grep_v(/^[a-z0-9_]+$/) - return if non_snake_case.empty? + message = <<~MSG + [meilisearch-ruby] Attributes will be expected to be snake_case in future versions. + [meilisearch-ruby] Non-conforming attributes: #{non_snake_case.join(', ')} + MSG - message = <<~MSG - Attributes will be expected to be snake_case in future versions of Meilisearch Ruby. + logger.warn(message) + end - Non-conforming attributes: #{non_snake_case.join(', ')} - MSG + private - warn(message) - end + def parse(body) + body + .transform_keys(&:to_s) + .transform_keys do |key| + key.include?('_') ? key.downcase.gsub(SNAKE_CASE, &:upcase).gsub('_', '') : key + end + end - private_class_method :parse, :message_builder + def message_builder(current_message, method_name) + "#{current_message}\nHint: It might not be working because maybe you're not up " \ + "to date with the Meilisearch version that `#{method_name}` call requires." 
+ end + end end end diff --git a/spec/meilisearch/client/dumps_spec.rb b/spec/meilisearch/client/dumps_spec.rb index e4606b5a..03a108cc 100644 --- a/spec/meilisearch/client/dumps_spec.rb +++ b/spec/meilisearch/client/dumps_spec.rb @@ -2,12 +2,6 @@ RSpec.describe 'MeiliSearch::Client - Dumps' do it 'creates a new dump' do - response = client.create_dump - expect(response).to be_a(Hash) - expect(response['taskUid']).to_not be_nil - expect(response['status']).to_not be_nil - expect(response['status']).to eq('enqueued') - response = client.wait_for_task(response['taskUid']) - expect(response['status']).to eq('succeeded') + expect(client.create_dump.await).to be_succeeded end end diff --git a/spec/meilisearch/client/indexes_spec.rb b/spec/meilisearch/client/indexes_spec.rb index e05a2c08..4e7067a1 100644 --- a/spec/meilisearch/client/indexes_spec.rb +++ b/spec/meilisearch/client/indexes_spec.rb @@ -5,28 +5,54 @@ context 'without a primary key' do it 'creates an index' do task = client.create_index('books') + expect(task.type).to eq('indexCreation') + task.await - expect(task['type']).to eq('indexCreation') - - client.wait_for_task(task['taskUid']) index = client.fetch_index('books') - expect(index).to be_a(MeiliSearch::Index) expect(index.uid).to eq('books') expect(index.primary_key).to be_nil end - it 'creates an index synchronously' do - task = client.create_index!('books') + context 'synchronously' do + context 'using ! 
method' do + before { allow(MeiliSearch::Utils).to receive(:soft_deprecate).and_return(nil) } - expect(task['type']).to eq('indexCreation') - expect(task['status']).to eq('succeeded') + it 'creates an index' do + task = client.create_index!('books') - index = client.fetch_index('books') + expect(task.type).to eq('indexCreation') + expect(task).to be_succeeded - expect(index).to be_a(MeiliSearch::Index) - expect(index.uid).to eq('books') - expect(index.primary_key).to be_nil + index = client.fetch_index('books') + + expect(index).to be_a(MeiliSearch::Index) + expect(index.uid).to eq('books') + expect(index.primary_key).to be_nil + end + + it 'warns about deprecation' do + client.create_index!('books') + expect(MeiliSearch::Utils) + .to have_received(:soft_deprecate) + .with('Client#create_index!', a_string_including('books')) + end + end + + context 'using await syntax' do + it 'creates an index' do + task = client.create_index('books').await + + expect(task['type']).to eq('indexCreation') + expect(task['status']).to eq('succeeded') + + index = client.fetch_index('books') + + expect(index).to be_a(MeiliSearch::Index) + expect(index.uid).to eq('books') + expect(index.primary_key).to be_nil + end + end end end @@ -34,11 +60,10 @@ it 'creates an index' do task = client.create_index('books', primary_key: 'reference_code') - expect(task['type']).to eq('indexCreation') + expect(task.type).to eq('indexCreation') + task.await - client.wait_for_task(task['taskUid']) index = client.fetch_index('books') - expect(index).to be_a(MeiliSearch::Index) expect(index.uid).to eq('books') expect(index.primary_key).to eq('reference_code') @@ -46,7 +71,7 @@ end it 'creates an index synchronously' do - task = client.create_index!('books', primary_key: 'reference_code') + task = client.create_index('books', primary_key: 'reference_code').await expect(task['type']).to eq('indexCreation') expect(task['status']).to eq('succeeded') @@ -62,8 +87,8 @@ context 'when primary key option in 
snake_case' do it 'creates an index' do task = client.create_index('books', primary_key: 'reference_code') - expect(task['type']).to eq('indexCreation') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('indexCreation') + task.await index = client.fetch_index('books') expect(index).to be_a(MeiliSearch::Index) @@ -81,11 +106,10 @@ uid: 'publications' ) - expect(task['type']).to eq('indexCreation') + expect(task.type).to eq('indexCreation') + task.await - client.wait_for_task(task['taskUid']) index = client.fetch_index('books') - expect(index).to be_a(MeiliSearch::Index) expect(index.uid).to eq('books') expect(index.primary_key).to eq('reference_code') @@ -96,8 +120,8 @@ context 'when an index with a given uid already exists' do it 'returns a failing task' do - initial_task = client.create_index!('books') - last_task = client.create_index!('books') + initial_task = client.create_index('books').await + last_task = client.create_index('books').await expect(initial_task['type']).to eq('indexCreation') expect(last_task['type']).to eq('indexCreation') @@ -118,7 +142,7 @@ describe '#indexes' do it 'returns MeiliSearch::Index objects' do - client.create_index!('books') + client.create_index('books').await index = client.indexes['results'].first @@ -126,7 +150,7 @@ end it 'gets a list of indexes' do - ['books', 'colors', 'artists'].each { |name| client.create_index!(name) } + ['books', 'colors', 'artists'].each { |name| client.create_index(name).await } indexes = client.indexes['results'] @@ -137,7 +161,7 @@ end it 'paginates indexes list with limit and offset' do - ['books', 'colors', 'artists'].each { |name| client.create_index!(name) } + ['books', 'colors', 'artists'].each { |name| client.create_index(name).await } indexes = client.indexes(limit: 1, offset: 2) @@ -151,7 +175,7 @@ describe '#raw_indexes' do it 'returns raw indexes' do - client.create_index!('index') + client.create_index('index').await response = client.raw_indexes['results'].first @@ 
-160,7 +184,7 @@ end it 'gets a list of raw indexes' do - ['books', 'colors', 'artists'].each { |name| client.create_index!(name) } + ['books', 'colors', 'artists'].each { |name| client.create_index(name).await } indexes = client.raw_indexes['results'] @@ -173,7 +197,7 @@ describe '#fetch_index' do it 'fetches index by uid' do - client.create_index!('books', primary_key: 'reference_code') + client.create_index('books', primary_key: 'reference_code').await fetched_index = client.fetch_index('books') @@ -186,7 +210,7 @@ describe '#fetch_raw_index' do it 'fetch a specific index raw Hash response based on uid' do - client.create_index!('books', primary_key: 'reference_code') + client.create_index('books', primary_key: 'reference_code').await index = client.fetch_index('books') raw_response = index.fetch_raw_info @@ -202,7 +226,7 @@ describe '#index' do it 'returns an index object with the provided uid' do - client.create_index!('books', primary_key: 'reference_code') + client.create_index('books', primary_key: 'reference_code').await # this index is in memory, without metadata from server index = client.index('books') @@ -219,14 +243,14 @@ describe '#delete_index' do context 'when the index exists' do it 'deletes the index' do - client.create_index!('books') + client.create_index('books').await task = client.delete_index('books') expect(task['type']).to eq('indexDeletion') - achieved_task = client.wait_for_task(task['taskUid']) + task.await - expect(achieved_task['status']).to eq('succeeded') + expect(task).to be_succeeded expect { client.fetch_index('books') }.to raise_index_not_found_meilisearch_api_error end end @@ -241,9 +265,9 @@ describe '#swap_indexes' do it 'swaps two indexes' do task = client.swap_indexes(['indexA', 'indexB'], ['indexC', 'indexD']) - task = client.wait_for_task(task['taskUid']) - expect(task['type']).to eq('indexSwap') + expect(task.type).to eq('indexSwap') + task.await expect(task['details']['swaps']).to eq([{ 'indexes' => ['indexA', 
'indexB'] }, { 'indexes' => ['indexC', 'indexD'] }]) end diff --git a/spec/meilisearch/client/multi_search_spec.rb b/spec/meilisearch/client/multi_search_spec.rb index e8d52a53..4efc0aa7 100644 --- a/spec/meilisearch/client/multi_search_spec.rb +++ b/spec/meilisearch/client/multi_search_spec.rb @@ -3,8 +3,7 @@ RSpec.describe 'MeiliSearch::Client - Multiple Index Search' do before do client.create_index('books') - task = client.create_index('movies') - client.wait_for_task(task['taskUid']) + client.create_index('movies').await end it 'does a custom search with two different indexes' do diff --git a/spec/meilisearch/client/requests_spec.rb b/spec/meilisearch/client/requests_spec.rb index 4d551315..35c01bea 100644 --- a/spec/meilisearch/client/requests_spec.rb +++ b/spec/meilisearch/client/requests_spec.rb @@ -10,7 +10,7 @@ end it 'parses options when they are in a snake_case' do - client.create_index!(key, primary_key: key) + client.create_index(key, primary_key: key).await index = client.fetch_index(key) expect(index.uid).to eq(key) diff --git a/spec/meilisearch/client/tasks_spec.rb b/spec/meilisearch/client/tasks_spec.rb index 8e42d9de..5d646b3a 100644 --- a/spec/meilisearch/client/tasks_spec.rb +++ b/spec/meilisearch/client/tasks_spec.rb @@ -5,7 +5,7 @@ let(:enqueued_task_keys) { ['uid', 'indexUid', 'status', 'type', 'enqueuedAt'] } let(:succeeded_task_keys) { [*enqueued_task_keys, 'details', 'duration', 'startedAt', 'finishedAt'] } - let!(:doc_addition_task) { index.add_documents!(documents) } + let!(:doc_addition_task) { index.add_documents(documents).await } let(:task_uid) { doc_addition_task['uid'] } it 'gets a task of an index' do @@ -123,7 +123,7 @@ describe '#client.wait_for_task' do it 'waits for task with default values' do - task = index.add_documents!(documents) + task = index.add_documents(documents).await task = client.wait_for_task(task['taskUid']) expect(task).to be_a(Hash) diff --git a/spec/meilisearch/client/token_spec.rb 
b/spec/meilisearch/client/token_spec.rb index 1728557c..49b5d9ce 100644 --- a/spec/meilisearch/client/token_spec.rb +++ b/spec/meilisearch/client/token_spec.rb @@ -124,10 +124,7 @@ def initialize(api_key) context 'with search_rules definitions' do include_context 'search books with genre' - before do - filterable_task = index.update_filterable_attributes(['genre', 'objectId']) - index.wait_for_task(filterable_task['taskUid']) - end + before { index.update_filterable_attributes(['genre', 'objectId']).await } let(:adm_client) { MeiliSearch::Client.new(URL, adm_key['key']) } let(:adm_key) do diff --git a/spec/meilisearch/index/base_spec.rb b/spec/meilisearch/index/base_spec.rb index 026c843a..f4c1508e 100644 --- a/spec/meilisearch/index/base_spec.rb +++ b/spec/meilisearch/index/base_spec.rb @@ -2,7 +2,7 @@ RSpec.describe MeiliSearch::Index do it 'fetch the info of the index' do - client.create_index!('books') + client.create_index('books').await index = client.fetch_index('books') expect(index).to be_a(MeiliSearch::Index) @@ -15,7 +15,7 @@ end it 'fetch the raw Hash info of the index' do - client.create_index!('books', primary_key: 'reference_number') + client.create_index('books', primary_key: 'reference_number').await raw_index = client.fetch_raw_index('books') @@ -29,7 +29,7 @@ end it 'get primary-key of index if null' do - client.create_index!('index_without_primary_key') + client.create_index('index_without_primary_key').await index = client.fetch_index('index_without_primary_key') expect(index.primary_key).to be_nil @@ -37,7 +37,7 @@ end it 'get primary-key of index if it exists' do - client.create_index!('index_with_prirmary_key', primary_key: 'primary_key') + client.create_index('index_with_prirmary_key', primary_key: 'primary_key').await index = client.fetch_index('index_with_prirmary_key') expect(index.primary_key).to eq('primary_key') @@ -45,18 +45,18 @@ end it 'get uid of index' do - client.create_index!('uid') + client.create_index('uid').await index = 
client.fetch_index('uid') expect(index.uid).to eq('uid') end it 'updates primary-key of index if not defined before' do - client.create_index!('uid') + client.create_index('uid').await task = client.index('uid').update(primary_key: 'new_primary_key') - expect(task['type']).to eq('indexUpdate') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('indexUpdate') + task.await index = client.fetch_index('uid') expect(index).to be_a(MeiliSearch::Index) @@ -70,11 +70,11 @@ end it 'updates primary-key of index if has been defined before but there is not docs' do - client.create_index!('books', primary_key: 'reference_number') + client.create_index('books', primary_key: 'reference_number').await task = client.index('books').update(primary_key: 'international_standard_book_number') - expect(task['type']).to eq('indexUpdate') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('indexUpdate') + task.await index = client.fetch_index('books') expect(index).to be_a(MeiliSearch::Index) @@ -89,14 +89,14 @@ it 'returns a failing task if primary-key is already defined' do index = client.index('uid') - index.add_documents!({ id: 1, title: 'My Title' }) + index.add_documents({ id: 1, title: 'My Title' }).await task = index.update(primary_key: 'new_primary_key') - expect(task['type']).to eq('indexUpdate') - achieved_task = client.wait_for_task(task['taskUid']) + expect(task.type).to eq('indexUpdate') - expect(achieved_task['status']).to eq('failed') - expect(achieved_task['error']['code']).to eq('index_primary_key_already_exists') + task.await + expect(task).to be_failed + expect(task.error['code']).to eq('index_primary_key_already_exists') end it 'supports options' do @@ -107,7 +107,7 @@ } new_client = MeiliSearch::Client.new(URL, MASTER_KEY, options) - new_client.create_index!('books') + new_client.create_index('books').await index = new_client.fetch_index('books') expect(index.options).to eq({ max_retries: 1, timeout: 2, convert_body?: true }) @@ -135,7 
+135,7 @@ } new_client = MeiliSearch::Client.new(URL, MASTER_KEY, options) - new_client.create_index!('books') + new_client.create_index('books').await index = new_client.fetch_index('books') expect(index.options).to eq(options.merge({ convert_body?: true })) @@ -155,21 +155,21 @@ end it 'deletes index' do - client.create_index!('uid') + client.create_index('uid').await task = client.index('uid').delete - expect(task['type']).to eq('indexDeletion') - achieved_task = client.wait_for_task(task['taskUid']) - expect(achieved_task['status']).to eq('succeeded') + expect(task.type).to eq('indexDeletion') + task.await + expect(task).to be_succeeded expect { client.fetch_index('uid') }.to raise_index_not_found_meilisearch_api_error end it 'fails to manipulate index object after deletion' do - client.create_index!('uid') + client.create_index('uid').await task = client.index('uid').delete - expect(task['type']).to eq('indexDeletion') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('indexDeletion') + task.await index = client.index('uid') expect { index.fetch_primary_key }.to raise_index_not_found_meilisearch_api_error @@ -177,7 +177,7 @@ end it 'works with method aliases' do - client.create_index!('uid', primary_key: 'primary_key') + client.create_index('uid', primary_key: 'primary_key').await index = client.fetch_index('uid') expect(index.method(:fetch_primary_key) == index.method(:get_primary_key)).to be_truthy @@ -187,11 +187,11 @@ context 'with snake_case options' do it 'does the request with camelCase attributes' do - client.create_index!('uid') + client.create_index('uid').await task = client.index('uid').update(primary_key: 'new_primary_key') - expect(task['type']).to eq('indexUpdate') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('indexUpdate') + task.await index = client.fetch_index('uid') expect(index).to be_a(MeiliSearch::Index) diff --git a/spec/meilisearch/index/documents_spec.rb b/spec/meilisearch/index/documents_spec.rb 
index b4dab437..177af894 100644 --- a/spec/meilisearch/index/documents_spec.rb +++ b/spec/meilisearch/index/documents_spec.rb @@ -16,163 +16,168 @@ ] end - describe 'adding documents' do - it 'adds documents (as a array of documents)' do - task = index.add_documents(documents) + let(:documents_with_string_keys) { documents.map { |doc| doc.transform_keys(&:to_s) } } + + describe '#add_documents' do + context 'passed an array of documents' do + it 'adds documents' do + task = index.add_documents(documents) + expect(task.type).to eq('documentAdditionOrUpdate') + task.await + expect(index.documents['results']).to contain_exactly(*documents_with_string_keys) + end - expect(task['type']).to eq('documentAdditionOrUpdate') - client.wait_for_task(task['taskUid']) - expect(index.documents['results'].count).to eq(documents.count) - end + it 'keeps the structure of the original documents' do + doc = { object_id: 123, my_title: 'Pride and Prejudice', 'my-comment': 'A great book' } + index.add_documents([doc]).await - it 'keeps the structure of the original documents' do - docs = [ - { object_id: 123, my_title: 'Pride and Prejudice', 'my-comment': 'A great book' } - ] - - task = index.add_documents(docs) - client.wait_for_task(task['taskUid']) + expect(index.documents['results'].first.keys).to eq(doc.keys.map(&:to_s)) + end - expect(index.documents['results'].first.keys).to eq(docs.first.keys.map(&:to_s)) - end + it 'adds JSON documents' do + documents = <<~JSON + [ + { "objectRef": 123, "title": "Pride and Prejudice", "comment": "A great book" }, + { "objectRef": 456, "title": "Le Petit Prince", "comment": "A french book" }, + { "objectRef": 1, "title": "Alice In Wonderland", "comment": "A weird book" }, + { "objectRef": 1344, "title": "The Hobbit", "comment": "An awesome book" }, + { "objectRef": 4, "title": "Harry Potter and the Half-Blood Prince", "comment": "The best book" } + ] + JSON + index.add_documents_json(documents, 'objectRef').await + + 
expect(index.documents['results'].count).to eq(5) + end - it 'adds JSON documents (as a array of documents)' do - documents = <<~JSON - [ - { "objectRef": 123, "title": "Pride and Prejudice", "comment": "A great book" }, - { "objectRef": 456, "title": "Le Petit Prince", "comment": "A french book" }, - { "objectRef": 1, "title": "Alice In Wonderland", "comment": "A weird book" }, - { "objectRef": 1344, "title": "The Hobbit", "comment": "An awesome book" }, + it 'adds NDJSON documents' do + documents = <<~NDJSON + { "objectRef": 123, "title": "Pride and Prejudice", "comment": "A great book" } + { "objectRef": 456, "title": "Le Petit Prince", "comment": "A french book" } + { "objectRef": 1, "title": "Alice In Wonderland", "comment": "A weird book" } { "objectRef": 4, "title": "Harry Potter and the Half-Blood Prince", "comment": "The best book" } - ] - JSON - response = index.add_documents_json(documents, 'objectRef') + NDJSON + index.add_documents_ndjson(documents, 'objectRef').await - index.wait_for_task(response['taskUid']) - expect(index.documents['results'].count).to eq(5) - end + expect(index.documents['results'].count).to eq(4) + end - it 'adds NDJSON documents (as a array of documents)' do - documents = <<~NDJSON - { "objectRef": 123, "title": "Pride and Prejudice", "comment": "A great book" } - { "objectRef": 456, "title": "Le Petit Prince", "comment": "A french book" } - { "objectRef": 1, "title": "Alice In Wonderland", "comment": "A weird book" } - { "objectRef": 4, "title": "Harry Potter and the Half-Blood Prince", "comment": "The best book" } - NDJSON - response = index.add_documents_ndjson(documents, 'objectRef') + it 'adds CSV documents' do + documents = <<~CSV + "objectRef:number","title:string","comment:string" + "1239","Pride and Prejudice","A great book" + "4569","Le Petit Prince","A french book" + "49","Harry Potter and the Half-Blood Prince","The best book" + CSV + index.add_documents_csv(documents, 'objectRef').await - 
index.wait_for_task(response['taskUid']) - expect(index.documents['results'].count).to eq(4) - end + expect(index.documents['results'].count).to eq(3) + end - it 'adds CSV documents (as a array of documents)' do - documents = <<~CSV - "objectRef:number","title:string","comment:string" - "1239","Pride and Prejudice","A great book" - "4569","Le Petit Prince","A french book" - "49","Harry Potter and the Half-Blood Prince","The best book" - CSV - response = index.add_documents_csv(documents, 'objectRef') + it 'adds CSV documents with different separator' do + documents = <<~CSV + "objectRef:number"|"title:string"|"comment:string" + "1239"|"Pride and Prejudice"|"A great book" + "4569"|"Le Petit Prince"|"A french book" + "49"|"Harry Potter and the Half-Blood Prince"|"The best book" + CSV + + index.add_documents_csv(documents, 'objectRef', '|').await + + expect(index.documents['results'].count).to eq(3) + expect(index.documents['results'][1]).to match( + 'objectRef' => 4569, + 'title' => 'Le Petit Prince', + 'comment' => 'A french book' + ) + end - index.wait_for_task(response['taskUid']) - expect(index.documents['results'].count).to eq(3) - end + it 'infers order of fields' do + index.add_documents(documents).await + task = index.document(1) + expect(task.keys).to eq(['objectId', 'title', 'comment']) + end - it 'adds CSV documents (as an array of documents with a different separator)' do - documents = <<~CSV - "objectRef:number"|"title:string"|"comment:string" - "1239"|"Pride and Prejudice"|"A great book" - "4569"|"Le Petit Prince"|"A french book" - "49"|"Harry Potter and the Half-Blood Prince"|"The best book" - CSV + it 'slices response fields' do + index.add_documents(documents).await - response = index.add_documents_csv(documents, 'objectRef', '|') - index.wait_for_task(response['taskUid']) + document = index.document(1, fields: ['title']) - expect(index.documents['results'].count).to eq(3) - expect(index.documents['results'][1]['objectRef']).to eq(4569) - 
expect(index.documents['results'][1]['title']).to eq('Le Petit Prince') - expect(index.documents['results'][1]['comment']).to eq('A french book') - end + expect(document.keys).to eq(['title']) + end - it 'adds documents in a batch (as a array of documents)' do - task = index.add_documents_in_batches(documents, 5) - expect(task).to be_a(Array) - expect(task.count).to eq(2) # 2 batches, since we start with 5 < documents.count <= 10 documents - expect(task[0]).to have_key('taskUid') - task.each do |task_object| - client.wait_for_task(task_object['taskUid']) + it 'infers primary-key attribute' do + index.add_documents(documents).await + expect(index.fetch_primary_key).to eq('objectId') end - expect(index.documents['results'].count).to eq(documents.count) - end - it 'adds documents synchronously (as an array of documents)' do - task = index.add_documents!(documents) + it 'creates the index during document addition' do + new_index = client.index('books') + new_index.add_documents(documents).await - expect(task).to have_key('status') - expect(task['status']).not_to eql('enqueued') - expect(task['status']).to eql('succeeded') - expect(index.documents['results'].count).to eq(documents.count) - end - - it 'adds document batches synchronously (as an array of documents)' do - task = index.add_documents_in_batches!(documents, 5) - expect(task).to be_a(Array) - expect(task.count).to eq(2) # 2 batches, since we start with 5 < documents.count <= 10 documents - task.each do |task_object| - expect(task_object).to have_key('uid') - expect(task_object).to have_key('status') - expect(task_object['status']).not_to eql('enqueued') - expect(task_object['status']).to eql('succeeded') + expect(client.index('books').fetch_primary_key).to eq('objectId') + expect(client.index('books').documents['results'].count).to eq(documents.count) end - expect(index.documents['results'].count).to eq(documents.count) end - it 'infers order of fields' do - index.add_documents!(documents) - task = 
index.document(1) - expect(task.keys).to eq(['objectId', 'title', 'comment']) - end + it 'adds documents in a batch (as a array of documents)' do + tasks = index.add_documents_in_batches(documents, 5) + expect(tasks).to contain_exactly(a_kind_of(MeiliSearch::Models::Task), + a_kind_of(MeiliSearch::Models::Task)) + tasks.each(&:await) + expect(index.documents['results']).to contain_exactly(*documents_with_string_keys) + end + + context 'given a single document' do + it 'adds only one document to index (as an hash of one document)' do + new_doc = { objectId: 30, title: 'Hamlet' } + client.create_index('books').await + new_index = client.index('books') + expect do + new_index.add_documents(new_doc).await + end.to(change { new_index.documents['results'].length }.by(1)) - it 'slices response fields' do - index.add_documents!(documents) + expect(new_index.document(30)['title']).to eq('Hamlet') + end - task = index.document(1, fields: ['title']) + it 'fails to add document with bad primary-key format' do + index.add_documents(documents).await + task = index.add_documents(objectId: 'toto et titi', title: 'Unknown').await + expect(task).to have_failed + end - expect(task.keys).to eq(['title']) - end + it 'fails to add document with no primary-key' do + index.add_documents(documents).await + task = index.add_documents(id: 0, title: 'Unknown').await + expect(task).to have_failed + end - it 'infers primary-key attribute' do - index.add_documents!(documents) - expect(index.fetch_primary_key).to eq('objectId') + it 'allows the user to store vectors' do + enable_vector_store(true) + new_doc = { objectId: 123, _vectors: [0.1, 0.2, 0.3] } + client.create_index('vector_test').await + new_index = client.index('vector_test') + new_index.add_documents(new_doc).await + expect(new_index.document(123)['_vectors']).to include(0.1) + end end + end - it 'create the index during document addition' do - new_index = client.index('books') - task = new_index.add_documents(documents) - - 
new_index.wait_for_task(task['taskUid']) - expect(client.index('books').fetch_primary_key).to eq('objectId') - expect(client.index('books').documents['results'].count).to eq(documents.count) - end + describe '#add_documents!' do + before { allow(MeiliSearch::Utils).to receive(:soft_deprecate).and_return(nil) } - it 'adds only one document to index (as an hash of one document)' do - new_doc = { objectId: 30, title: 'Hamlet' } - client.create_index!('books') - new_index = client.index('books') - expect do - new_index.add_documents!(new_doc) + it 'adds documents synchronously (as an array of documents)' do + task = index.add_documents!(documents) - expect(new_index.document(30)['title']).to eq('Hamlet') - end.to(change { new_index.documents['results'].length }.by(1)) + expect(task).to be_finished + expect(index.documents['results'].count).to eq(documents.count) end it 'adds only one document synchronously to index (as an hash of one document)' do new_doc = { objectId: 30, title: 'Hamlet' } - client.create_index!('books') + client.create_index('books').await new_index = client.index('books') expect do - task = new_index.add_documents!(new_doc) + task = new_index.add_documents(new_doc).await expect(task).to have_key('status') expect(task['status']).to eq('succeeded') @@ -180,206 +185,119 @@ end.to(change { new_index.documents['results'].length }.by(1)) end - it 'fails to add document with bad primary-key format' do + it 'warns about deprecation' do index.add_documents!(documents) - task = index.add_documents(objectId: 'toto et titi', title: 'Unknown') - client.wait_for_task(task['taskUid']) - expect(index.task(task['taskUid'])['status']).to eq('failed') + expect(MeiliSearch::Utils).to have_received(:soft_deprecate) + .with('Index#add_documents!', a_string_including('await')) end + end - it 'fails to add document with no primary-key' do - index.add_documents!(documents) - task = index.add_documents(id: 0, title: 'Unknown') - client.wait_for_task(task['taskUid']) - 
expect(index.task(task['taskUid'])['status']).to eq('failed') - end + describe '#add_documents_in_batches!' do + before { allow(MeiliSearch::Utils).to receive(:soft_deprecate).and_return(nil) } - it 'allows the user to store vectors' do - enable_vector_store(true) + it 'adds document batches synchronously' do + expect(index.add_documents_in_batches!(documents, 5)).to contain_exactly(be_succeeded, be_succeeded) + expect(index.documents['results'].count).to eq(documents.count) + end - new_doc = { objectId: 123, _vectors: [0.1, 0.2, 0.3] } - client.create_index!('vector_test') - new_index = client.index('vector_test') - expect do - new_index.add_documents!(new_doc) - end.to(change { new_index.documents['results'].length }.by(1)) - expect(new_index.document(123)).to have_key('_vectors') - expect(new_index.document(123)['_vectors']).to be_a(Array) - expect(new_index.document(123)['_vectors'].first).to be_a(Float) - expect(new_index.document(123)['_vectors'].first).to eq(0.1) + it 'warns about deprecation' do + index.add_documents_in_batches!(documents, 5) + expect(MeiliSearch::Utils).to have_received(:soft_deprecate) + .with('Index#add_documents_in_batches!', a_string_including('await')) end end - describe 'accessing documents' do - before do - index.add_documents(documents) + describe '#document' do + before { index.add_documents(documents).await } - task = index.update_filterable_attributes(['title', 'objectId']) - client.wait_for_task(task['taskUid']) + it 'gets one document from its primary-key' do + expect(index.document(123)).to include( + 'title' => 'Pride and Prejudice', + 'comment' => 'A great book' + ) end + end - it 'gets one document from its primary-key' do - task = index.document(123) - expect(task).to be_a(Hash) - expect(task['title']).to eq('Pride and Prejudice') - expect(task['comment']).to eq('A great book') + describe '#documents' do + before do + index.add_documents(documents).await + index.update_filterable_attributes(['title', 'objectId']).await 
end it 'browses documents' do docs = index.documents['results'] - - expect(docs).to be_a(Array) - expect(docs.size).to eq(documents.count) - expected_titles = documents.map { |doc| doc[:title] } - expect(docs.map { |doc| doc['title'] }).to contain_exactly(*expected_titles) + expect(docs).to contain_exactly(*documents_with_string_keys) end it 'browses documents with query parameters' do docs = index.documents(offset: 2, limit: 5)['results'] - expect(docs).to be_a(Array) expect(docs.size).to eq(5) - expect(docs.first['objectId']).to eq(index.documents['results'][2]['objectId']) + expect(docs.first).to eq(index.documents['results'][2]) end it 'browses documents with fields' do docs = index.documents(fields: ['title'])['results'] - expect(docs).to be_a(Array) - expect(docs.first.keys).to eq(['title']) + expect(docs).to include(a_hash_including('title')) + expect(docs).not_to include(a_hash_including('comment')) end it 'retrieves documents by filters' do docs = index.documents(filter: 'objectId > 400')['results'] - expect(docs).to be_a(Array) - expect(docs.first).to eq({ - 'objectId' => 456, - 'title' => 'Le Petit Prince', - 'comment' => 'A french book' - }) + expect(docs).to include('objectId' => 456, + 'title' => 'Le Petit Prince', + 'comment' => 'A french book') end it 'retrieves documents by filters & other parameters' do docs = index.documents(fields: ['title'], filter: 'objectId > 100')['results'] - expect(docs).to be_a(Array) - expect(docs.size).to eq(3) - expect(docs.first.keys).to eq(['title']) + expect(docs).to contain_exactly( + { 'title' => a_kind_of(String) }, + { 'title' => a_kind_of(String) }, + { 'title' => a_kind_of(String) } + ) end end - describe 'updating documents' do - before { index.add_documents!(documents) } - - it 'updates documents in index (as an array of documents)' do - id1 = 123 - id2 = 456 - updated_documents = [ - { objectId: id1, title: 'Sense and Sensibility' }, - { objectId: id2, title: 'The Little Prince' } - ] - task = 
index.update_documents(updated_documents) - client.wait_for_task(task['taskUid']) - doc1 = index.document(id1) - doc2 = index.document(id2) - expect(index.documents['results'].count).to eq(documents.count) - expect(doc1['title']).to eq(updated_documents.detect { |doc| doc[:objectId] == id1 }[:title]) - expect(doc1['comment']).to eq(documents.detect { |doc| doc[:objectId] == id1 }[:comment]) - expect(doc2['title']).to eq(updated_documents.detect { |doc| doc[:objectId] == id2 }[:title]) - expect(doc2['comment']).to eq(documents.detect { |doc| doc[:objectId] == id2 }[:comment]) - end - - it 'updates documents synchronously in index (as an array of documents)' do - id1 = 123 - id2 = 456 - updated_documents = [ - { objectId: id1, title: 'Sense and Sensibility' }, - { objectId: id2, title: 'The Little Prince' } - ] - task = index.update_documents!(updated_documents) - - expect(task).to have_key('status') - expect(task['status']).not_to eql('enqueued') - expect(task['status']).to eql('succeeded') - doc1 = index.document(id1) - doc2 = index.document(id2) - expect(index.documents['results'].count).to eq(documents.count) - expect(doc1['title']).to eq(updated_documents.detect { |doc| doc[:objectId] == id1 }[:title]) - expect(doc1['comment']).to eq(documents.detect { |doc| doc[:objectId] == id1 }[:comment]) - expect(doc2['title']).to eq(updated_documents.detect { |doc| doc[:objectId] == id2 }[:title]) - expect(doc2['comment']).to eq(documents.detect { |doc| doc[:objectId] == id2 }[:comment]) - end - - it 'updates documents synchronously in index in batches (as an array of documents)' do - id1 = 123 - id2 = 456 - updated_documents = [ - { objectId: id1, title: 'Sense and Sensibility' }, - { objectId: id2, title: 'The Little Prince' } - ] - task = index.update_documents_in_batches!(updated_documents, 1) - expect(task).to be_a(Array) - expect(task.count).to eq(2) # 2 batches, since we have two items with batch size 1 - task.each do |task_object| - expect(task_object).to 
have_key('uid') - expect(task_object).to have_key('status') - expect(task_object['status']).not_to eql('enqueued') - expect(task_object['status']).to eql('succeeded') - end - doc1 = index.document(id1) - doc2 = index.document(id2) - expect(index.documents['results'].count).to eq(documents.count) - expect(doc1['title']).to eq(updated_documents.detect { |doc| doc[:objectId] == id1 }[:title]) - expect(doc1['comment']).to eq(documents.detect { |doc| doc[:objectId] == id1 }[:comment]) - expect(doc2['title']).to eq(updated_documents.detect { |doc| doc[:objectId] == id2 }[:title]) - expect(doc2['comment']).to eq(documents.detect { |doc| doc[:objectId] == id2 }[:comment]) - end + describe '#update_documents' do + before { index.add_documents(documents).await } - it 'updates one document in index (as an hash of one document)' do - id = 123 - updated_document = { objectId: id, title: 'Emma' } - task = index.update_documents(updated_document) - client.wait_for_task(task['taskUid']) + it 'updates multiple documents in index' do + index.update_documents( + [{ objectId: 123, title: 'Sense and Sensibility' }, + { objectId: 456, title: 'The Little Prince' }] + ).await expect(index.documents['results'].count).to eq(documents.count) - new_doc = index.document(id) - expect(new_doc['title']).to eq(updated_document[:title]) - expect(new_doc['comment']).to eq(documents.detect { |doc| doc[:objectId] == id }[:comment]) + expect(index.document(123)).to include('objectId' => 123, 'title' => 'Sense and Sensibility') + expect(index.document(456)).to include('objectId' => 456, 'title' => 'The Little Prince') end - it 'updates one document synchronously in index (as an hash of one document)' do - id = 123 - updated_document = { objectId: id, title: 'Emma' } - task = index.update_documents!(updated_document) + it 'updates a single document in index' do + index.update_documents({ objectId: 123, title: 'Emma' }).await - expect(task).to have_key('status') - expect(task['status']).not_to 
eql('enqueued') - expect(task['status']).to eql('succeeded') expect(index.documents['results'].count).to eq(documents.count) - new_doc = index.document(id) - expect(new_doc['title']).to eq(updated_document[:title]) - expect(new_doc['comment']).to eq(documents.detect { |doc| doc[:objectId] == id }[:comment]) + expect(index.document(123)).to include('objectId' => 123, 'title' => 'Emma') end it 'update a document with new fields' do - id = 2 - doc = { objectId: id, note: '8/10' } - task = index.update_documents(doc) - client.wait_for_task(task['taskUid']) + doc = { objectId: 2, note: '8/10' } + old_title = 'Le Rouge et le Noir' + + index.update_documents(doc).await expect(index.documents['results'].count).to eq(documents.count) - new_document = index.document(id) - expect(new_document['title']).to eq(documents.detect { |d| d[:objectId] == id }[:title]) - expect(new_document).to have_key('note') + expect(index.document(2)).to include('title' => old_title, 'note' => '8/10') end it 'replaces document' do id = 123 new_title = 'Pride & Prejudice' - task = index.replace_documents(objectId: id, title: 'Pride & Prejudice', note: '8.5/10') + index.replace_documents(objectId: id, title: 'Pride & Prejudice', note: '8.5/10').await - client.wait_for_task(task['taskUid']) expect(index.documents['results'].count).to eq(documents.count) doc = index.document(id) expect(doc['title']).to eq(new_title) @@ -388,21 +306,118 @@ end end - describe 'deleting documents' do - before { index.add_documents!(documents) } + describe '#update_documents!' 
do + before do + index.add_documents(documents).await + allow(MeiliSearch::Utils).to receive(:soft_deprecate).and_return(nil) + end + + it 'updates multiple documents synchronously' do + updated_documents = [ + { objectId: 123, title: 'Sense and Sensibility' }, + { objectId: 456, title: 'The Little Prince' } + ] + + expect(index.update_documents!(updated_documents)).to be_succeeded + + expect(index.document(123)).to include('objectId' => 123, 'title' => 'Sense and Sensibility') + expect(index.document(456)).to include('objectId' => 456, 'title' => 'The Little Prince') + end + + it 'updates a single document synchronously' do + updated_document = { objectId: 123, title: 'Emma' } + + expect(index.update_documents!(updated_document)).to be_succeeded + expect(index.document(123)).to include('objectId' => 123, 'title' => 'Emma') + end + + it 'warns about deprecation' do + updated_documents = [ + { objectId: 123, title: 'Sense and Sensibility' }, + { objectId: 456, title: 'The Little Prince' } + ] + + index.update_documents!(updated_documents) + expect(MeiliSearch::Utils).to have_received(:soft_deprecate) + .with('Index#update_documents!', a_string_including('await')) + end + end + + describe '#update_documents_in_batches' do + before { index.add_documents(documents).await } + + it 'updates documents in index in batches' do + updated_documents = [ + { objectId: 123, title: 'Sense and Sensibility' }, + { objectId: 456, title: 'The Little Prince' } + ] + + index.update_documents_in_batches(updated_documents, 1).each(&:await) + + expect(index.document(123)).to include('objectId' => 123, 'title' => 'Sense and Sensibility') + expect(index.document(456)).to include('objectId' => 456, 'title' => 'The Little Prince') + end + end + + describe '#update_documents_in_batches!' 
do + before do + index.add_documents(documents).await + allow(MeiliSearch::Utils).to receive(:soft_deprecate).and_return(nil) + end + + it 'updates documents synchronously in index in batches (as an array of documents)' do + updated_documents = [ + { objectId: 123, title: 'Sense and Sensibility' }, + { objectId: 456, title: 'The Little Prince' } + ] + + expect(index.update_documents_in_batches!(updated_documents, 1)) + .to contain_exactly(be_succeeded, be_succeeded) + + expect(index.document(123)).to include('objectId' => 123, 'title' => 'Sense and Sensibility') + expect(index.document(456)).to include('objectId' => 456, 'title' => 'The Little Prince') + end + + it 'warns about deprecation' do + updated_documents = [ + { objectId: 123, title: 'Sense and Sensibility' }, + { objectId: 456, title: 'The Little Prince' } + ] + + index.update_documents_in_batches!(updated_documents, 1) + expect(MeiliSearch::Utils).to have_received(:soft_deprecate) + .with('Index#update_documents_in_batches!', a_string_including('await')) + end + end + + describe '#delete_document' do + before { index.add_documents(documents).await } it 'deletes one document from index' do id = 456 - task = index.delete_document(id) - client.wait_for_task(task['taskUid']) + index.delete_document(id).await - expect(index.documents['results'].size).to eq(documents.count - 1) + expect(index.documents['results']).not_to include(a_hash_including('id' => 456)) + end + + it 'does nothing when trying to delete a document which does not exist' do + id = 111 expect { index.document(id) }.to raise_document_not_found_meilisearch_api_error + expect do + index.delete_document(id).await + end.not_to(change { index.documents['results'].size }) + end + end + + describe '#delete_document!' 
do + before do + index.add_documents(documents).await + allow(MeiliSearch::Utils).to receive(:soft_deprecate).and_return(nil) end it 'deletes one document synchronously from index' do id = 456 - task = index.delete_document!(id) + task = index.delete_document(id).await expect(task).to have_key('status') expect(task['status']).not_to eql('enqueued') @@ -411,122 +426,116 @@ expect { index.document(id) }.to raise_document_not_found_meilisearch_api_error end - it 'does nothing when trying to delete a document which does not exist' do - id = 111 - expect { index.document(id) }.to raise_document_not_found_meilisearch_api_error - expect do - task = index.delete_document(id) - client.wait_for_task(task['taskUid']) - end.not_to(change { index.documents['results'].size }) + it 'warns about deprecation' do + index.delete_document!(2) + expect(MeiliSearch::Utils).to have_received(:soft_deprecate) + .with('Index#delete_document!', a_string_including('await')) end + end - it 'deletes one document from index (with delete-batch route)' do + describe '#delete_documents' do + before { index.add_documents(documents).await } + + it 'deletes a single document' do id = 2 expect do - task = index.delete_documents(id) - client.wait_for_task(task['taskUid']) - end.to(change { index.documents['results'].size }.by(-1)) + index.delete_documents(id).await + end.to change { index.documents['results'].size }.by(-1) expect { index.document(id) }.to raise_document_not_found_meilisearch_api_error end - it 'deletes documents based on filter from index (with delete route)' do - expect do - index.update_filterable_attributes(['objectId']) - task = index.delete_documents(filter: ['objectId > 0']) - - client.wait_for_task(task['taskUid']) - end.to(change { index.documents['results'].size }.by(-documents.size)) + it 'deletes documents based on filter from index' do + index.update_filterable_attributes(['objectId']) + index.delete_documents(filter: ['objectId > 0']).await + 
expect(index.documents['results']).to be_empty end - it 'ignores filter even when documents_ids is empty (with delete-batch route)' do + it 'ignores filters when documents_ids is empty' do expect do - task = index.delete_documents(filter: ['objectId > 0']) - - client.wait_for_task(task['taskUid']) - end.to(change { index.documents['results'].size }.by(0)) + index.delete_documents(filter: ['objectId > 0']).await + end.not_to(change { index.documents['results'] }) end - it 'deletes one document synchronously from index (with delete-batch route)' do - id = 2 + it 'deletes multiple documents from index' do + docs_to_delete = [1, 4] expect do - task = index.delete_documents!(id) - - expect(task['status']).not_to eql('enqueued') - expect(task['status']).to eql('succeeded') - end.to(change { index.documents['results'].size }.by(-1)) - expect { index.document(id) }.to raise_document_not_found_meilisearch_api_error + index.delete_documents(docs_to_delete).await + end.to change { index.documents['results'].size }.by(-2) end + end - it 'deletes one document from index (with delete-batch route as an array of one uid)' do - id = 123 - expect do - task = index.delete_documents([id]) - client.wait_for_task(task['taskUid']) - end.to(change { index.documents['results'].size }.by(-1)) - expect { index.document(id) }.to raise_document_not_found_meilisearch_api_error + describe '#delete_documents!' 
do + before do + index.add_documents(documents).await + allow(MeiliSearch::Utils).to receive(:soft_deprecate).and_return(nil) end - it 'deletes one document synchronously from index (with delete-batch route as an array of one uid)' do - id = 123 - expect do - task = index.delete_documents!([id]) + it 'deletes a single document' do + id = 2 - expect(task['status']).not_to eql('enqueued') - expect(task['status']).to eql('succeeded') - end.to(change { index.documents['results'].size }.by(-1)) - expect { index.document(id) }.to raise_document_not_found_meilisearch_api_error + expect(index.delete_documents!(id)).to be_succeeded + expect(index.documents['results']).not_to include(a_hash_including('id' => 2)) end - it 'deletes multiples documents from index' do + it 'deletes multiple documents' do docs_to_delete = [1, 4] - expect do - task = index.delete_documents(docs_to_delete) - client.wait_for_task(task['taskUid']) - end.to(change { index.documents['results'].size }.by(-2)) - end + expect(index.delete_documents!(docs_to_delete)).to be_succeeded - it 'deletes multiples documents synchronously from index' do - docs_to_delete = [1, 4] - expect do - task = index.delete_documents!(docs_to_delete) + expect(index.documents['results']).not_to include( + a_hash_including('id' => 1), + a_hash_including('id' => 4) + ) + end - expect(task['status']).not_to eql('enqueued') - expect(task['status']).to eql('succeeded') - end.to(change { index.documents['results'].size }.by(-2)) + it 'warns about deprecation' do + index.delete_documents!([2]) + expect(MeiliSearch::Utils).to have_received(:soft_deprecate) + .with('Index#delete_documents!', a_string_including('await')) end + end + + describe '#delete_all_documents' do + before { index.add_documents(documents).await } it 'clears all documents from index' do - expect do - task = index.delete_all_documents - client.wait_for_task(task['taskUid']) - expect(index.documents['results']).to be_empty - end.to(change { 
index.documents['results'].size }.from(documents.size).to(0)) + expect(index.documents['results']).not_to be_empty + index.delete_all_documents.await + expect(index.documents['results']).to be_empty end + end - it 'clears all documents synchronously from index' do - task = index.delete_all_documents! + describe '#delete_all_documents!' do + before do + index.add_documents(documents).await + allow(MeiliSearch::Utils).to receive(:soft_deprecate).and_return(nil) + end - expect(task).to have_key('status') - expect(task['status']).not_to eql('enqueued') - expect(task['status']).to eql('succeeded') + it 'clears all documents synchronously from index' do + expect(index.documents['results']).not_to be_empty + expect(index.delete_all_documents!).to be_succeeded expect(index.documents['results']).to be_empty - expect(index.documents['results'].size).to eq(0) + end + + it 'warns about deprecation' do + index.delete_all_documents! + expect(MeiliSearch::Utils).to have_received(:soft_deprecate) + .with('Index#delete_all_documents!', a_string_including('await')) end end it 'works with method aliases' do - expect(index.method(:document) == index.method(:get_document)).to be_truthy - expect(index.method(:document) == index.method(:get_one_document)).to be_truthy - expect(index.method(:documents) == index.method(:get_documents)).to be_truthy - expect(index.method(:add_documents) == index.method(:add_or_replace_documents)).to be_truthy - expect(index.method(:add_documents) == index.method(:replace_documents)).to be_truthy - expect(index.method(:update_documents) == index.method(:add_or_update_documents)).to be_truthy - expect(index.method(:delete_documents) == index.method(:delete_multiple_documents)).to be_truthy - expect(index.method(:delete_document) == index.method(:delete_one_document)).to be_truthy + expect(index.method(:document)).to eq index.method(:get_document) + expect(index.method(:document)).to eq index.method(:get_one_document) + expect(index.method(:documents)).to eq 
index.method(:get_documents) + expect(index.method(:add_documents)).to eq index.method(:add_or_replace_documents) + expect(index.method(:add_documents)).to eq index.method(:replace_documents) + expect(index.method(:update_documents)).to eq index.method(:add_or_update_documents) + expect(index.method(:delete_documents)).to eq index.method(:delete_multiple_documents) + expect(index.method(:delete_document)).to eq index.method(:delete_one_document) end end - context 'Right primary-key added when pushing documents' do + context 'when the right primary key is passed' do let(:documents) do [ { unique: 1, id: 1, title: 'Pride and Prejudice', comment: 'A great book' }, @@ -535,80 +544,78 @@ ] end - it 'adds documents and the primary-key' do - task = index.add_documents(documents, 'unique') - expect(task).to be_a(Hash) - client.wait_for_task(task['taskUid']) + it 'adds documents and the primary key' do + index.add_documents(documents, 'unique').await expect(index.fetch_primary_key).to eq('unique') end - it 'does not take into account the new primary key' do - index.add_documents!(documents, 'unique') + it 'fails to add tasks with a different primary key' do + index.add_documents(documents, 'unique').await task = index.update_documents({ unique: 3, id: 1, title: 'The Red and the Black' }, 'id') - task = client.wait_for_task(task['taskUid']) - - expect(task['status']).to eq('failed') - expect(task['type']).to eq('documentAdditionOrUpdate') - expect(task['error']['code']).to eq('index_primary_key_already_exists') + expect(task.await).to be_failed + expect(task.type).to eq('documentAdditionOrUpdate') + expect(task.error['code']).to eq('index_primary_key_already_exists') end end - context 'Wrong primary-key (attribute does not exist) when pushing documents' do + context 'when passed a non-existent attribute as primary key' do let(:documents) do { unique: 3, id: 1, title: 'Le Rouge et le Noir' } end - it 'does not add the primary key and the documents either' do - task = 
index.update_documents(documents, 'objectId') - client.wait_for_task(task['taskUid']) + it 'fails to add the documents and the primary key' do + task = index.update_documents(documents, 'objectId').await + expect(task).to be_failed expect(index.fetch_primary_key).to be_nil - expect(index.task(task['taskUid'])['status']).to eq('failed') end end - context 'Wrong primary-key (attribute bad formatted) when pushing documents' do + context 'when the specified primary key field is of an unsupported type' do let(:documents) do { id: 1, title: 'Le Rouge et le Noir' } end - it 'does not add the primary key and the documents either' do - task = index.add_documents(documents, 'title') - client.wait_for_task(task['taskUid']) + it 'fails to add the primary key and the documents' do + task = index.add_documents(documents, 'title').await + expect(task).to be_failed expect(index.fetch_primary_key).to be_nil - expect(index.task(task['taskUid'])['status']).to eq('failed') - expect(index.documents['results'].count).to eq(0) + expect(index.documents['results']).to be_empty end end - context 'Impossible to infer the primary-key' do + context 'when it is not possible to infer the primary key' do let(:documents) do { title: 'Le Rouge et le Noir' } end - it 'Impossible to push docs if the pk is missing' do - task = index.add_documents!(documents) - update = index.task(task['uid']) - expect(update['status']).to eq('failed') - expect(update['error']['code']).to eq('index_primary_key_no_candidate_found') + it 'fails to add documents' do + task = index.add_documents(documents).await + expect(task).to be_failed + expect(task.error['code']).to eq('index_primary_key_no_candidate_found') end end - context 'Impossible to update primary-key if already given during index creation' do + context 'when the primary key was specified on the index' do + let(:index) do + uid = random_uid + client.create_index uid, primary_key: 'id' + client.index(uid) + end + let(:documents) do { id: 1, unique: 1, title: 
'Le Rouge et le Noir' } end - it 'adds the documents anyway' do + it 'fails to add documents with another primary key' do task = index.add_documents(documents, 'unique') - expect(task).to be_a(Hash) - client.wait_for_task(task['taskUid']) - expect(index.fetch_primary_key).to eq('unique') - expect(index.documents['results'].count).to eq(1) + task.await + expect(index.fetch_primary_key).to eq('id') + expect(index.documents['results']).to be_empty end end end diff --git a/spec/meilisearch/index/search/attributes_to_crop_spec.rb b/spec/meilisearch/index/search/attributes_to_crop_spec.rb index 652f60c6..71d70bd9 100644 --- a/spec/meilisearch/index/search/attributes_to_crop_spec.rb +++ b/spec/meilisearch/index/search/attributes_to_crop_spec.rb @@ -10,7 +10,7 @@ } end - before { index.add_documents!(document) } + before { index.add_documents(document).await } it 'searches with default cropping params' do response = index.search('galaxy', attributes_to_crop: ['*'], crop_length: 6) @@ -53,7 +53,7 @@ expect(response['hits'].first['_formatted']['description']).to eq('…Guide to the Galaxy is a…') end - it 'does a placehodler search with attributes to crop' do + it 'does a placeholder search with attributes to crop' do response = index.search('', { attributes_to_crop: ['description'], crop_length: 5 }) expect(response['hits'].first).to have_key('_formatted') expect(response['hits'].first['description']).to eq(document[:description]) diff --git a/spec/meilisearch/index/search/facets_distribution_spec.rb b/spec/meilisearch/index/search/facets_distribution_spec.rb index dbe21fb8..26d865c6 100644 --- a/spec/meilisearch/index/search/facets_distribution_spec.rb +++ b/spec/meilisearch/index/search/facets_distribution_spec.rb @@ -4,8 +4,7 @@ include_context 'search books with author, genre, year' before do - response = index.update_filterable_attributes(['genre', 'year', 'author']) - index.wait_for_task(response['taskUid']) + index.update_filterable_attributes(['genre', 'year', 
'author']).await end it 'does a custom search with facets' do diff --git a/spec/meilisearch/index/search/filter_spec.rb b/spec/meilisearch/index/search/filter_spec.rb index 4fc4acdb..51ba9355 100644 --- a/spec/meilisearch/index/search/filter_spec.rb +++ b/spec/meilisearch/index/search/filter_spec.rb @@ -3,10 +3,7 @@ RSpec.describe 'MeiliSearch::Index - Filtered search' do include_context 'search books with author, genre, year' - before do - response = index.update_filterable_attributes(['genre', 'year', 'author']) - index.wait_for_task(response['taskUid']) - end + before { index.update_filterable_attributes(['genre', 'year', 'author']).await } it 'does a custom search with one filter' do response = index.search('le', { filter: 'genre = romance' }) diff --git a/spec/meilisearch/index/search/multi_params_spec.rb b/spec/meilisearch/index/search/multi_params_spec.rb index 13434f48..c554fa3e 100644 --- a/spec/meilisearch/index/search/multi_params_spec.rb +++ b/spec/meilisearch/index/search/multi_params_spec.rb @@ -3,10 +3,7 @@ RSpec.describe 'MeiliSearch::Index - Multi-paramaters search' do include_context 'search books with genre' - before do - response = index.update_filterable_attributes(['genre']) - index.wait_for_task(response['taskUid']) - end + before { index.update_filterable_attributes(['genre']).await } it 'does a custom search with attributes to crop, filter and attributes to highlight' do response = index.search('prince', @@ -45,8 +42,7 @@ end it 'does a custom search with filter, attributes_to_retrieve and attributes_to_highlight' do - response = index.update_filterable_attributes(['genre']) - index.wait_for_task(response['taskUid']) + index.update_filterable_attributes(['genre']).await response = index.search('prinec', { filter: ['genre = fantasy'], @@ -63,8 +59,7 @@ end it 'does a custom search with facets and limit' do - response = index.update_filterable_attributes(['genre']) - index.wait_for_task(response['taskUid']) + 
index.update_filterable_attributes(['genre']).await response = index.search('prinec', facets: ['genre'], limit: 1) expect(response.keys).to contain_exactly( diff --git a/spec/meilisearch/index/search/nested_fields_spec.rb b/spec/meilisearch/index/search/nested_fields_spec.rb index 711c4e2d..ab049d20 100644 --- a/spec/meilisearch/index/search/nested_fields_spec.rb +++ b/spec/meilisearch/index/search/nested_fields_spec.rb @@ -12,8 +12,8 @@ end it 'searches within index with searchableAttributes setting' do - wait_for_it index.update_searchable_attributes(['title', 'info.comment']) - wait_for_it index.add_documents(documents) + index.update_searchable_attributes(['title', 'info.comment']).await + index.add_documents(documents).await response = index.search('An awesome') @@ -23,9 +23,9 @@ end it 'searches within index with searchableAttributes and sortableAttributes settings' do - wait_for_it index.update_searchable_attributes(['title', 'info.comment']) - wait_for_it index.update_sortable_attributes(['info.reviewNb']) - wait_for_it index.add_documents(documents) + index.update_searchable_attributes(['title', 'info.comment']).await + index.update_sortable_attributes(['info.reviewNb']).await + index.add_documents(documents).await response = index.search('An awesome') diff --git a/spec/meilisearch/index/search/offset_spec.rb b/spec/meilisearch/index/search/offset_spec.rb index ec297bf6..4f33224a 100644 --- a/spec/meilisearch/index/search/offset_spec.rb +++ b/spec/meilisearch/index/search/offset_spec.rb @@ -16,8 +16,7 @@ end it 'does a placeholder search with an offset set to 3 and custom ranking rules' do - response = index.update_ranking_rules(['objectId:asc']) - index.wait_for_task(response['taskUid']) + index.update_ranking_rules(['objectId:asc']).await response = index.search('') response_with_offset = index.search('', offset: 3) expect(response['hits'].first['objectId']).to eq(1) diff --git a/spec/meilisearch/index/search/q_spec.rb 
b/spec/meilisearch/index/search/q_spec.rb index 3c7e5e0a..6791b095 100644 --- a/spec/meilisearch/index/search/q_spec.rb +++ b/spec/meilisearch/index/search/q_spec.rb @@ -38,16 +38,15 @@ end it 'does a basic search with an empty query and a custom ranking rule' do - response = index.update_ranking_rules([ - 'words', - 'typo', - 'sort', - 'proximity', - 'attribute', - 'exactness', - 'objectId:asc' - ]) - index.wait_for_task(response['taskUid']) + index.update_ranking_rules([ + 'words', + 'typo', + 'sort', + 'proximity', + 'attribute', + 'exactness', + 'objectId:asc' + ]).await response = index.search('') expect(response['estimatedTotalHits']).to eq(documents.count) expect(response['hits'].first['objectId']).to eq(1) diff --git a/spec/meilisearch/index/search/sort_spec.rb b/spec/meilisearch/index/search/sort_spec.rb index a196af07..efefa6ee 100644 --- a/spec/meilisearch/index/search/sort_spec.rb +++ b/spec/meilisearch/index/search/sort_spec.rb @@ -3,18 +3,17 @@ RSpec.describe 'MeiliSearch::Index - Sorted search' do include_context 'search books with author, genre, year' before do - response = index.update_sortable_attributes(['year', 'author']) - index.wait_for_task(response['taskUid']) + sortable_update = index.update_sortable_attributes(['year', 'author']) - response = index.update_ranking_rules([ - 'sort', - 'words', - 'typo', - 'proximity', - 'attribute', - 'exactness' - ]) - index.wait_for_task(response['taskUid']) + index.update_ranking_rules([ + 'sort', + 'words', + 'typo', + 'proximity', + 'attribute', + 'exactness' + ]).await + sortable_update.await end it 'does a custom search with one sort' do diff --git a/spec/meilisearch/index/search/vector_search.rb b/spec/meilisearch/index/search/vector_search.rb index 3b1f8564..b3d756cf 100644 --- a/spec/meilisearch/index/search/vector_search.rb +++ b/spec/meilisearch/index/search/vector_search.rb @@ -10,9 +10,9 @@ { objectId: 2, _vectors: [0.5, 3, 1], title: 'And Your Bird Can Sing' } ] - 
client.create_index!('vector_test_search') + client.create_index('vector_test_search').await new_index = client.index('vector_test_search') - new_index.add_documents!(documents) + new_index.add_documents(documents).await expect(new_index.search('q', vector: [0, 1, 2])['hits']).not_to be_empty end diff --git a/spec/meilisearch/index/settings_spec.rb b/spec/meilisearch/index/settings_spec.rb index 488c6eab..9ca7cbd3 100644 --- a/spec/meilisearch/index/settings_spec.rb +++ b/spec/meilisearch/index/settings_spec.rb @@ -39,266 +39,266 @@ context 'On global settings routes' do let(:index) { client.index(uid) } - before { client.create_index!(uid) } - - it 'gets default values of settings' do - settings = index.settings - expect(settings).to be_a(Hash) - expect(settings.keys).to include(*settings_keys) - expect(settings['rankingRules']).to eq(default_ranking_rules) - expect(settings['distinctAttribute']).to be_nil - expect(settings['searchableAttributes']).to eq(default_searchable_attributes) - expect(settings['displayedAttributes']).to eq(default_displayed_attributes) - expect(settings['stopWords']).to eq([]) - expect(settings['synonyms']).to eq({}) - expect(settings['pagination'].transform_keys(&:to_sym)).to eq(default_pagination) - expect(settings['filterableAttributes']).to eq([]) - expect(settings['sortableAttributes']).to eq([]) - expect(settings['proximityPrecision']).to eq(default_proximity_precision) - end - - it 'updates multiples settings at the same time' do - task = index.update_settings( - ranking_rules: ['title:asc', 'typo'], - distinct_attribute: 'title' + before { client.create_index(uid).await } + + it '#settings gets default values of settings' do + expect(index.settings).to include( + 'rankingRules' => default_ranking_rules, + 'distinctAttribute' => nil, + 'searchableAttributes' => default_searchable_attributes, + 'displayedAttributes' => default_displayed_attributes, + 'stopWords' => [], + 'synonyms' => {}, + 'pagination' => 
default_pagination.transform_keys(&:to_s), + 'filterableAttributes' => [], + 'sortableAttributes' => [], + 'dictionary' => [], + 'separatorTokens' => [], + 'nonSeparatorTokens' => [], + 'proximityPrecision' => default_proximity_precision ) - - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - settings = index.settings - expect(settings['rankingRules']).to eq(['title:asc', 'typo']) - expect(settings['distinctAttribute']).to eq('title') - expect(settings['stopWords']).to be_empty end - it 'updates one setting without reset the others' do - task = index.update_settings(stop_words: ['the']) + describe '#update_settings' do + it 'updates multiples settings at the same time' do + task = index.update_settings( + ranking_rules: ['title:asc', 'typo'], + distinct_attribute: 'title' + ) + + expect(task.type).to eq('settingsUpdate') + task.await + + expect(index.settings).to include( + 'rankingRules' => ['title:asc', 'typo'], + 'distinctAttribute' => 'title', + 'stopWords' => [] + ) + end + + it 'updates one setting without touching the others' do + task = index.update_settings(stop_words: ['the']) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - settings = index.settings - expect(settings['rankingRules']).to eq(default_ranking_rules) - expect(settings['distinctAttribute']).to be_nil - expect(settings['stopWords']).to eq(['the']) - expect(settings['synonyms']).to be_empty + expect(task.type).to eq('settingsUpdate') + task.await + + expect(index.settings).to include( + 'rankingRules' => default_ranking_rules, + 'distinctAttribute' => nil, + 'stopWords' => ['the'], + 'synonyms' => {} + ) + end end - it 'resets all settings' do - task = index.update_settings( + it '#reset_settings resets all settings' do + index.update_settings( ranking_rules: ['title:asc', 'typo'], distinct_attribute: 'title', stop_words: ['the', 'a'], synonyms: { wow: ['world of warcraft'] }, proximity_precision: 'byAttribute' - ) - 
client.wait_for_task(task['taskUid']) + ).await task = index.reset_settings - - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - - settings = index.settings - expect(settings['rankingRules']).to eq(default_ranking_rules) - expect(settings['distinctAttribute']).to be_nil - expect(settings['stopWords']).to be_empty - expect(settings['synonyms']).to be_empty - expect(settings['proximityPrecision']).to eq(default_proximity_precision) + expect(task.type).to eq('settingsUpdate') + task.await + + expect(index.settings).to include( + 'rankingRules' => default_ranking_rules, + 'distinctAttribute' => nil, + 'stopWords' => [], + 'synonyms' => {}, + 'proximityPrecision' => default_proximity_precision + ) end end - context 'On ranking-rules sub-routes' do + context 'On ranking rules' do let(:index) { client.index(uid) } let(:ranking_rules) { ['title:asc', 'words', 'typo'] } let(:wrong_ranking_rules) { ['title:asc', 'typos'] } - before { client.create_index!(uid) } + before { client.create_index(uid).await } - it 'gets default values of ranking rules' do - settings = index.ranking_rules - expect(settings).to eq(default_ranking_rules) + it '#ranking_rules gets default values of ranking rules' do + expect(index.ranking_rules).to eq(default_ranking_rules) end - it 'updates ranking rules' do - task = index.update_ranking_rules(ranking_rules) - - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - expect(index.ranking_rules).to eq(ranking_rules) - end + describe '#update_ranking_rules' do + it 'updates ranking rules' do + task = index.update_ranking_rules(ranking_rules) + expect(task.type).to eq('settingsUpdate') + task.await - it 'updates ranking rules at null' do - task = index.update_ranking_rules(ranking_rules) - client.wait_for_task(task['taskUid']) + expect(index.ranking_rules).to eq(ranking_rules) + end - task = index.update_ranking_rules(nil) + it 'resets ranking rules when passed nil' do + 
index.update_ranking_rules(ranking_rules).await + task = index.update_ranking_rules(nil) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('settingsUpdate') + task.await - expect(index.ranking_rules).to eq(default_ranking_rules) - end + expect(index.ranking_rules).to eq(default_ranking_rules) + end - it 'fails when updating with wrong ranking rules name' do - expect do - index.update_ranking_rules(wrong_ranking_rules) - end.to raise_meilisearch_api_error_with(400, 'invalid_settings_ranking_rules', 'invalid_request') + it 'fails when updating with wrong ranking rules name' do + expect do + index.update_ranking_rules(wrong_ranking_rules) + end.to raise_meilisearch_api_error_with(400, 'invalid_settings_ranking_rules', 'invalid_request') + end end - it 'resets ranking rules' do - task = index.update_ranking_rules(ranking_rules) - client.wait_for_task(task['taskUid']) - + it '#reset_ranking_rules resets ranking rules' do + index.update_ranking_rules(ranking_rules).await task = index.reset_ranking_rules - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('settingsUpdate') + task.await expect(index.ranking_rules).to eq(default_ranking_rules) end end - context 'On distinct-attribute sub-routes' do + context 'On distinct attribute' do let(:index) { client.index(uid) } let(:distinct_attribute) { 'title' } - it 'gets default values of distinct attribute' do - client.create_index!(uid) - settings = index.distinct_attribute + before { client.create_index(uid).await } - expect(settings).to be_nil + it '#distinct_attribute gets default values of distinct attribute' do + expect(index.distinct_attribute).to be_nil end - it 'updates distinct attribute' do - task = index.update_distinct_attribute(distinct_attribute) - - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + describe '#update_distinct_attribute' do + it 'updates 
distinct attribute' do + task = index.update_distinct_attribute(distinct_attribute) + expect(task.type).to eq('settingsUpdate') + task.await - expect(index.distinct_attribute).to eq(distinct_attribute) - end + expect(index.distinct_attribute).to eq(distinct_attribute) + end - it 'updates distinct attribute at null' do - task = index.update_distinct_attribute(distinct_attribute) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + it 'resets distinct attributes when passed nil' do + task = index.update_distinct_attribute(distinct_attribute) + expect(task.type).to eq('settingsUpdate') + task.await - task = index.update_distinct_attribute(nil) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + task = index.update_distinct_attribute(nil) + expect(task.type).to eq('settingsUpdate') + task.await - expect(index.distinct_attribute).to be_nil + expect(index.distinct_attribute).to be_nil + end end - it 'resets distinct attribute' do + it '#reset_distinct_attribute resets distinct attribute' do task = index.update_distinct_attribute(distinct_attribute) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('settingsUpdate') + task.await task = index.reset_distinct_attribute - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('settingsUpdate') + task.await expect(index.distinct_attribute).to be_nil end end - context 'On searchable-attributes sub-routes' do + context 'On searchable attributes' do let(:index) { client.index(uid) } let(:searchable_attributes) { ['title', 'description'] } - before { client.create_index!(uid) } + before { client.create_index(uid).await } - it 'gets default values of searchable attributes' do - settings = index.searchable_attributes - expect(settings).to eq(default_searchable_attributes) + it '#searchable_attributes gets default values of searchable 
attributes' do + expect(index.searchable_attributes).to eq(default_searchable_attributes) end - it 'updates searchable attributes' do - task = index.update_searchable_attributes(searchable_attributes) - - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - expect(index.searchable_attributes).to eq(searchable_attributes) - end + describe '#update_searchable_attributes' do + it 'updates searchable attributes' do + task = index.update_searchable_attributes(searchable_attributes) + expect(task.type).to eq('settingsUpdate') + task.await - it 'updates searchable attributes at null' do - task = index.update_searchable_attributes(searchable_attributes) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + expect(index.searchable_attributes).to eq(searchable_attributes) + end - task = index.update_searchable_attributes(nil) - expect(task['type']).to eq('settingsUpdate') + it 'resets searchable attributes when passed nil' do + task = index.update_searchable_attributes(searchable_attributes) + expect(task.type).to eq('settingsUpdate') + task.await - client.wait_for_task(task['taskUid']) + task = index.update_searchable_attributes(nil) + expect(task.type).to eq('settingsUpdate') + task.await - expect(index.searchable_attributes).to eq(default_searchable_attributes) + expect(index.searchable_attributes).to eq(default_searchable_attributes) + end end - it 'resets searchable attributes' do + it '#reset_searchable_attributes resets searchable attributes' do task = index.update_searchable_attributes(searchable_attributes) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('settingsUpdate') + task.await task = index.reset_searchable_attributes + expect(task.type).to eq('settingsUpdate') + expect(task.await).to be_succeeded - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - - 
expect(index.task(task['taskUid'])['status']).to eq('succeeded') expect(index.searchable_attributes).to eq(default_searchable_attributes) end end - context 'On displayed-attributes sub-routes' do + context 'On displayed attributes' do let(:index) { client.index(uid) } let(:displayed_attributes) { ['title', 'description'] } - before { client.create_index!(uid) } + before { client.create_index(uid).await } - it 'gets default values of displayed attributes' do - settings = index.displayed_attributes - expect(settings).to eq(default_displayed_attributes) + it '#displayed_attributes gets default values of displayed attributes' do + expect(index.displayed_attributes).to eq(default_displayed_attributes) end - it 'updates displayed attributes' do - task = index.update_displayed_attributes(displayed_attributes) + describe '#update_displayed_attributes' do + it 'updates displayed attributes' do + task = index.update_displayed_attributes(displayed_attributes) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('settingsUpdate') + task.await - expect(index.displayed_attributes).to contain_exactly(*displayed_attributes) - end - - it 'updates displayed attributes at null' do - task = index.update_displayed_attributes(displayed_attributes) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + expect(index.displayed_attributes).to contain_exactly(*displayed_attributes) + end - task = index.update_displayed_attributes(nil) + it 'resets displayed attributes when passed nil' do + task = index.update_displayed_attributes(displayed_attributes) + expect(task.type).to eq('settingsUpdate') + task.await - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + task = index.update_displayed_attributes(nil) + expect(task.type).to eq('settingsUpdate') + task.await - expect(index.displayed_attributes).to eq(default_displayed_attributes) + 
expect(index.displayed_attributes).to eq(default_displayed_attributes) + end end - it 'resets displayed attributes' do + it '#reset_displayed_attributes resets displayed attributes' do task = index.update_displayed_attributes(displayed_attributes) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('settingsUpdate') + task.await task = index.reset_displayed_attributes + expect(task.type).to eq('settingsUpdate') + expect(task.await).to be_succeeded - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - - expect(index.task(task['taskUid'])['status']).to eq('succeeded') expect(index.displayed_attributes).to eq(default_displayed_attributes) end end - context 'On synonyms sub-routes' do + context 'On synonyms' do let(:index) { client.index(uid) } let(:synonyms) do { @@ -308,406 +308,262 @@ } end - before { client.create_index!(uid) } - - it 'gets an empty hash of synonyms by default' do - settings = index.synonyms - expect(settings).to be_a(Hash) - expect(settings).to be_empty - end + before { client.create_index(uid).await } - it 'returns an uid when updating' do - task = index.update_synonyms(synonyms) - expect(task).to be_a(Hash) + describe '#synonyms' do + it 'gets an empty hash of synonyms by default' do + expect(index.synonyms).to eq({}) + end - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + it 'gets all the synonyms' do + index.update_synonyms(synonyms).await + expect(index.synonyms).to match( + 'wow' => ['world of warcraft'], + 'wolverine' => ['xmen', 'logan'], + 'logan' => ['wolverine', 'xmen'] + ) + end end - it 'gets all the synonyms' do - update_synonyms(index, synonyms) - settings = index.synonyms - expect(settings).to be_a(Hash) - expect(settings.count).to eq(3) - expect(settings.keys).to contain_exactly('wow', 'wolverine', 'logan') - expect(settings['wow']).to be_a(Array) - expect(settings['wow']).to eq(['world of 
warcraft']) - end + describe '#update_synonyms' do + it 'overwrites all existing synonyms' do + index.update_synonyms(synonyms).await + index.update_synonyms(hp: ['harry potter'], 'harry potter': ['hp']).await - it 'overwrites all synonyms when updating' do - update_synonyms(index, synonyms) - update_synonyms(index, hp: ['harry potter'], 'harry potter': ['hp']) - synonyms = index.synonyms - expect(synonyms).to be_a(Hash) - expect(synonyms.count).to eq(2) - expect(synonyms.keys).to contain_exactly('hp', 'harry potter') - expect(synonyms['hp']).to be_a(Array) - expect(synonyms['hp']).to eq(['harry potter']) - end + expect(index.synonyms).to match( + 'hp' => ['harry potter'], 'harry potter' => ['hp'] + ) + end - it 'updates synonyms at null' do - update_synonyms(index, synonyms) + it 'resets synonyms when passed nil' do + index.update_synonyms(synonyms).await + expect(index.synonyms).not_to be_empty - expect do - update_synonyms(index, nil) - end.to(change { index.synonyms.length }.from(3).to(0)) + index.update_synonyms(nil).await + expect(index.synonyms).to eq({}) + end end - it 'deletes all the synonyms' do - update_synonyms(index, synonyms) - - expect do - task = index.reset_synonyms - - expect(task).to be_a(Hash) - - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + it '#reset_synonyms deletes all the synonyms' do + index.update_synonyms(synonyms).await + expect(index.synonyms).not_to be_empty - expect(index.synonyms).to be_a(Hash) - end.to(change { index.synonyms.length }.from(3).to(0)) + index.reset_synonyms.await + expect(index.synonyms).to eq({}) end end - context 'On stop-words sub-routes' do + context 'On stop words' do let(:index) { client.index(uid) } let(:stop_words_array) { ['the', 'of'] } let(:stop_words_string) { 'a' } - before { client.create_index!(uid) } + before { client.create_index(uid).await } - it 'gets an empty array when there is no stop-words' do - settings = index.stop_words - expect(settings).to 
be_a(Array) - expect(settings).to be_empty - end - - it 'updates stop-words when the body is valid (as an array)' do - task = index.update_stop_words(stop_words_array) - expect(task).to be_a(Hash) + describe '#stop_words' do + it 'gets an empty array when there is no stop-words' do + expect(index.stop_words).to eq([]) + end - expect(task['type']).to eq('settingsUpdate') - end + it 'gets list of stop-words' do + task = index.update_stop_words(stop_words_array) + expect(task.type).to eq('settingsUpdate') + task.await - it 'gets list of stop-words' do - task = index.update_stop_words(stop_words_array) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - settings = index.stop_words - expect(settings).to be_a(Array) - expect(settings).to contain_exactly(*stop_words_array) + expect(index.stop_words).to contain_exactly(*stop_words_array) + end end - it 'updates stop-words when the body is valid (as single string)' do - task = index.update_stop_words(stop_words_string) - expect(task).to be_a(Hash) - - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - sw = index.stop_words - expect(sw).to be_a(Array) - expect(sw).to contain_exactly(stop_words_string) - end + describe '#update_stop_words' do + it 'updates stop words when passed an array' do + index.update_stop_words(stop_words_array).await + expect(index.stop_words).to contain_exactly(*stop_words_array) + end - it 'updates stop-words at null' do - task = index.update_stop_words(stop_words_string) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + it 'updates stop-words when passed a string' do + index.update_stop_words(stop_words_string).await + expect(index.stop_words).to contain_exactly(stop_words_string) + end - task = index.update_stop_words(nil) + it 'resets stop words when passed nil' do + task = index.update_stop_words(stop_words_string) + expect(task.type).to eq('settingsUpdate') + task.await - 
expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + task = index.update_stop_words(nil) + expect(task.type).to eq('settingsUpdate') + task.await - expect(index.stop_words).to be_empty - end + expect(index.stop_words).to be_empty + end - it 'returns an error when the body is invalid' do - expect do - index.update_stop_words(test: 'test') - end.to raise_meilisearch_api_error_with(400, 'invalid_settings_stop_words', 'invalid_request') + it 'raises an error when the body is invalid' do + expect do + index.update_stop_words(test: 'test') + end.to raise_meilisearch_api_error_with(400, 'invalid_settings_stop_words', 'invalid_request') + end end - it 'resets stop-words' do + it '#reset_stop_words resets stop-words' do task = index.update_stop_words(stop_words_string) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('settingsUpdate') + task.await - task = index.reset_stop_words - expect(task).to be_a(Hash) + expect(index.stop_words).to contain_exactly(stop_words_string) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + task = index.reset_stop_words + expect(task.type).to eq('settingsUpdate') + task.await - expect(index.stop_words).to be_a(Array) - expect(index.stop_words).to be_empty + expect(index.stop_words).to eq([]) end end - context 'On filterable-attributes sub-routes' do + context 'On filterable attributes' do let(:index) { client.index(uid) } let(:filterable_attributes) { ['title', 'description'] } - before { client.create_index!(uid) } + before { client.create_index(uid).await } - it 'gets default values of filterable attributes' do - settings = index.filterable_attributes - expect(settings).to be_a(Array) - expect(settings).to be_empty + it '#filterable_attributes gets default values of filterable attributes' do + expect(index.filterable_attributes).to eq([]) end - it 'updates filterable attributes' do - task = 
index.update_filterable_attributes(filterable_attributes) - - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - expect(index.filterable_attributes).to contain_exactly(*filterable_attributes) - end - - it 'updates filterable attributes at null' do - task = index.update_filterable_attributes(filterable_attributes) + describe '#update_filterable_attributes' do + it 'updates filterable attributes' do + task = index.update_filterable_attributes(filterable_attributes) + expect(task.type).to eq('settingsUpdate') + task.await - expect(task['type']).to eq('settingsUpdate') + expect(index.filterable_attributes).to contain_exactly(*filterable_attributes) + end - task = index.update_filterable_attributes(nil) + it 'resets filterable attributes when passed nil' do + task = index.update_filterable_attributes(filterable_attributes) + expect(task.type).to eq('settingsUpdate') + task.await + expect(index.filterable_attributes).to contain_exactly(*filterable_attributes) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + task = index.update_filterable_attributes(nil) + expect(task.type).to eq('settingsUpdate') + task.await - expect(index.filterable_attributes).to be_empty + expect(index.filterable_attributes).to be_empty + end end - it 'resets filterable attributes' do + it '#reset_filterable_attributes resets filterable attributes' do task = index.update_filterable_attributes(filterable_attributes) - - expect(task['type']).to eq('settingsUpdate') + expect(task.type).to eq('settingsUpdate') + task.await + expect(index.filterable_attributes).to contain_exactly(*filterable_attributes) task = index.reset_filterable_attributes + expect(task.type).to eq('settingsUpdate') + task.await - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - - expect(index.task(task['taskUid'])['status']).to eq('succeeded') expect(index.filterable_attributes).to be_empty end end - context 'On 
sortable-attributes sub-routes' do + context 'On sortable attributes' do let(:index) { client.index(uid) } let(:sortable_attributes) { ['title', 'description'] } - before { client.create_index!(uid) } + before { client.create_index(uid).await } it 'gets default values of sortable attributes' do - settings = index.sortable_attributes - expect(settings).to be_a(Array) - expect(settings).to be_empty + expect(index.sortable_attributes).to eq([]) end - it 'updates sortable attributes' do - task = index.update_sortable_attributes(sortable_attributes) + describe '#update_sortable_attributes' do + it 'updates sortable attributes' do + task = index.update_sortable_attributes(sortable_attributes) + expect(task.type).to eq('settingsUpdate') + task.await - client.wait_for_task(task['taskUid']) - expect(task['type']).to eq('settingsUpdate') - expect(index.sortable_attributes).to contain_exactly(*sortable_attributes) - end + expect(index.sortable_attributes).to contain_exactly(*sortable_attributes) + end - it 'updates sortable attributes at null' do - task = index.update_sortable_attributes(sortable_attributes) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + it 'resets sortable attributes when passed nil' do + task = index.update_sortable_attributes(sortable_attributes) + expect(task.type).to eq('settingsUpdate') + task.await - task = index.update_sortable_attributes(nil) + expect(index.sortable_attributes).to contain_exactly(*sortable_attributes) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + task = index.update_sortable_attributes(nil) + expect(task.type).to eq('settingsUpdate') + task.await - expect(index.sortable_attributes).to be_empty + expect(index.sortable_attributes).to be_empty + end end it 'resets sortable attributes' do task = index.update_sortable_attributes(sortable_attributes) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + 
expect(task.type).to eq('settingsUpdate') + task.await - task = index.reset_sortable_attributes + expect(index.sortable_attributes).to contain_exactly(*sortable_attributes) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + task = index.reset_sortable_attributes + expect(task.type).to eq('settingsUpdate') + task.await - expect(index.task(task['taskUid'])['status']).to eq('succeeded') expect(index.sortable_attributes).to be_empty end end - context 'Index with primary-key' do - let(:index) { client.index(uid) } - - before { client.create_index!(uid, primary_key: 'id') } - - it 'gets the default values of settings' do - settings = index.settings - expect(settings).to be_a(Hash) - expect(settings.keys).to include(*settings_keys) - expect(settings['rankingRules']).to eq(default_ranking_rules) - expect(settings['distinctAttribute']).to be_nil - expect(settings['searchableAttributes']).to eq(default_searchable_attributes) - expect(settings['displayedAttributes']).to eq(default_displayed_attributes) - expect(settings['stopWords']).to eq([]) - expect(settings['synonyms']).to eq({}) - end - - it 'updates multiples settings at the same time' do - task = index.update_settings( - ranking_rules: ['title:asc', 'typo'], - distinct_attribute: 'title' - ) - - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - settings = index.settings - expect(settings['rankingRules']).to eq(['title:asc', 'typo']) - expect(settings['distinctAttribute']).to eq('title') - expect(settings['stopWords']).to be_empty - end - - it 'updates one setting without reset the others' do - task = index.update_settings(stop_words: ['the']) - - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - settings = index.settings - expect(settings['rankingRules']).to eq(default_ranking_rules) - expect(settings['distinctAttribute']).to be_nil - expect(settings['stopWords']).to eq(['the']) - 
expect(settings['synonyms']).to be_empty - end - - it 'resets all settings' do - task = index.update_settings( - ranking_rules: ['title:asc', 'typo'], - distinct_attribute: 'title', - stop_words: ['the'], - synonyms: { - wow: ['world of warcraft'] - } - ) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - - task = index.reset_settings - - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - - settings = index.settings - expect(settings['rankingRules']).to eq(default_ranking_rules) - expect(settings['distinctAttribute']).to be_nil - expect(settings['stopWords']).to be_empty - expect(settings['synonyms']).to be_empty - end - end - - context 'Manipulation of searchable/displayed attributes with the primary-key' do - let(:index) { client.index(random_uid) } - - it 'does not add document when there is no primary-key' do - task = index.add_documents(title: 'Test') - task = client.wait_for_task(task['taskUid']) - - expect(task.keys).to include('error') - expect(task['error']['code']).to eq('index_primary_key_no_candidate_found') - end - - it 'adds documents when there is a primary-key' do - task = index.add_documents(objectId: 1, title: 'Test') - - client.wait_for_task(task['taskUid']) - expect(index.documents['results'].count).to eq(1) - end - - it 'resets searchable/displayed attributes' do - task = index.update_displayed_attributes(['title', 'description']) - client.wait_for_task(task['taskUid']) - task = index.update_searchable_attributes(['title']) - - client.wait_for_task(task['taskUid']) - - task = index.reset_displayed_attributes - - client.wait_for_task(task['taskUid']) - expect(index.task(task['taskUid'])['status']).to eq('succeeded') - - task = index.reset_searchable_attributes - - client.wait_for_task(task['taskUid']) - expect(index.task(task['taskUid'])['status']).to eq('succeeded') - - expect(index.displayed_attributes).to eq(['*']) - expect(index.searchable_attributes).to eq(['*']) - 
end - end - context 'Aliases' do let(:index) { client.index(uid) } - before { client.create_index!(uid) } + before { client.create_index(uid).await } it 'works with method aliases' do - expect(index.method(:settings) == index.method(:get_settings)).to be_truthy - expect(index.method(:ranking_rules) == index.method(:get_ranking_rules)).to be_truthy - expect(index.method(:distinct_attribute) == index.method(:get_distinct_attribute)).to be_truthy - expect(index.method(:searchable_attributes) == index.method(:get_searchable_attributes)).to be_truthy - expect(index.method(:displayed_attributes) == index.method(:get_displayed_attributes)).to be_truthy - expect(index.method(:synonyms) == index.method(:get_synonyms)).to be_truthy - expect(index.method(:stop_words) == index.method(:get_stop_words)).to be_truthy - expect(index.method(:filterable_attributes) == index.method(:get_filterable_attributes)).to be_truthy + expect(index.method(:settings)).to eq index.method(:get_settings) + expect(index.method(:ranking_rules)).to eq index.method(:get_ranking_rules) + expect(index.method(:distinct_attribute)).to eq index.method(:get_distinct_attribute) + expect(index.method(:searchable_attributes)).to eq index.method(:get_searchable_attributes) + expect(index.method(:displayed_attributes)).to eq index.method(:get_displayed_attributes) + expect(index.method(:synonyms)).to eq index.method(:get_synonyms) + expect(index.method(:stop_words)).to eq index.method(:get_stop_words) + expect(index.method(:filterable_attributes)).to eq index.method(:get_filterable_attributes) end end - def update_synonyms(index, synonyms) - task = index.update_synonyms(synonyms) - - client.wait_for_task(task['taskUid']) - end - - context 'On pagination sub-routes' do + context 'On pagination' do let(:index) { client.index(uid) } let(:pagination) { { maxTotalHits: 3141 } } + let(:pagination_with_string_keys) { pagination.transform_keys(&:to_s) } - before { client.create_index!(uid) } - - it 'gets default values 
of pagination' do - settings = index.pagination.transform_keys(&:to_sym) - - expect(settings).to eq(default_pagination) - end - - it 'updates pagination' do - task = index.update_pagination(pagination) - client.wait_for_task(task['taskUid']) + before { client.create_index(uid).await } - expect(index.pagination.transform_keys(&:to_sym)).to eq(pagination) + it '#pagination gets default values of pagination' do + expect(index.pagination).to eq(default_pagination.transform_keys(&:to_s)) end - it 'updates pagination at null' do - task = index.update_pagination(pagination) - client.wait_for_task(task['taskUid']) + describe '#update_pagination' do + it 'updates pagination' do + index.update_pagination(pagination).await + expect(index.pagination).to eq(pagination_with_string_keys) + end - task = index.update_pagination(nil) - client.wait_for_task(task['taskUid']) + it 'resets pagination when passed nil' do + index.update_pagination(pagination).await + expect(index.pagination).to eq(pagination_with_string_keys) - expect(index.pagination.transform_keys(&:to_sym)).to eq(default_pagination) + index.update_pagination(nil).await + expect(index.pagination).to eq(default_pagination.transform_keys(&:to_s)) + end end - it 'resets pagination' do - task = index.update_pagination(pagination) - client.wait_for_task(task['taskUid']) - - task = index.reset_pagination - client.wait_for_task(task['taskUid']) + it '#reset_pagination resets pagination' do + index.update_pagination(pagination).await + expect(index.pagination).to eq(pagination_with_string_keys) - expect(index.pagination.transform_keys(&:to_sym)).to eq(default_pagination) + index.reset_pagination.await + expect(index.pagination).to eq(default_pagination.transform_keys(&:to_s)) end end @@ -739,28 +595,23 @@ def update_synonyms(index, synonyms) } end - before { client.create_index!(uid) } - - it 'gets default typo tolerance settings' do - settings = index.typo_tolerance + before { client.create_index(uid).await } - 
expect(settings).to eq(default_typo_tolerance) + it '#typo_tolerance gets default typo tolerance settings' do + expect(index.typo_tolerance).to eq(default_typo_tolerance) end - it 'updates typo tolerance settings' do - update_task = index.update_typo_tolerance(new_typo_tolerance) - client.wait_for_task(update_task['taskUid']) + it '#update_type_tolerance updates typo tolerance settings' do + index.update_typo_tolerance(new_typo_tolerance).await expect(index.typo_tolerance).to eq(MeiliSearch::Utils.transform_attributes(new_typo_tolerance)) end - it 'resets typo tolerance settings' do - update_task = index.update_typo_tolerance(new_typo_tolerance) - client.wait_for_task(update_task['taskUid']) - - reset_task = index.reset_typo_tolerance - client.wait_for_task(reset_task['taskUid']) + it '#reset_typo_tolerance resets typo tolerance settings' do + index.update_typo_tolerance(new_typo_tolerance).await + expect(index.typo_tolerance).to eq(MeiliSearch::Utils.transform_attributes(new_typo_tolerance)) + index.reset_typo_tolerance.await expect(index.typo_tolerance).to eq(default_typo_tolerance) end end @@ -768,122 +619,102 @@ def update_synonyms(index, synonyms) context 'On faceting' do let(:index) { client.index(uid) } let(:default_faceting) { { maxValuesPerFacet: 100, sortFacetValuesBy: { '*' => 'alpha' } } } + let(:default_faceting_with_string_keys) { default_faceting.transform_keys(&:to_s) } - before { client.create_index!(uid) } + before { client.create_index(uid).await } - it 'gets default values of faceting' do - settings = index.faceting.transform_keys(&:to_sym) - - expect(settings.keys).to include(*default_faceting.keys) + it '#faceting gets default values of faceting' do + expect(index.faceting).to eq(default_faceting_with_string_keys) end - it 'updates faceting' do - update_task = index.update_faceting({ 'max_values_per_facet' => 333 }) - client.wait_for_task(update_task['taskUid']) - - expect(index.faceting['maxValuesPerFacet']).to eq(333) - 
expect(index.faceting.transform_keys(&:to_sym).keys).to include(*default_faceting.keys) - end + describe '#update_faceting' do + it 'updates faceting' do + index.update_faceting({ 'max_values_per_facet' => 333 }).await + new_faceting = default_faceting_with_string_keys.merge('maxValuesPerFacet' => 333) - it 'updates faceting at null' do - update_task = index.update_faceting({ 'max_values_per_facet' => 444 }) - client.wait_for_task(update_task['taskUid']) + expect(index.faceting).to eq(new_faceting) + end - update_task = index.update_faceting(nil) - client.wait_for_task(update_task['taskUid']) + it 'resets faceting when passed nil' do + index.update_faceting({ 'max_values_per_facet' => 333 }).await + new_faceting = default_faceting_with_string_keys.merge('maxValuesPerFacet' => 333) + expect(index.faceting).to eq(new_faceting) - expect(index.faceting.transform_keys(&:to_sym).keys).to include(*default_faceting.keys) + index.update_faceting(nil).await + expect(index.faceting).to eq(default_faceting_with_string_keys) + end end - it 'resets faceting' do - update_task = index.update_faceting({ 'max_values_per_facet' => 444 }) - client.wait_for_task(update_task['taskUid']) - - reset_task = index.reset_faceting - client.wait_for_task(reset_task['taskUid']) + it '#reset_faceting resets faceting' do + index.update_faceting({ 'max_values_per_facet' => 333 }).await + new_faceting = default_faceting_with_string_keys.merge('maxValuesPerFacet' => 333) + expect(index.faceting).to eq(new_faceting) - expect(index.faceting.transform_keys(&:to_sym).keys).to include(*default_faceting.keys) + index.reset_faceting.await + expect(index.faceting).to eq(default_faceting_with_string_keys) end end context 'On user-defined dictionary' do let(:index) { client.index(uid) } - before { client.create_index!(uid) } + before { client.create_index(uid).await } it 'has no default value' do - settings = index.dictionary - - expect(settings).to be_empty + expect(index.dictionary).to eq([]) end - it 
'updates dictionary' do - update_task = index.update_dictionary(['J. R. R.', 'W. E. B.']) - client.wait_for_task(update_task['taskUid']) - + it '#update_dictionary updates dictionary' do + index.update_dictionary(['J. R. R.', 'W. E. B.']).await expect(index.dictionary).to contain_exactly('J. R. R.', 'W. E. B.') end - it 'resets dictionary' do - update_task = index.update_dictionary(['J. R. R.', 'W. E. B.']) - client.wait_for_task(update_task['taskUid']) - - reset_task = index.reset_dictionary - client.wait_for_task(reset_task['taskUid']) + it '#reset_dictionary resets dictionary' do + index.update_dictionary(['J. R. R.', 'W. E. B.']).await + expect(index.dictionary).to contain_exactly('J. R. R.', 'W. E. B.') - expect(index.dictionary).to be_empty + index.reset_dictionary.await + expect(index.dictionary).to eq([]) end end context 'On separator tokens' do let(:index) { client.index(uid) } - before { client.create_index!(uid) } - - describe 'separator_tokens' do - it 'has no default value' do - expect(index.separator_tokens).to be_empty - end - - it 'updates separator tokens' do - update_task = index.update_separator_tokens ['|', '…'] - client.wait_for_task(update_task['taskUid']) + before { client.create_index(uid).await } - expect(index.separator_tokens).to contain_exactly('|', '…') - end - - it 'resets separator tokens' do - update_task = index.update_separator_tokens ['|', '…'] - client.wait_for_task(update_task['taskUid']) - - reset_task = index.reset_separator_tokens - client.wait_for_task(reset_task['taskUid']) + it '#separator_tokens has no default value' do + expect(index.separator_tokens).to eq([]) + end - expect(index.separator_tokens).to be_empty - end + it '#update_separator_tokens updates separator tokens' do + index.update_separator_tokens(['|', '…']).await + expect(index.separator_tokens).to contain_exactly('|', '…') end - describe '#non_separator_tokens' do - it 'has no default value' do - expect(index.non_separator_tokens).to be_empty - end + it 
'#reset_separator_tokens resets separator tokens' do + index.update_separator_tokens(['|', '…']).await + expect(index.separator_tokens).to contain_exactly('|', '…') - it 'updates non separator tokens' do - update_task = index.update_non_separator_tokens ['@', '#'] - client.wait_for_task(update_task['taskUid']) + index.reset_separator_tokens.await + expect(index.separator_tokens).to eq([]) + end - expect(index.non_separator_tokens).to contain_exactly('@', '#') - end + it '#non_separator_tokens has no default value' do + expect(index.non_separator_tokens).to eq([]) + end - it 'resets non separator tokens' do - update_task = index.update_non_separator_tokens ['@', '#'] - client.wait_for_task(update_task['taskUid']) + it '#update_non_separator_tokens updates non separator tokens' do + index.update_non_separator_tokens(['@', '#']).await + expect(index.non_separator_tokens).to contain_exactly('@', '#') + end - reset_task = index.reset_non_separator_tokens - client.wait_for_task(reset_task['taskUid']) + it '#reset_non_separator_tokens resets non separator tokens' do + index.update_non_separator_tokens(['@', '#']).await + expect(index.non_separator_tokens).to contain_exactly('@', '#') - expect(index.non_separator_tokens).to be_empty - end + index.reset_non_separator_tokens.await + expect(index.non_separator_tokens).to eq([]) end describe '#proximity_precision' do diff --git a/spec/meilisearch/models/task_spec.rb b/spec/meilisearch/models/task_spec.rb new file mode 100644 index 00000000..33f47c80 --- /dev/null +++ b/spec/meilisearch/models/task_spec.rb @@ -0,0 +1,485 @@ +# frozen_string_literal: true + +describe MeiliSearch::Models::Task do + let(:new_index_uid) { random_uid } + let(:task_hash) { client.http_post '/indexes', { 'uid' => new_index_uid } } + let(:endpoint) { MeiliSearch::Task.new(URL, MASTER_KEY, client.options) } + + subject { described_class.new task_hash, endpoint } + + let(:enqueued_endpoint) { instance_double(MeiliSearch::Task, task: task_hash) } + 
let(:enqueued_task) { described_class.new task_hash, enqueued_endpoint } + + let(:processing_endpoint) { instance_double(MeiliSearch::Task, task: task_hash.update('status' => 'processing')) } + let(:processing_task) { described_class.new task_hash, processing_endpoint } + + let(:logger) { instance_double(Logger, warn: nil) } + before { MeiliSearch::Utils.logger = logger } + after { MeiliSearch::Utils.logger = nil } + + describe '.initialize' do + it 'requires a uid in the task hash' do + task_hash.delete 'taskUid' + + expect { subject }.to raise_error(ArgumentError) + end + + it 'requires a type in the task hash' do + task_hash.delete 'type' + + expect { subject }.to raise_error(ArgumentError) + end + + it 'requires a status in the task hash' do + task_hash.delete 'status' + + expect { subject }.to raise_error(ArgumentError) + end + + it 'sets "taskUid" key when given a "uid"' do + expect(subject).to have_key('uid') + end + + it 'sets "uid" key when given a "taskUid"' do + task_hash['uid'] = task_hash.delete 'taskUid' + + expect(subject).to have_key('taskUid') + end + end + + describe 'forwarding' do + it 'allows accessing values in the internal task hash' do + subject + + task_hash.each do |key, value| + expect(subject[key]).to eq(value) + end + end + end + + describe '#enqueued?' do + context 'when the task is processing' do + before { task_hash['status'] = 'processing' } + + it { is_expected.not_to be_enqueued } + + it 'does not refresh the task' do + allow(subject).to receive(:refresh) + subject.enqueued? + expect(subject).not_to have_received(:refresh) + end + end + + context 'when the task has succeeded' do + before { task_hash['status'] = 'succeeded' } + + it { is_expected.not_to be_enqueued } + + it 'does not refresh the task' do + allow(subject).to receive(:refresh) + subject.enqueued? 
+ expect(subject).not_to have_received(:refresh) + end + end + + context 'when the task has failed' do + before { task_hash['status'] = 'failed' } + + it { is_expected.not_to be_enqueued } + + it 'does not refresh the task' do + allow(subject).to receive(:refresh) + subject.enqueued? + expect(subject).not_to have_received(:refresh) + end + end + + it 'returns true when the task is enqueued' do + expect(enqueued_task).to be_enqueued + end + + context 'when the task has succeeded but not refreshed' do + let(:successful_task_hash) { task_hash.merge('status' => 'succeeded') } + let(:endpoint) { instance_double(MeiliSearch::Task, task: successful_task_hash) } + + it { is_expected.not_to be_enqueued } + end + end + + describe '#processing?' do + context 'when the task has succeeded' do + before { task_hash['status'] = 'succeeded' } + + it { is_expected.not_to be_processing } + + it 'does not refresh the task' do + allow(subject).to receive(:refresh) + subject.processing? + expect(subject).not_to have_received(:refresh) + end + end + + context 'when the task has failed' do + before { task_hash['status'] = 'failed' } + + it { is_expected.not_to be_processing } + + it 'does not refresh the task' do + allow(subject).to receive(:refresh) + subject.processing? 
+ expect(subject).not_to have_received(:refresh) + end + end + + it 'returns false when the task has not begun to process' do + expect(enqueued_task).not_to be_processing + end + + it 'returns true when the task is processing' do + expect(processing_task).to be_processing + end + + context 'when the task has begun processing but has not refreshed' do + let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash.merge('status' => 'processing')) } + + it { is_expected.to be_processing } + end + + context 'when the task has succeeded but not refreshed' do + let(:successful_task_hash) { task_hash.merge('status' => 'succeeded') } + let(:endpoint) { instance_double(MeiliSearch::Task, task: successful_task_hash) } + + it 'refreshes and returns false' do + expect(subject).not_to be_enqueued + end + end + end + + describe '#unfinished?' do + it 'returns false if the task has succeeded' do + task_hash['status'] = 'succeeded' + expect(subject).not_to be_unfinished + end + + it 'returns false when the task has failed' do + task_hash['status'] = 'failed' + expect(subject).not_to be_unfinished + end + + it 'returns true when the task is enqueued' do + expect(enqueued_task).to be_unfinished + end + + it 'returns true when the task is processing' do + expect(processing_task).to be_unfinished + end + + context 'when the task has succeeded but not refreshed' do + let(:successful_task_hash) { task_hash.merge('status' => 'succeeded') } + let(:endpoint) { instance_double(MeiliSearch::Task, task: successful_task_hash) } + + it { is_expected.not_to be_unfinished } + end + end + + describe '#finished?' 
do + it 'returns true when the task has succeeded' do + task_hash['status'] = 'succeeded' + expect(subject).to be_finished + end + + it 'returns true when the task has failed' do + task_hash['status'] = 'failed' + expect(subject).to be_finished + end + + it 'returns false when the task is enqueued' do + expect(enqueued_task).not_to be_finished + end + + it 'returns false when the task is processing' do + expect(processing_task).not_to be_finished + end + + context 'when the task has succeeded but not refreshed' do + let(:successful_task_hash) { task_hash.merge('status' => 'succeeded') } + let(:endpoint) { instance_double(MeiliSearch::Task, task: successful_task_hash) } + + it { is_expected.to be_finished } + end + end + + describe '#failed?' do + it 'returns false if the task has succeeded or been cancelled' do + task_hash['status'] = 'succeeded' + expect(subject).not_to be_failed + task_hash['status'] = 'cancelled' + expect(subject).not_to be_failed + end + + it 'returns true if the task has failed' do + task_hash['status'] = 'failed' + expect(subject).to be_failed + end + + context 'when the task is not finished' do + let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash) } + + it { is_expected.not_to be_failed } + + it 'warns that the task is not finished' do + subject.failed? + + expect(logger).to have_received(:warn).with(a_string_including('checked before finishing')) + end + end + + context 'when the task has failed but not refreshed' do + let(:failed_task_hash) { task_hash.merge('status' => 'failed') } + let(:endpoint) { instance_double(MeiliSearch::Task, task: failed_task_hash) } + + it { is_expected.to be_failed } + end + end + + describe '#succeeded?' 
do + it 'returns true if the task has succeeded' do + task_hash['status'] = 'succeeded' + expect(subject).to be_succeeded + end + + it 'returns false if the task has failed or been cancelled' do + task_hash['status'] = 'failed' + expect(subject).not_to be_succeeded + task_hash['status'] = 'cancelled' + expect(subject).not_to be_succeeded + end + + context 'when the task is not finished' do + let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash) } + + it { is_expected.not_to be_succeeded } + + it 'warns that the task is not finished' do + subject.succeeded? + + expect(logger).to have_received(:warn).with(a_string_including('checked before finishing')) + end + end + + context 'when the task has succeeded but not refreshed' do + let(:successful_task_hash) { task_hash.merge('status' => 'succeeded') } + let(:endpoint) { instance_double(MeiliSearch::Task, task: successful_task_hash) } + + it { is_expected.to be_succeeded } + end + end + + describe '#cancelled?' do + it 'returns false if the task has succeeded or failed' do + task_hash['status'] = 'succeeded' + expect(subject).not_to be_cancelled + task_hash['status'] = 'failed' + expect(subject).not_to be_cancelled + end + + it 'returns true if the task has been cancelled' do + task_hash['status'] = 'cancelled' + expect(subject).to be_cancelled + end + + context 'when the task is not finished' do + let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash) } + + it { is_expected.not_to be_cancelled } + + it 'warns that the task is not finished' do + subject.cancelled? + + expect(logger).to have_received(:warn).with(a_string_including('checked before finishing')) + end + end + + context 'when the task has been cancelled but not refreshed' do + let(:cancelled_task_hash) { task_hash.merge('status' => 'cancelled') } + let(:endpoint) { instance_double(MeiliSearch::Task, task: cancelled_task_hash) } + + it { is_expected.to be_cancelled } + end + end + + describe '#deleted?' 
do + let(:not_found_error) { MeiliSearch::ApiError.new(404, '', '') } + let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash) } + + it 'returns false when the task can be found' do + expect(subject.deleted?).to be(false) # don't just return nil + expect(subject).not_to be_deleted + end + + context 'when it was deleted prior' do + let(:endpoint) { instance_double(MeiliSearch::Task) } + + before do + allow(endpoint).to receive(:task) { raise not_found_error } + subject.refresh + end + + it 'does not check again' do + subject.deleted? + expect(endpoint).to have_received(:task).once + end + + it { is_expected.to be_deleted } + end + + it 'refreshes and returns true when it is no longer in instance' do + allow(endpoint).to receive(:task) { raise not_found_error } + expect(subject).to be_deleted + end + end + + describe '#cancel' do + context 'when the task is still not finished' do + let(:cancellation_task) { instance_double(described_class, await: nil) } + let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash, cancel_tasks: cancellation_task) } + + it 'sends a request to cancel itself' do + subject.cancel + expect(endpoint).to have_received(:cancel_tasks) + end + + it 'returns true when the cancellation succeeds' do + task_hash['status'] = 'cancelled' + expect(subject.cancel).to be(true) + end + + it 'returns false when the cancellation fails' do + task_hash['status'] = 'succeeded' + expect(subject.cancel).to be(false) + end + end + + context 'when the task is already finished' do + let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash, cancel_tasks: nil) } + before { task_hash['status'] = 'succeeded' } + + it 'sends no request' do + subject.cancel + expect(endpoint).not_to have_received(:cancel_tasks) + end + + it { is_expected.not_to be_cancelled } + end + + context 'when the task is already cancelled' do + let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash, cancel_tasks: nil) } + before { 
task_hash['status'] = 'cancelled' } + + it 'sends no request' do + subject.cancel + expect(endpoint).not_to have_received(:cancel_tasks) + end + + it { is_expected.to be_cancelled } + end + end + + describe '#delete' do + let(:deletion_task) { instance_double(described_class, await: nil) } + let(:endpoint) { instance_double(MeiliSearch::Task, delete_tasks: deletion_task) } + + context 'when the task is unfinished' do + it 'makes no request' do + subject.delete + expect(endpoint).not_to have_received(:delete_tasks) + end + + it 'returns false' do + expect(subject.delete).to be(false) + end + end + + context 'when the task is finished' do + before do + task_hash['status'] = 'failed' + not_found_error = MeiliSearch::ApiError.new(404, '', '') + allow(endpoint).to receive(:task) { raise not_found_error } + end + + it 'makes a deletion request' do + subject.delete + expect(endpoint).to have_received(:delete_tasks) + end + + it 'returns true' do + expect(subject.delete).to be(true) + end + end + end + + describe '#refresh' do + let(:changed_task) { task_hash.merge('status' => 'succeeded', 'error' => 'Done too well') } + let(:endpoint) { instance_double(MeiliSearch::Task, task: changed_task) } + + it 'calls endpoint to update task' do + expect { subject.refresh }.to change { subject['status'] }.from('enqueued').to('succeeded') + .and(change { subject['error'] }.from(nil).to('Done too well')) + end + end + + describe '#await' do + let(:changed_task) { task_hash.merge('status' => 'succeeded', 'error' => 'Done too well') } + let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash, wait_for_task: changed_task) } + + context 'when the task is not yet completed' do + let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash, wait_for_task: changed_task) } + + it 'waits for the task to complete' do + expect { subject.await }.to change { subject['status'] }.from('enqueued').to('succeeded') + .and(change { subject['error'] }.from(nil).to('Done too well')) + 
end + + it 'returns itself for method chaining' do + expect(subject.await).to be(subject) + end + end + + context 'when the task is already completed' do + let(:endpoint) { instance_double(MeiliSearch::Task, task: changed_task, wait_for_task: changed_task) } + + it 'does not contact the instance' do + subject.refresh + subject.await + + expect(endpoint).to have_received(:task).once + expect(endpoint).not_to have_received(:wait_for_task) + end + end + end + + describe '#error' do + let(:error) do + { 'message' => "Index `#{new_index_uid}` already exists.", + 'code' => 'index_already_exists', + 'type' => 'invalid_request', + 'link' => 'https://docs.meilisearch.com/errors#index_already_exists' } + end + + before { task_hash.merge!('error' => error, 'status' => 'failed') } + + it 'returns errors' do + expect(subject.error).to match(error) + end + end + + describe '#to_h' do + it 'returns the underlying task hash' do + expect(subject.to_h).to be(task_hash) + end + + it 'is aliased as #to_hash' do + expect(subject.to_hash).to be(subject.to_h) + end + end +end diff --git a/spec/meilisearch/utils_spec.rb b/spec/meilisearch/utils_spec.rb index ffeea67a..08da5e8e 100644 --- a/spec/meilisearch/utils_spec.rb +++ b/spec/meilisearch/utils_spec.rb @@ -1,6 +1,29 @@ # frozen_string_literal: true RSpec.describe MeiliSearch::Utils do + let(:logger) { instance_double(Logger, warn: nil) } + + describe '.soft_deprecate' do + before(:each) { described_class.logger = logger } + after(:each) { described_class.logger = nil } + + it 'outputs a warning' do + described_class.soft_deprecate('footballs', 'snowballs') + expect(logger).to have_received(:warn) + end + + it 'does not throw an error' do + expect do + described_class.soft_deprecate('footballs', 'snowballs') + end.not_to raise_error + end + + it 'includes relevant information' do + described_class.soft_deprecate('footballs', 'snowballs') + expect(logger).to have_received(:warn).with(a_string_including('footballs', 'snowballs')) + end + 
end + describe '.parse_query' do it 'transforms arrays into strings' do data = described_class.parse_query({ array: [1, 2, 3], other: 'string' }, [:array, :other]) @@ -22,6 +45,9 @@ end describe '.transform_attributes' do + before(:each) { described_class.logger = logger } + after(:each) { described_class.logger = nil } + it 'transforms snake_case into camelCased keys' do data = described_class.transform_attributes({ index_name: 'books', @@ -49,9 +75,10 @@ it 'warns when using camelCase' do attrs = { distinctAttribute: 'title' } - expect do - described_class.transform_attributes(attrs) - end.to output(include('Attributes will be expected to be snake_case', 'distinctAttribute')).to_stderr + described_class.transform_attributes(attrs) + + expect(logger).to have_received(:warn) + .with(a_string_including('Attributes will be expected to be snake_case', 'distinctAttribute')) end it 'warns when using camelCase in an array' do @@ -60,9 +87,10 @@ { 'indexUid' => 'books', 'q' => 'prince' } ] - expect do - described_class.transform_attributes(attrs) - end.to output(include('Attributes will be expected to be snake_case', 'indexUid')).to_stderr + described_class.transform_attributes(attrs) + + expect(logger).to have_received(:warn) + .with(a_string_including('Attributes will be expected to be snake_case', 'indexUid')) end end @@ -115,28 +143,30 @@ end describe '.warn_on_non_conforming_attribute_names' do + before(:each) { described_class.logger = logger } + after(:each) { described_class.logger = nil } + it 'warns when using camelCase attributes' do attrs = { attributesToHighlight: ['field'] } + described_class.warn_on_non_conforming_attribute_names(attrs) - expect do - described_class.warn_on_non_conforming_attribute_names(attrs) - end.to output(include('Attributes will be expected to be snake_case', 'attributesToHighlight')).to_stderr + expect(logger).to have_received(:warn) + .with(a_string_including('Attributes will be expected to be snake_case', 'attributesToHighlight')) 
end it 'warns when using a mixed case' do attrs = { distinct_ATTribute: 'title' } + described_class.warn_on_non_conforming_attribute_names(attrs) - expect do - described_class.warn_on_non_conforming_attribute_names(attrs) - end.to output(include('Attributes will be expected to be snake_case', 'distinct_ATTribute')).to_stderr + expect(logger).to have_received(:warn) + .with(a_string_including('Attributes will be expected to be snake_case', 'distinct_ATTribute')) end it 'does not warn when using snake_case' do attrs = { q: 'query', attributes_to_highlight: ['field'] } + described_class.warn_on_non_conforming_attribute_names(attrs) - expect do - described_class.warn_on_non_conforming_attribute_names(attrs) - end.not_to output.to_stderr + expect(logger).not_to have_received(:warn) end end end diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 0df0b569..5bb14253 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -146,7 +146,6 @@ # Helpers config.include IndexesHelpers - config.include TaskHelpers config.include ExceptionsHelpers config.include KeysHelpers config.include ExperimentalFeatureHelpers diff --git a/spec/support/books_contexts.rb b/spec/support/books_contexts.rb index 35b65090..83e40d2c 100644 --- a/spec/support/books_contexts.rb +++ b/spec/support/books_contexts.rb @@ -14,10 +14,7 @@ ] end - before do - response = index.add_documents(documents) - index.wait_for_task(response['taskUid']) - end + before { index.add_documents(documents).await } end RSpec.shared_context 'search books with author, genre, year' do @@ -89,10 +86,7 @@ ] end - before do - response = index.add_documents(documents) - index.wait_for_task(response['taskUid']) - end + before { index.add_documents(documents).await } end RSpec.shared_context 'search books with nested fields' do @@ -162,8 +156,5 @@ ] end - before do - response = index.add_documents(documents) - index.wait_for_task(response['taskUid']) - end + before { index.add_documents(documents).await } end diff --git 
a/spec/support/task_helpers.rb b/spec/support/task_helpers.rb deleted file mode 100644 index 6d399618..00000000 --- a/spec/support/task_helpers.rb +++ /dev/null @@ -1,9 +0,0 @@ -# frozen_string_literal: true - -module TaskHelpers - def wait_for_it(task) - raise('The param `task` does not have an taskUid key.') unless task.key?('taskUid') - - client.wait_for_task(task['taskUid']) - end -end