diff --git a/.code-samples.meilisearch.yaml b/.code-samples.meilisearch.yaml
index c5fa472d..0c7dbb72 100644
--- a/.code-samples.meilisearch.yaml
+++ b/.code-samples.meilisearch.yaml
@@ -3,10 +3,26 @@
# the documentation on build
# You can read more on https://github.com/meilisearch/documentation/tree/master/.vuepress/code-samples
---
+get_all_tasks_filtering_1: |-
+ client.index('movies').tasks
+get_all_tasks_filtering_2: |-
+ client.tasks(status: ['succeeded', 'failed'], type: ['documentAdditionOrUpdate'])
+get_all_tasks_paginating_1: |-
+ client.tasks(limit: 2, from: 10)
+get_all_tasks_paginating_2: |-
+ client.tasks(limit: 2, from: 8)
+get_pagination_settings_1: |-
+update_pagination_settings_1: |-
+reset_pagination_settings_1: |-
+get_faceting_settings_1: |-
+update_faceting_settings_1: |-
+reset_faceting_settings_1: |-
+settings_guide_faceting_1: |-
+settings_guide_pagination_1: |-
get_one_index_1: |-
client.fetch_index('movies')
list_all_indexes_1: |-
- client.indexes
+ client.indexes(limit: 3)
create_an_index_1: |-
client.create_index('movies', primary_key: 'id')
primary_field_guide_create_index_primary_key: |-
@@ -18,7 +34,7 @@ update_an_index_1: |-
delete_an_index_1: |-
client.delete_index('movies')
get_one_document_1: |-
- client.index('movies').document(25684)
+ client.index('movies').document(25684, fields: ['id', 'title', 'poster', 'release_date'])
get_documents_1: |-
client.index('movies').documents(limit: 2)
primary_field_guide_add_document_primary_key: |-
@@ -57,10 +73,6 @@ delete_documents_1: |-
client.index('movies').delete_documents([23488, 153738, 437035, 363869])
search_post_1: |-
client.index('movies').search('american ninja')
-get_task_by_index_1: |-
- client.index('movies').task(1)
-get_all_tasks_by_index_1: |-
- client.index('movies').tasks
get_task_1: |-
client.task(1)
get_all_tasks_1: |-
@@ -103,6 +115,12 @@ update_settings_1: |-
synonyms: {
wolverine: ['xmen', 'logan'],
logan: ['wolverine']
+ },
+ pagination: {
+ max_total_hits: 5000
+ },
+ faceting: {
+ max_values_per_facet: 200
}
})
reset_settings_1: |-
@@ -243,9 +261,9 @@ search_parameter_guide_highlight_tag_1: |-
highlight_pre_tag: '',
highlight_post_tag: ''
})
-search_parameter_guide_matches_1: |-
+search_parameter_guide_show_matches_position_1: |-
client.index('movies').search('winter feast', {
- matches: true
+ show_matches_position: true
})
settings_guide_synonyms_1: |-
client.index('tops').update_settings({
@@ -345,18 +363,14 @@ faceted_search_filter_1: |-
client.index('movies').search('thriller', {
filter: [['genres = Horror', 'genres = Mystery'], 'director = "Jordan Peele"']
})
-faceted_search_facets_distribution_1: |-
- client.index('movies').search('Batman', {
- facets_distribution: ['genres']
- })
+faceted_search_facets_1: |-
+ client.index('movies').search('Batman', { facets: ['genres'] })
faceted_search_walkthrough_filter_1: |-
client.index('movies').search('thriller', {
filter: [['genres = Horror', 'genres = Mystery'], 'director = "Jordan Peele"']
})
post_dump_1: |-
client.create_dump
-get_dump_status_1: |-
- client.get_dump_status('20201006-053243949')
phrase_search_1: |-
client.index('movies').search('"african american" horror')
sorting_guide_update_sortable_attributes_1: |-
@@ -401,9 +415,9 @@ authorization_header_1: |-
client = MeiliSearch::Client.new('http://127.0.0.1:7700', 'masterKey')
client.keys
get_one_key_1: |-
- client.key('d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4')
+ client.key('6062abda-a5aa-4414-ac91-ecd7944c0f8d')
get_all_keys_1: |-
- client.keys
+ client.keys(limit: 3)
create_a_key_1: |-
client.create_key(
description: 'Add documents: Products API key',
@@ -413,28 +427,20 @@ create_a_key_1: |-
)
update_a_key_1: |-
client.update_key(
- 'd0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4',
+ '6062abda-a5aa-4414-ac91-ecd7944c0f8d',
{
description: 'Manage documents: Products/Reviews API key',
- actions: [
- 'documents.add',
- 'documents.delete'
- ],
- indexes: [
- 'products',
- 'reviews'
- ],
- expires_at: '2042-04-02T00:42:42Z'
+ name: 'Products/Reviews API key'
}
)
delete_a_key_1: |-
- client.delete_key('d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4')
+ client.delete_key('6062abda-a5aa-4414-ac91-ecd7944c0f8d')
security_guide_search_key_1: |-
client = MeiliSearch::Client.new('http://127.0.0.1:7700', 'apiKey')
client.index('patient_medical_records').search
security_guide_update_key_1: |-
client = MeiliSearch::Client.new('http://127.0.0.1:7700', 'masterKey')
- client.update_key('d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4', indexes: ['doctors'])
+ client.update_key('74c9c733-3368-4738-bbe5-1d18a5fecb37', description: 'Default Search API Key')
security_guide_create_key_1: |-
client = MeiliSearch::Client.new('http://127.0.0.1:7700', 'masterKey')
client.create_key(
@@ -448,8 +454,9 @@ security_guide_list_keys_1: |-
client.keys
security_guide_delete_key_1: |-
client = MeiliSearch::Client.new('http://127.0.0.1:7700', 'masterKey')
- client.delete_key('d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4')
+ client.delete_key('ac5cd97d-5a4b-4226-a868-2d0eb6d197ab')
tenant_token_guide_generate_sdk_1: |-
+ uid = '85c3c2f9-bdd6-41f1-abd8-11fcf80e0f76'
api_key = 'B5KdX2MY2jV6EXfUs6scSfmC...'
expires_at = Time.new(2025, 12, 20).utc
search_rules = {
@@ -458,7 +465,7 @@ tenant_token_guide_generate_sdk_1: |-
}
}
- token = client.generate_tenant_token(search_rules, api_key: api_key, expires_at: expires_at)
+ token = client.generate_tenant_token(uid, search_rules, api_key: api_key, expires_at: expires_at)
tenant_token_guide_search_sdk_1: |-
front_end_client = MeiliSearch::Client.new('http://127.0.0.1:7700', token)
@@ -481,7 +488,7 @@ getting_started_update_searchable_attributes: |-
getting_started_update_stop_words: |-
client.index('movies').update_stop_words(['the'])
getting_started_check_task_status: |-
- client.index('movies').task(0)
+ client.task(0)
getting_started_synonyms: |-
client.index('movies').update_synonyms({
winnie: ['piglet'],
diff --git a/.rubocop_todo.yml b/.rubocop_todo.yml
index a6e2df64..73f8c29d 100644
--- a/.rubocop_todo.yml
+++ b/.rubocop_todo.yml
@@ -1,6 +1,6 @@
# This configuration was generated by
# `rubocop --auto-gen-config`
-# on 2022-04-26 02:54:29 UTC using RuboCop version 1.26.1.
+# on 2022-07-11 13:40:51 UTC using RuboCop version 1.29.1.
# The point is for the user to remove these configuration records
# one by one as the offenses are removed from the code base.
# Note that changes in the inspected code, or installation of new
@@ -14,22 +14,16 @@ Gemspec/RequireMFA:
Exclude:
- 'meilisearch.gemspec'
-# Offense count: 1
-# This cop supports safe auto-correction (--auto-correct).
-Layout/HeredocIndentation:
- Exclude:
- - 'spec/meilisearch/index/documents_spec.rb'
-
# Offense count: 43
# Configuration parameters: CountComments, CountAsOne, ExcludedMethods, IgnoredMethods.
# IgnoredMethods: refine
Metrics/BlockLength:
- Max: 558
+ Max: 524
# Offense count: 2
# Configuration parameters: CountComments, CountAsOne.
Metrics/ClassLength:
- Max: 277
+ Max: 280
# Offense count: 1
# Configuration parameters: Max, CountKeywordArgs.
diff --git a/README.md b/README.md
index 3a6cfd6f..cad76847 100644
--- a/README.md
+++ b/README.md
@@ -187,8 +187,7 @@ JSON output:
]
}
],
- "nbHits": 1,
- "exhaustiveNbHits": false,
+ "estimatedTotalHits": 1,
"query": "wonder",
"limit": 20,
"offset": 0,
@@ -198,7 +197,7 @@ JSON output:
## 🤖 Compatibility with Meilisearch
-This package only guarantees the compatibility with the [version v0.27.0 of Meilisearch](https://github.com/meilisearch/meilisearch/releases/tag/v0.27.0).
+This package only guarantees the compatibility with the [version v0.28.0 of Meilisearch](https://github.com/meilisearch/meilisearch/releases/tag/v0.28.0).
## 💡 Learn More
diff --git a/lib/meilisearch/client.rb b/lib/meilisearch/client.rb
index ea426bdd..7b7761b5 100644
--- a/lib/meilisearch/client.rb
+++ b/lib/meilisearch/client.rb
@@ -6,14 +6,20 @@ class Client < HTTPRequest
### INDEXES
- def raw_indexes
- http_get('/indexes')
+ def raw_indexes(options = {})
+ body = Utils.transform_attributes(options.transform_keys(&:to_sym).slice(:limit, :offset))
+
+ http_get('/indexes', body)
end
- def indexes
- raw_indexes.map do |index_hash|
+ def indexes(options = {})
+ response = raw_indexes(options)
+
+ response['results'].map! do |index_hash|
index_object(index_hash['uid'], index_hash['primaryKey'])
end
+
+ response
end
# Usage:
@@ -29,7 +35,7 @@ def create_index(index_uid, options = {})
# Waits for the task to be achieved, be careful when using it.
def create_index!(index_uid, options = {})
task = create_index(index_uid, options)
- wait_for_task(task['uid'])
+ wait_for_task(task['taskUid'])
end
def delete_index(index_uid)
@@ -52,12 +58,14 @@ def fetch_raw_index(index_uid)
### KEYS
- def keys
- http_get '/keys'
+ def keys(limit: nil, offset: nil)
+ body = { limit: limit, offset: offset }.compact
+
+ http_get '/keys', body
end
- def key(key_uid)
- http_get "/keys/#{key_uid}"
+ def key(uid_or_key)
+ http_get "/keys/#{uid_or_key}"
end
def create_key(key_options)
@@ -66,14 +74,15 @@ def create_key(key_options)
http_post '/keys', body
end
- def update_key(key_uid, key_options)
+ def update_key(uid_or_key, key_options)
body = Utils.transform_attributes(key_options)
+ body = body.slice('description', 'name')
- http_patch "/keys/#{key_uid}", body
+ http_patch "/keys/#{uid_or_key}", body
end
- def delete_key(key_uid)
- http_delete "/keys/#{key_uid}"
+ def delete_key(uid_or_key)
+ http_delete "/keys/#{uid_or_key}"
end
### HEALTH
@@ -105,15 +114,10 @@ def create_dump
http_post '/dumps'
end
- def dump_status(dump_uid)
- http_get "/dumps/#{dump_uid}/status"
- end
- alias get_dump_status dump_status
-
### TASKS
- def tasks
- task_endpoint.task_list
+ def tasks(options = {})
+ task_endpoint.task_list(options)
end
def task(task_uid)
diff --git a/lib/meilisearch/index.rb b/lib/meilisearch/index.rb
index 999b6d5c..ea7d1201 100644
--- a/lib/meilisearch/index.rb
+++ b/lib/meilisearch/index.rb
@@ -30,7 +30,7 @@ def fetch_raw_info
end
def update(body)
- http_put indexes_path(id: @uid), Utils.transform_attributes(body)
+ http_patch indexes_path(id: @uid), Utils.transform_attributes(body)
end
alias update_index update
@@ -54,15 +54,20 @@ def set_base_properties(index_hash)
### DOCUMENTS
- def document(document_id)
+ def document(document_id, fields: nil)
encode_document = URI.encode_www_form_component(document_id)
- http_get "/indexes/#{@uid}/documents/#{encode_document}"
+ body = { fields: fields&.join(',') }.compact
+
+ http_get("/indexes/#{@uid}/documents/#{encode_document}", body)
end
alias get_document document
alias get_one_document document
def documents(options = {})
- http_get "/indexes/#{@uid}/documents", Utils.transform_attributes(options)
+ body = Utils.transform_attributes(options.transform_keys(&:to_sym).slice(:limit, :offset, :fields))
+ body = body.transform_values { |v| v.respond_to?(:join) ? v.join(',') : v }
+
+ http_get "/indexes/#{@uid}/documents", body
end
alias get_documents documents
@@ -75,7 +80,7 @@ def add_documents(documents, primary_key = nil)
def add_documents!(documents, primary_key = nil)
task = add_documents(documents, primary_key)
- wait_for_task(task['uid'])
+ wait_for_task(task['taskUid'])
end
alias replace_documents! add_documents!
alias add_or_replace_documents! add_documents!
@@ -109,7 +114,7 @@ def update_documents(documents, primary_key = nil)
def update_documents!(documents, primary_key = nil)
task = update_documents(documents, primary_key)
- wait_for_task(task['uid'])
+ wait_for_task(task['taskUid'])
end
alias add_or_update_documents! update_documents!
@@ -125,7 +130,7 @@ def add_documents_in_batches!(documents, batch_size = 1000, primary_key = nil)
tasks = add_documents_in_batches(documents, batch_size, primary_key)
responses = []
tasks.each do |task_obj|
- responses.append(wait_for_task(task_obj['uid']))
+ responses.append(wait_for_task(task_obj['taskUid']))
end
responses
end
@@ -142,7 +147,7 @@ def update_documents_in_batches!(documents, batch_size = 1000, primary_key = nil
tasks = update_documents_in_batches(documents, batch_size, primary_key)
responses = []
tasks.each do |task_obj|
- responses.append(wait_for_task(task_obj['uid']))
+ responses.append(wait_for_task(task_obj['taskUid']))
end
responses
end
@@ -158,7 +163,7 @@ def delete_documents(documents_ids)
def delete_documents!(documents_ids)
task = delete_documents(documents_ids)
- wait_for_task(task['uid'])
+ wait_for_task(task['taskUid'])
end
alias delete_multiple_documents! delete_documents!
@@ -170,7 +175,7 @@ def delete_document(document_id)
def delete_document!(document_id)
task = delete_document(document_id)
- wait_for_task(task['uid'])
+ wait_for_task(task['taskUid'])
end
alias delete_one_document! delete_document!
@@ -180,7 +185,7 @@ def delete_all_documents
def delete_all_documents!
task = delete_all_documents
- wait_for_task(task['uid'])
+ wait_for_task(task['taskUid'])
end
### SEARCH
@@ -199,7 +204,7 @@ def task_endpoint
private :task_endpoint
def task(task_uid)
- task_endpoint.index_task(@uid, task_uid)
+ task_endpoint.index_task(task_uid)
end
def tasks
@@ -236,7 +241,7 @@ def settings
alias get_settings settings
def update_settings(settings)
- http_post "/indexes/#{@uid}/settings", Utils.transform_attributes(settings)
+ http_patch "/indexes/#{@uid}/settings", Utils.transform_attributes(settings)
end
alias settings= update_settings
@@ -252,7 +257,7 @@ def ranking_rules
alias get_ranking_rules ranking_rules
def update_ranking_rules(ranking_rules)
- http_post "/indexes/#{@uid}/settings/ranking-rules", ranking_rules
+ http_put "/indexes/#{@uid}/settings/ranking-rules", ranking_rules
end
alias ranking_rules= update_ranking_rules
@@ -268,7 +273,7 @@ def synonyms
alias get_synonyms synonyms
def update_synonyms(synonyms)
- http_post "/indexes/#{@uid}/settings/synonyms", synonyms
+ http_put "/indexes/#{@uid}/settings/synonyms", synonyms
end
alias synonyms= update_synonyms
@@ -285,7 +290,7 @@ def stop_words
def update_stop_words(stop_words)
body = stop_words.nil? || stop_words.is_a?(Array) ? stop_words : [stop_words]
- http_post "/indexes/#{@uid}/settings/stop-words", body
+ http_put "/indexes/#{@uid}/settings/stop-words", body
end
alias stop_words= update_stop_words
@@ -301,7 +306,7 @@ def distinct_attribute
alias get_distinct_attribute distinct_attribute
def update_distinct_attribute(distinct_attribute)
- http_post "/indexes/#{@uid}/settings/distinct-attribute", distinct_attribute
+ http_put "/indexes/#{@uid}/settings/distinct-attribute", distinct_attribute
end
alias distinct_attribute= update_distinct_attribute
@@ -317,7 +322,7 @@ def searchable_attributes
alias get_searchable_attributes searchable_attributes
def update_searchable_attributes(searchable_attributes)
- http_post "/indexes/#{@uid}/settings/searchable-attributes", searchable_attributes
+ http_put "/indexes/#{@uid}/settings/searchable-attributes", searchable_attributes
end
alias searchable_attributes= update_searchable_attributes
@@ -333,7 +338,7 @@ def displayed_attributes
alias get_displayed_attributes displayed_attributes
def update_displayed_attributes(displayed_attributes)
- http_post "/indexes/#{@uid}/settings/displayed-attributes", displayed_attributes
+ http_put "/indexes/#{@uid}/settings/displayed-attributes", displayed_attributes
end
alias displayed_attributes= update_displayed_attributes
@@ -349,7 +354,7 @@ def filterable_attributes
alias get_filterable_attributes filterable_attributes
def update_filterable_attributes(filterable_attributes)
- http_post "/indexes/#{@uid}/settings/filterable-attributes", filterable_attributes
+ http_put "/indexes/#{@uid}/settings/filterable-attributes", filterable_attributes
end
alias filterable_attributes= update_filterable_attributes
@@ -365,7 +370,7 @@ def sortable_attributes
alias get_sortable_attributes sortable_attributes
def update_sortable_attributes(sortable_attributes)
- http_post "/indexes/#{@uid}/settings/sortable-attributes", sortable_attributes
+ http_put "/indexes/#{@uid}/settings/sortable-attributes", sortable_attributes
end
alias sortable_attributes= update_sortable_attributes
diff --git a/lib/meilisearch/task.rb b/lib/meilisearch/task.rb
index 00dec031..5dd24df9 100644
--- a/lib/meilisearch/task.rb
+++ b/lib/meilisearch/task.rb
@@ -5,8 +5,13 @@
module MeiliSearch
class Task < HTTPRequest
- def task_list
- http_get '/tasks/'
+ ALLOWED_PARAMS = [:limit, :from, :index_uid, :type, :status].freeze
+
+ def task_list(options = {})
+ body = Utils.transform_attributes(options.transform_keys(&:to_sym).slice(*ALLOWED_PARAMS))
+ body = body.transform_values { |v| v.respond_to?(:join) ? v.join(',') : v }
+
+ http_get '/tasks/', body
end
def task(task_uid)
@@ -14,11 +19,11 @@ def task(task_uid)
end
def index_tasks(index_uid)
- http_get "/indexes/#{index_uid}/tasks"
+ http_get '/tasks', { indexUid: [index_uid].flatten.join(',') }
end
- def index_task(index_uid, task_uid)
- http_get "/indexes/#{index_uid}/tasks/#{task_uid}"
+ def index_task(task_uid)
+ http_get "/tasks/#{task_uid}"
end
def wait_for_task(task_uid, timeout_in_ms = 5000, interval_in_ms = 50)
diff --git a/lib/meilisearch/tenant_token.rb b/lib/meilisearch/tenant_token.rb
index 290a8941..2d1364d9 100644
--- a/lib/meilisearch/tenant_token.rb
+++ b/lib/meilisearch/tenant_token.rb
@@ -7,21 +7,21 @@ module TenantToken
alg: 'HS256'
}.freeze
- def generate_tenant_token(search_rules, api_key: nil, expires_at: nil)
+ def generate_tenant_token(api_key_uid, search_rules, api_key: nil, expires_at: nil)
signature = retrieve_valid_key!(api_key, @api_key)
expiration = validate_expires_at!(expires_at)
rules = validate_search_rules!(search_rules)
- unsigned_data = build_payload(expiration, rules, signature)
+ unsigned_data = build_payload(expiration, rules, api_key_uid)
combine(unsigned_data, to_base64(sign_data(signature, unsigned_data)))
end
private
- def build_payload(expiration, rules, signature)
+ def build_payload(expiration, rules, api_key_uid)
payload = {
searchRules: rules,
- apiKeyPrefix: signature[0..7],
+ apiKeyUid: api_key_uid,
exp: expiration
}
diff --git a/spec/meilisearch/client/dumps_spec.rb b/spec/meilisearch/client/dumps_spec.rb
index db682c2e..e4606b5a 100644
--- a/spec/meilisearch/client/dumps_spec.rb
+++ b/spec/meilisearch/client/dumps_spec.rb
@@ -4,26 +4,10 @@
it 'creates a new dump' do
response = client.create_dump
expect(response).to be_a(Hash)
- expect(response['uid']).to_not be_nil
+ expect(response['taskUid']).to_not be_nil
expect(response['status']).to_not be_nil
- expect(response['status']).to eq('in_progress')
- wait_for_dump_creation(client, response['uid'])
- end
-
- it 'gets dump status' do
- dump = client.create_dump
- response = client.dump_status(dump['uid'])
- expect(response['status']).to_not be_nil
- wait_for_dump_creation(client, dump['uid'])
- end
-
- it 'fails to get dump status without uid' do
- expect do
- client.dump_status('uid_not_exists')
- end.to raise_meilisearch_api_error_with(404, 'dump_not_found', 'invalid_request')
- end
-
- it 'works with method aliases' do
- expect(client.method(:dump_status) == client.method(:get_dump_status)).to be_truthy
+ expect(response['status']).to eq('enqueued')
+ response = client.wait_for_task(response['taskUid'])
+ expect(response['status']).to eq('succeeded')
end
end
diff --git a/spec/meilisearch/client/indexes_spec.rb b/spec/meilisearch/client/indexes_spec.rb
index 55760a09..e205da72 100644
--- a/spec/meilisearch/client/indexes_spec.rb
+++ b/spec/meilisearch/client/indexes_spec.rb
@@ -8,7 +8,7 @@
expect(task['type']).to eq('indexCreation')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
index = client.fetch_index('new_index')
expect(index).to be_a(MeiliSearch::Index)
@@ -36,7 +36,7 @@
expect(task['type']).to eq('indexCreation')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
index = client.fetch_index('new_index')
expect(index).to be_a(MeiliSearch::Index)
@@ -63,7 +63,7 @@
it 'creates an index' do
task = client.create_index('new_index', primary_key: 'primary_key')
expect(task['type']).to eq('indexCreation')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
index = client.fetch_index('new_index')
expect(index).to be_a(MeiliSearch::Index)
@@ -83,7 +83,7 @@
expect(task['type']).to eq('indexCreation')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
index = client.fetch_index('new_index')
expect(index).to be_a(MeiliSearch::Index)
@@ -120,7 +120,7 @@
it 'returns MeiliSearch::Index objects' do
client.create_index!('index')
- index = client.indexes.first
+ index = client.indexes['results'].first
expect(index).to be_a(MeiliSearch::Index)
end
@@ -128,20 +128,32 @@
it 'gets a list of indexes' do
['first_index', 'second_index', 'third_index'].each { |name| client.create_index!(name) }
- indexes = client.indexes
+ indexes = client.indexes['results']
expect(indexes).to be_a(Array)
expect(indexes.length).to eq(3)
uids = indexes.map(&:uid)
expect(uids).to contain_exactly('first_index', 'second_index', 'third_index')
end
+
+ it 'paginates indexes list with limit and offset' do
+ ['first_index', 'second_index', 'third_index'].each { |name| client.create_index!(name) }
+
+ indexes = client.indexes(limit: 1, offset: 2)
+
+ expect(indexes['results']).to be_a(Array)
+ expect(indexes['total']).to eq(3)
+ expect(indexes['limit']).to eq(1)
+ expect(indexes['offset']).to eq(2)
+ expect(indexes['results'].map(&:uid)).to eq(['third_index'])
+ end
end
describe '#raw_indexes' do
it 'returns raw indexes' do
client.create_index!('index')
- response = client.raw_indexes.first
+ response = client.raw_indexes['results'].first
expect(response).to be_a(Hash)
expect(response['uid']).to eq('index')
@@ -150,7 +162,7 @@
it 'gets a list of raw indexes' do
['first_index', 'second_index', 'third_index'].each { |name| client.create_index!(name) }
- indexes = client.raw_indexes
+ indexes = client.raw_indexes['results']
expect(indexes).to be_a(Array)
expect(indexes.length).to eq(3)
@@ -212,7 +224,7 @@
expect(task['type']).to eq('indexDeletion')
- achieved_task = client.wait_for_task(task['uid'])
+ achieved_task = client.wait_for_task(task['taskUid'])
expect(achieved_task['status']).to eq('succeeded')
expect { client.fetch_index('existing_index') }.to raise_index_not_found_meilisearch_api_error
diff --git a/spec/meilisearch/client/keys_spec.rb b/spec/meilisearch/client/keys_spec.rb
index dba8212d..4a83274c 100644
--- a/spec/meilisearch/client/keys_spec.rb
+++ b/spec/meilisearch/client/keys_spec.rb
@@ -1,55 +1,8 @@
# frozen_string_literal: true
RSpec.describe 'MeiliSearch::Client - Keys' do
- context 'When a client uses default key roles' do
- let(:search_key) { client.keys['results'].find { |k| k['description'].start_with? 'Default Search' } }
- let(:admin_key) { client.keys['results'].find { |k| k['description'].start_with? 'Default Admin' } }
-
- it 'fails to get settings if public key used' do
- new_client = MeiliSearch::Client.new(URL, search_key['key'])
-
- expect do
- new_client.index(random_uid).settings
- end.to raise_meilisearch_api_error_with(403, 'invalid_api_key', 'auth')
- end
-
- it 'fails to get keys if private key used' do
- new_client = MeiliSearch::Client.new(URL, admin_key['key'])
-
- expect do
- new_client.keys
- end.to raise_meilisearch_api_error_with(403, 'invalid_api_key', 'auth')
- end
-
- it 'fails to get settings if no key is used' do
- new_client = MeiliSearch::Client.new(URL)
-
- expect do
- new_client.index(random_uid).settings
- end.to raise_meilisearch_api_error_with(401, 'missing_authorization_header', 'auth')
- end
-
- it 'succeeds to search when using public key' do
- uid = random_uid
- index = client.index(uid)
- index.add_documents!(title: 'Test')
- new_client = MeiliSearch::Client.new(URL, search_key['key'])
- response = new_client.index(uid).search('test')
-
- expect(response).to have_key('hits')
- end
-
- it 'succeeds to get settings when using private key' do
- uid = random_uid
- client.create_index!(uid)
- new_client = MeiliSearch::Client.new(URL, admin_key['key'])
- response = new_client.index(uid).settings
-
- expect(response).to have_key('rankingRules')
- end
- end
-
context 'When managing keys' do
+ let(:uuid_v4) { 'c483e150-cff1-4a45-ac26-bb8eb8e01d36' }
let(:delete_docs_key_options) do
{
description: 'A new key to delete docs',
@@ -67,13 +20,6 @@
}
end
- it 'gets the list of the default keys' do
- results = client.keys['results']
-
- expect(results).to be_a(Array)
- expect(results.count).to be >= 2
- end
-
it 'creates a key' do
new_key = client.create_key(add_docs_key_options)
@@ -85,10 +31,12 @@
expect(new_key['description']).to eq('A new key to add docs')
end
- it 'creates a key using snake_case' do
- new_key = client.create_key(add_docs_key_options)
+ it 'creates a key with setting uid' do
+ new_key = client.create_key(add_docs_key_options.merge(uid: uuid_v4))
expect(new_key['expiresAt']).to be_nil
+ expect(new_key['name']).to be_nil
+ expect(new_key['uid']).to eq(uuid_v4)
expect(new_key['key']).to be_a(String)
expect(new_key['createdAt']).to be_a(String)
expect(new_key['updatedAt']).to be_a(String)
@@ -96,7 +44,7 @@
expect(new_key['description']).to eq('A new key to add docs')
end
- it 'gets a key' do
+ it 'gets a key with their key data' do
new_key = client.create_key(delete_docs_key_options)
expect(client.key(new_key['key'])['description']).to eq('A new key to delete docs')
@@ -111,29 +59,55 @@
expect(key['description']).to eq('A new key to delete docs')
end
- it 'updates a key' do
- new_key = client.create_key(delete_docs_key_options)
- new_updated_key = client.update_key(new_key['key'], indexes: ['coco'])
+ it 'retrieves a list of keys' do
+ new_key = client.create_key(add_docs_key_options)
- expect(new_updated_key['key']).to eq(new_key['key'])
- expect(new_updated_key['description']).to eq(new_key['description'])
- expect(new_updated_key['indexes']).to eq(['coco'])
+ list = client.keys
+
+ expect(list.keys).to contain_exactly('limit', 'offset', 'results', 'total')
+ expect(list['results']).to eq([new_key])
+ expect(list['total']).to eq(1)
end
- it 'updates a key using snake_case' do
+ it 'paginates keys list with limit/offset' do
+ client.create_key(add_docs_key_options)
+
+ expect(client.keys(limit: 0, offset: 20)['results']).to be_empty
+ expect(client.keys(limit: 5, offset: 199)['results']).to be_empty
+ end
+
+ it 'gets a key with their uid' do
+ new_key = client.create_key(delete_docs_key_options.merge(uid: uuid_v4))
+
+ key = client.key(uuid_v4)
+
+ expect(key).to eq(new_key)
+ end
+
+ it 'updates a key with their key data' do
new_key = client.create_key(delete_docs_key_options)
- new_updated_key = client.update_key(new_key['key'], indexes: ['coco'])
+ new_updated_key = client.update_key(new_key['key'], indexes: ['coco'], description: 'no coco')
expect(new_updated_key['key']).to eq(new_key['key'])
- expect(new_updated_key['description']).to eq(new_key['description'])
- expect(new_updated_key['indexes']).to eq(['coco'])
+ expect(new_updated_key['description']).to eq('no coco')
+      # remains untouched: since v0.28.0, Meilisearch only supports updating name/description.
+ expect(new_updated_key['indexes']).to eq(['*'])
+ end
+
+ it 'updates a key with their uid data' do
+ client.create_key(delete_docs_key_options.merge(uid: uuid_v4))
+ new_updated_key = client.update_key(uuid_v4, name: 'coco')
+
+ expect(new_updated_key['name']).to eq('coco')
end
it 'deletes a key' do
new_key = client.create_key(add_docs_key_options)
client.delete_key(new_key['key'])
- expect(client.keys.filter { |k| k['key'] == new_key['key'] }).to be_empty
+ expect do
+ client.key(new_key['key'])
+ end.to raise_error(MeiliSearch::ApiError)
end
end
end
diff --git a/spec/meilisearch/client/tasks_spec.rb b/spec/meilisearch/client/tasks_spec.rb
index 7c725e0b..55c75a1f 100644
--- a/spec/meilisearch/client/tasks_spec.rb
+++ b/spec/meilisearch/client/tasks_spec.rb
@@ -24,7 +24,7 @@
expect(last_task.keys).to include(*succeeded_task_keys)
end
- it 'gets a task of the MeiliSearch instance' do
+ it 'gets a task of the Meilisearch instance' do
task = client.task(0)
expect(task).to be_a(Hash)
@@ -32,7 +32,7 @@
expect(task.keys).to include(*succeeded_task_keys)
end
- it 'gets all the tasks of the MeiliSearch instance' do
+ it 'gets tasks of the Meilisearch instance' do
tasks = client.tasks
expect(tasks['results']).to be_a(Array)
@@ -42,10 +42,28 @@
expect(last_task.keys).to include(*succeeded_task_keys)
end
+ it 'paginates tasks with limit/from/next' do
+ tasks = client.tasks(limit: 2)
+
+ expect(tasks['results'].count).to be <= 2
+ expect(tasks['from']).to be_a(Integer)
+ expect(tasks['next']).to be_a(Integer)
+ end
+
+ it 'filters tasks with index_uid/type/status' do
+ tasks = client.tasks(index_uid: ['a-cool-index-name'])
+
+ expect(tasks['results'].count).to eq(0)
+
+ tasks = client.tasks(index_uid: ['books'], type: ['documentAdditionOrUpdate'], status: ['succeeded'])
+
+ expect(tasks['results'].count).to be > 1
+ end
+
describe '#index.wait_for_task' do
it 'waits for task with default values' do
task = index.add_documents(documents)
- task = index.wait_for_task(task['uid'])
+ task = index.wait_for_task(task['taskUid'])
expect(task).to be_a(Hash)
expect(task['status']).not_to eq('enqueued')
@@ -54,7 +72,7 @@
it 'waits for task with default values after several updates' do
5.times { index.add_documents(documents) }
task = index.add_documents(documents)
- status = index.wait_for_task(task['uid'])
+ status = index.wait_for_task(task['taskUid'])
expect(status).to be_a(Hash)
expect(status['status']).not_to eq('enqueued')
@@ -64,7 +82,7 @@
index.add_documents(documents)
task = index.add_documents(documents)
expect do
- index.wait_for_task(task['uid'], 1)
+ index.wait_for_task(task['taskUid'], 1)
end.to raise_error(MeiliSearch::TimeoutError)
end
@@ -73,7 +91,7 @@
task = index.add_documents(documents)
expect do
Timeout.timeout(0.1) do
- index.wait_for_task(task['uid'], 5000, 200)
+ index.wait_for_task(task['taskUid'], 5000, 200)
end
end.to raise_error(Timeout::Error)
end
@@ -82,7 +100,7 @@
describe '#client.wait_for_task' do
it 'waits for task with default values' do
task = index.add_documents!(documents)
- task = client.wait_for_task(task['uid'])
+ task = client.wait_for_task(task['taskUid'])
expect(task).to be_a(Hash)
expect(task['status']).not_to eq('enqueued')
@@ -91,7 +109,7 @@
it 'waits for task with default values after several updates' do
5.times { index.add_documents(documents) }
task = index.add_documents(documents)
- status = client.wait_for_task(task['uid'])
+ status = client.wait_for_task(task['taskUid'])
expect(status).to be_a(Hash)
expect(status['status']).not_to eq('enqueued')
@@ -101,7 +119,7 @@
index.add_documents(documents)
task = index.add_documents(documents)
expect do
- client.wait_for_task(task['uid'], 1)
+ client.wait_for_task(task['taskUid'], 1)
end.to raise_error(MeiliSearch::TimeoutError)
end
@@ -110,7 +128,7 @@
task = index.add_documents(documents)
expect do
Timeout.timeout(0.1) do
- client.wait_for_task(task['uid'], 5000, 200)
+ client.wait_for_task(task['taskUid'], 5000, 200)
end
end.to raise_error(Timeout::Error)
end
diff --git a/spec/meilisearch/client/token_spec.rb b/spec/meilisearch/client/token_spec.rb
index 3c031752..1728557c 100644
--- a/spec/meilisearch/client/token_spec.rb
+++ b/spec/meilisearch/client/token_spec.rb
@@ -3,7 +3,7 @@
require 'jwt'
VERIFY_OPTIONS = {
- required_claims: ['exp', 'apiKeyPrefix', 'searchRules'],
+ required_claims: ['exp', 'apiKeyUid', 'searchRules'],
algorithm: 'HS256'
}.freeze
@@ -30,7 +30,7 @@ def initialize(api_key)
describe '#generate_tenant_token' do
subject(:token) do
- instance.generate_tenant_token(search_rules, api_key: api_key, expires_at: expires_at)
+ instance.generate_tenant_token('uid', search_rules, api_key: api_key, expires_at: expires_at)
end
context 'with api_key param' do
@@ -66,7 +66,7 @@ def initialize(api_key)
client = dummy_class.new(nil)
expect do
- client.generate_tenant_token(search_rules)
+ client.generate_tenant_token('uid', search_rules)
end.to raise_error(described_class::InvalidApiKey)
end
@@ -74,7 +74,7 @@ def initialize(api_key)
client = dummy_class.new('')
expect do
- client.generate_tenant_token(search_rules, api_key: '')
+ client.generate_tenant_token('uid', search_rules, api_key: '')
end.to raise_error(described_class::InvalidApiKey)
end
end
@@ -82,13 +82,13 @@ def initialize(api_key)
context 'with expires_at' do
it 'raises error when expires_at is in the past' do
expect do
- instance.generate_tenant_token(search_rules, expires_at: Time.now.utc - 10)
+ instance.generate_tenant_token('uid', search_rules, expires_at: Time.now.utc - 10)
end.to raise_error(described_class::ExpireOrInvalidSignature)
end
it 'allows generate token with a nil expires_at' do
expect do
- instance.generate_tenant_token(search_rules, expires_at: nil)
+ instance.generate_tenant_token('uid', search_rules, expires_at: nil)
end.to_not raise_error
end
@@ -101,14 +101,14 @@ def initialize(api_key)
it 'raises error when expires_at has a invalid type' do
['2042-01-01', 78_126_717_684, []].each do |exp|
expect do
- instance.generate_tenant_token(search_rules, expires_at: exp)
+ instance.generate_tenant_token('uid', search_rules, expires_at: exp)
end.to raise_error(described_class::ExpireOrInvalidSignature)
end
end
it 'raises error when expires_at is not a UTC' do
expect do
- instance.generate_tenant_token(search_rules, expires_at: Time.now + 10)
+ instance.generate_tenant_token('uid', search_rules, expires_at: Time.now + 10)
end.to raise_error(described_class::ExpireOrInvalidSignature)
end
end
@@ -116,7 +116,7 @@ def initialize(api_key)
context 'without expires_at param' do
it 'allows generate token without expires_at' do
expect do
- instance.generate_tenant_token(search_rules)
+ instance.generate_tenant_token('uid', search_rules)
end.to_not raise_error
end
end
@@ -126,7 +126,7 @@ def initialize(api_key)
before do
filterable_task = index.update_filterable_attributes(['genre', 'objectId'])
- index.wait_for_task(filterable_task['uid'])
+ index.wait_for_task(filterable_task['taskUid'])
end
let(:adm_client) { MeiliSearch::Client.new(URL, adm_key['key']) }
@@ -153,7 +153,7 @@ def initialize(api_key)
it 'accepts the token in the search request' do
rules.each do |data|
- token = adm_client.generate_tenant_token(data)
+ token = adm_client.generate_tenant_token(adm_key['uid'], data)
custom = MeiliSearch::Client.new(URL, token)
expect(custom.index('books').search('')).to have_key('hits')
@@ -162,16 +162,15 @@ def initialize(api_key)
it 'requires a non-nil payload in the search_rules' do
expect do
- client.generate_tenant_token(nil)
+ client.generate_tenant_token('uid', nil)
end.to raise_error(described_class::InvalidSearchRules)
end
end
- it 'has apiKeyPrefix with first 8 characters of the signature' do
- decoded = JWT.decode(token, api_key, true, VERIFY_OPTIONS).dig(0, 'apiKeyPrefix')
+ it 'has apiKeyUid with the uid of the key' do
+ decoded = JWT.decode(token, api_key, true, VERIFY_OPTIONS).dig(0, 'apiKeyUid')
- expect(decoded.size).to eq(8)
- expect(decoded).to eq(api_key[0, 8])
+ expect(decoded).to eq('uid')
end
end
end
diff --git a/spec/meilisearch/index/base_spec.rb b/spec/meilisearch/index/base_spec.rb
index cd0869db..c413b5c0 100644
--- a/spec/meilisearch/index/base_spec.rb
+++ b/spec/meilisearch/index/base_spec.rb
@@ -56,7 +56,7 @@
task = client.index('uid').update(primaryKey: 'new_primary_key')
expect(task['type']).to eq('indexUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
index = client.fetch_index('uid')
expect(index).to be_a(MeiliSearch::Index)
@@ -74,7 +74,7 @@
task = client.index('uid').update(primaryKey: 'new_primary_key')
expect(task['type']).to eq('indexUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
index = client.fetch_index('uid')
expect(index).to be_a(MeiliSearch::Index)
@@ -93,7 +93,7 @@
task = index.update(primaryKey: 'new_primary_key')
expect(task['type']).to eq('indexUpdate')
- achieved_task = client.wait_for_task(task['uid'])
+ achieved_task = client.wait_for_task(task['taskUid'])
expect(achieved_task['status']).to eq('failed')
expect(achieved_task['error']['code']).to eq('index_primary_key_already_exists')
@@ -131,7 +131,7 @@
task = client.index('uid').delete
expect(task['type']).to eq('indexDeletion')
- achieved_task = client.wait_for_task(task['uid'])
+ achieved_task = client.wait_for_task(task['taskUid'])
expect(achieved_task['status']).to eq('succeeded')
expect { client.fetch_index('uid') }.to raise_index_not_found_meilisearch_api_error
end
@@ -141,7 +141,7 @@
task = client.index('uid').delete
expect(task['type']).to eq('indexDeletion')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
index = client.index('uid')
expect { index.fetch_primary_key }.to raise_index_not_found_meilisearch_api_error
@@ -163,7 +163,7 @@
task = client.index('uid').update(primary_key: 'new_primary_key')
expect(task['type']).to eq('indexUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
index = client.fetch_index('uid')
expect(index).to be_a(MeiliSearch::Index)
diff --git a/spec/meilisearch/index/documents_spec.rb b/spec/meilisearch/index/documents_spec.rb
index 1a62c977..38079f97 100644
--- a/spec/meilisearch/index/documents_spec.rb
+++ b/spec/meilisearch/index/documents_spec.rb
@@ -19,11 +19,10 @@
describe 'adding documents' do
it 'adds documents (as a array of documents)' do
task = index.add_documents(documents)
- expect(task).to be_a(Hash)
- expect(task).to have_key('uid')
- expect(task['type']).to eq('documentAddition')
- client.wait_for_task(task['uid'])
- expect(index.documents.count).to eq(documents.count)
+
+ expect(task['type']).to eq('documentAdditionOrUpdate')
+ client.wait_for_task(task['taskUid'])
+ expect(index.documents['results'].count).to eq(documents.count)
end
it 'keeps the structure of the original documents' do
@@ -32,9 +31,9 @@
]
task = index.add_documents(docs)
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
- expect(index.documents.first.keys).to eq(docs.first.keys.map(&:to_s))
+ expect(index.documents['results'].first.keys).to eq(docs.first.keys.map(&:to_s))
end
it 'adds JSON documents (as a array of documents)' do
@@ -49,10 +48,8 @@
JSON
response = index.add_documents_json(documents, 'objectRef')
- expect(response).to be_a(Hash)
- expect(response).to have_key('uid')
- index.wait_for_task(response['uid'])
- expect(index.documents.count).to eq(5)
+ index.wait_for_task(response['taskUid'])
+ expect(index.documents['results'].count).to eq(5)
end
it 'adds NDJSON documents (as a array of documents)' do
@@ -63,45 +60,42 @@
{ "objectRef": 4, "title": "Harry Potter and the Half-Blood Prince", "comment": "The best book" }
NDJSON
response = index.add_documents_ndjson(documents, 'objectRef')
- expect(response).to be_a(Hash)
- expect(response).to have_key('uid')
- index.wait_for_task(response['uid'])
- expect(index.documents.count).to eq(4)
+
+ index.wait_for_task(response['taskUid'])
+ expect(index.documents['results'].count).to eq(4)
end
it 'adds CSV documents (as a array of documents)' do
- documents = <Prince')
end
- it 'does a custom search with facetsDistribution and limit' do
+ it 'does a custom search with facets and limit' do
response = index.update_filterable_attributes(['genre'])
- index.wait_for_task(response['uid'])
- response = index.search('prinec', facetsDistribution: ['genre'], limit: 1)
+ index.wait_for_task(response['taskUid'])
+ response = index.search('prinec', facets: ['genre'], limit: 1)
expect(response.keys).to contain_exactly(
*DEFAULT_SEARCH_RESPONSE_KEYS,
- 'facetsDistribution',
- 'exhaustiveFacetsCount'
+ 'facetDistribution'
)
- expect(response['nbHits']).to eq(2)
+ expect(response['estimatedTotalHits']).to eq(2)
expect(response['hits'].count).to eq(1)
- expect(response['facetsDistribution'].keys).to contain_exactly('genre')
- expect(response['facetsDistribution']['genre'].keys).to contain_exactly('adventure', 'fantasy')
- expect(response['facetsDistribution']['genre']['adventure']).to eq(1)
- expect(response['facetsDistribution']['genre']['fantasy']).to eq(1)
- expect(response['exhaustiveFacetsCount']).to be false
+ expect(response['facetDistribution'].keys).to contain_exactly('genre')
+ expect(response['facetDistribution']['genre'].keys).to contain_exactly('adventure', 'fantasy')
+ expect(response['facetDistribution']['genre']['adventure']).to eq(1)
+ expect(response['facetDistribution']['genre']['fantasy']).to eq(1)
end
context 'with snake_case options' do
diff --git a/spec/meilisearch/index/search/offset_spec.rb b/spec/meilisearch/index/search/offset_spec.rb
index 7cad424a..ec297bf6 100644
--- a/spec/meilisearch/index/search/offset_spec.rb
+++ b/spec/meilisearch/index/search/offset_spec.rb
@@ -17,7 +17,7 @@
it 'does a placeholder search with an offset set to 3 and custom ranking rules' do
response = index.update_ranking_rules(['objectId:asc'])
- index.wait_for_task(response['uid'])
+ index.wait_for_task(response['taskUid'])
response = index.search('')
response_with_offset = index.search('', offset: 3)
expect(response['hits'].first['objectId']).to eq(1)
diff --git a/spec/meilisearch/index/search/q_spec.rb b/spec/meilisearch/index/search/q_spec.rb
index 062dcf1a..627e13f9 100644
--- a/spec/meilisearch/index/search/q_spec.rb
+++ b/spec/meilisearch/index/search/q_spec.rb
@@ -36,9 +36,9 @@
'exactness',
'objectId:asc'
])
- index.wait_for_task(response['uid'])
+ index.wait_for_task(response['taskUid'])
response = index.search('')
- expect(response['nbHits']).to eq(documents.count)
+ expect(response['estimatedTotalHits']).to eq(documents.count)
expect(response['hits'].first['objectId']).to eq(1)
end
diff --git a/spec/meilisearch/index/search/sort_spec.rb b/spec/meilisearch/index/search/sort_spec.rb
index 69ab33ee..a196af07 100644
--- a/spec/meilisearch/index/search/sort_spec.rb
+++ b/spec/meilisearch/index/search/sort_spec.rb
@@ -4,7 +4,7 @@
include_context 'search books with author, genre, year'
before do
response = index.update_sortable_attributes(['year', 'author'])
- index.wait_for_task(response['uid'])
+ index.wait_for_task(response['taskUid'])
response = index.update_ranking_rules([
'sort',
@@ -14,7 +14,7 @@
'attribute',
'exactness'
])
- index.wait_for_task(response['uid'])
+ index.wait_for_task(response['taskUid'])
end
it 'does a custom search with one sort' do
diff --git a/spec/meilisearch/index/settings_spec.rb b/spec/meilisearch/index/settings_spec.rb
index 34d4f6c6..ced405f7 100644
--- a/spec/meilisearch/index/settings_spec.rb
+++ b/spec/meilisearch/index/settings_spec.rb
@@ -23,7 +23,9 @@
'synonyms',
'filterableAttributes',
'sortableAttributes',
- 'typoTolerance'
+ 'typoTolerance',
+ 'faceting',
+ 'pagination'
]
end
let(:uid) { random_uid }
@@ -52,9 +54,9 @@
rankingRules: ['title:asc', 'typo'],
distinctAttribute: 'title'
)
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
settings = index.settings
expect(settings['rankingRules']).to eq(['title:asc', 'typo'])
expect(settings['distinctAttribute']).to eq('title')
@@ -63,9 +65,9 @@
it 'updates one setting without reset the others' do
task = index.update_settings(stopWords: ['the'])
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
settings = index.settings
expect(settings['rankingRules']).to eq(default_ranking_rules)
expect(settings['distinctAttribute']).to be_nil
@@ -80,12 +82,12 @@
stopWords: ['the', 'a'],
synonyms: { wow: ['world of warcraft'] }
)
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
task = index.reset_settings
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
settings = index.settings
expect(settings['rankingRules']).to eq(default_ranking_rules)
@@ -102,7 +104,7 @@
stopWords: ['a']
)
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
settings = index.settings
expect(settings['rankingRules']).to eq(['typo'])
@@ -126,29 +128,27 @@
it 'updates ranking rules' do
task = index.update_ranking_rules(ranking_rules)
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
expect(index.ranking_rules).to eq(ranking_rules)
end
it 'updates ranking rules at null' do
task = index.update_ranking_rules(ranking_rules)
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
task = index.update_ranking_rules(nil)
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
expect(index.ranking_rules).to eq(default_ranking_rules)
end
it 'fails when updating with wrong ranking rules name' do
task = index.update_ranking_rules(wrong_ranking_rules)
- client.wait_for_task(task['uid'])
-
- task = index.task(task['uid'])
+ task = client.wait_for_task(task['taskUid'])
expect(task['type']).to eq('settingsUpdate')
expect(task.keys).to include('error')
@@ -157,12 +157,12 @@
it 'resets ranking rules' do
task = index.update_ranking_rules(ranking_rules)
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
task = index.reset_ranking_rules
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
expect(index.ranking_rules).to eq(default_ranking_rules)
end
@@ -181,9 +181,9 @@
it 'updates distinct attribute' do
task = index.update_distinct_attribute(distinct_attribute)
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
expect(index.distinct_attribute).to eq(distinct_attribute)
end
@@ -191,11 +191,11 @@
it 'updates distinct attribute at null' do
task = index.update_distinct_attribute(distinct_attribute)
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
task = index.update_distinct_attribute(nil)
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
expect(index.distinct_attribute).to be_nil
end
@@ -203,11 +203,11 @@
it 'resets distinct attribute' do
task = index.update_distinct_attribute(distinct_attribute)
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
task = index.reset_distinct_attribute
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
expect(index.distinct_attribute).to be_nil
end
@@ -226,21 +226,21 @@
it 'updates searchable attributes' do
task = index.update_searchable_attributes(searchable_attributes)
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
expect(index.searchable_attributes).to eq(searchable_attributes)
end
it 'updates searchable attributes at null' do
task = index.update_searchable_attributes(searchable_attributes)
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
task = index.update_searchable_attributes(nil)
expect(task['type']).to eq('settingsUpdate')
- expect(task).to have_key('uid')
- client.wait_for_task(task['uid'])
+
+ client.wait_for_task(task['taskUid'])
expect(index.searchable_attributes).to eq(default_searchable_attributes)
end
@@ -248,14 +248,14 @@
it 'resets searchable attributes' do
task = index.update_searchable_attributes(searchable_attributes)
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
task = index.reset_searchable_attributes
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
- expect(index.task(task['uid'])['status']).to eq('succeeded')
+ expect(index.task(task['taskUid'])['status']).to eq('succeeded')
expect(index.searchable_attributes).to eq(default_searchable_attributes)
end
end
@@ -273,9 +273,9 @@
it 'updates displayed attributes' do
task = index.update_displayed_attributes(displayed_attributes)
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
expect(index.displayed_attributes).to contain_exactly(*displayed_attributes)
end
@@ -283,12 +283,12 @@
it 'updates displayed attributes at null' do
task = index.update_displayed_attributes(displayed_attributes)
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
task = index.update_displayed_attributes(nil)
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
expect(index.displayed_attributes).to eq(default_displayed_attributes)
end
@@ -296,14 +296,14 @@
it 'resets displayed attributes' do
task = index.update_displayed_attributes(displayed_attributes)
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
task = index.reset_displayed_attributes
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
- expect(index.task(task['uid'])['status']).to eq('succeeded')
+ expect(index.task(task['taskUid'])['status']).to eq('succeeded')
expect(index.displayed_attributes).to eq(default_displayed_attributes)
end
end
@@ -329,9 +329,9 @@
it 'returns an uid when updating' do
task = index.update_synonyms(synonyms)
expect(task).to be_a(Hash)
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
end
it 'gets all the synonyms' do
@@ -370,9 +370,9 @@
task = index.reset_synonyms
expect(task).to be_a(Hash)
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
expect(index.synonyms).to be_a(Hash)
end.to(change { index.synonyms.length }.from(3).to(0))
@@ -395,14 +395,14 @@
it 'updates stop-words when the body is valid (as an array)' do
task = index.update_stop_words(stop_words_array)
expect(task).to be_a(Hash)
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
end
it 'gets list of stop-words' do
task = index.update_stop_words(stop_words_array)
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
settings = index.stop_words
expect(settings).to be_a(Array)
expect(settings).to contain_exactly(*stop_words_array)
@@ -411,9 +411,9 @@
it 'updates stop-words when the body is valid (as single string)' do
task = index.update_stop_words(stop_words_string)
expect(task).to be_a(Hash)
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
sw = index.stop_words
expect(sw).to be_a(Array)
expect(sw).to contain_exactly(stop_words_string)
@@ -422,12 +422,12 @@
it 'updates stop-words at null' do
task = index.update_stop_words(stop_words_string)
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
task = index.update_stop_words(nil)
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
expect(index.stop_words).to be_empty
end
@@ -441,13 +441,13 @@
it 'resets stop-words' do
task = index.update_stop_words(stop_words_string)
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
task = index.reset_stop_words
expect(task).to be_a(Hash)
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
expect(index.stop_words).to be_a(Array)
expect(index.stop_words).to be_empty
@@ -468,36 +468,36 @@
it 'updates filterable attributes' do
task = index.update_filterable_attributes(filterable_attributes)
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
expect(index.filterable_attributes).to contain_exactly(*filterable_attributes)
end
it 'updates filterable attributes at null' do
task = index.update_filterable_attributes(filterable_attributes)
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
task = index.update_filterable_attributes(nil)
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
expect(index.filterable_attributes).to be_empty
end
it 'resets filterable attributes' do
task = index.update_filterable_attributes(filterable_attributes)
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
task = index.reset_filterable_attributes
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
- expect(index.task(task['uid'])['status']).to eq('succeeded')
+ expect(index.task(task['taskUid'])['status']).to eq('succeeded')
expect(index.filterable_attributes).to be_empty
end
end
@@ -516,8 +516,8 @@
it 'updates sortable attributes' do
task = index.update_sortable_attributes(sortable_attributes)
- expect(task).to have_key('uid')
- client.wait_for_task(task['uid'])
+
+ client.wait_for_task(task['taskUid'])
expect(task['type']).to eq('settingsUpdate')
expect(index.sortable_attributes).to contain_exactly(*sortable_attributes)
end
@@ -525,12 +525,12 @@
it 'updates sortable attributes at null' do
task = index.update_sortable_attributes(sortable_attributes)
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
task = index.update_sortable_attributes(nil)
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
expect(index.sortable_attributes).to be_empty
end
@@ -538,14 +538,14 @@
it 'resets sortable attributes' do
task = index.update_sortable_attributes(sortable_attributes)
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
task = index.reset_sortable_attributes
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
- expect(index.task(task['uid'])['status']).to eq('succeeded')
+ expect(index.task(task['taskUid'])['status']).to eq('succeeded')
expect(index.sortable_attributes).to be_empty
end
end
@@ -572,9 +572,9 @@
rankingRules: ['title:asc', 'typo'],
distinctAttribute: 'title'
)
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
settings = index.settings
expect(settings['rankingRules']).to eq(['title:asc', 'typo'])
expect(settings['distinctAttribute']).to eq('title')
@@ -583,9 +583,9 @@
it 'updates one setting without reset the others' do
task = index.update_settings(stopWords: ['the'])
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
settings = index.settings
expect(settings['rankingRules']).to eq(default_ranking_rules)
expect(settings['distinctAttribute']).to be_nil
@@ -603,12 +603,12 @@
}
)
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
task = index.reset_settings
- expect(task).to have_key('uid')
+
expect(task['type']).to eq('settingsUpdate')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
settings = index.settings
expect(settings['rankingRules']).to eq(default_ranking_rules)
@@ -623,35 +623,35 @@
it 'does not add document when there is no primary-key' do
task = index.add_documents(title: 'Test')
- client.wait_for_task(task['uid'])
- task = index.task(task['uid'])
+ task = client.wait_for_task(task['taskUid'])
+
expect(task.keys).to include('error')
expect(task['error']['code']).to eq('primary_key_inference_failed')
end
it 'adds documents when there is a primary-key' do
task = index.add_documents(objectId: 1, title: 'Test')
- expect(task).to have_key('uid')
- client.wait_for_task(task['uid'])
- expect(index.documents.count).to eq(1)
+
+ client.wait_for_task(task['taskUid'])
+ expect(index.documents['results'].count).to eq(1)
end
it 'resets searchable/displayed attributes' do
task = index.update_displayed_attributes(['title', 'description'])
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
task = index.update_searchable_attributes(['title'])
- expect(task).to have_key('uid')
- client.wait_for_task(task['uid'])
+
+ client.wait_for_task(task['taskUid'])
task = index.reset_displayed_attributes
- expect(task).to have_key('uid')
- client.wait_for_task(task['uid'])
- expect(index.task(task['uid'])['status']).to eq('succeeded')
+
+ client.wait_for_task(task['taskUid'])
+ expect(index.task(task['taskUid'])['status']).to eq('succeeded')
task = index.reset_searchable_attributes
- expect(task).to have_key('uid')
- client.wait_for_task(task['uid'])
- expect(index.task(task['uid'])['status']).to eq('succeeded')
+
+ client.wait_for_task(task['taskUid'])
+ expect(index.task(task['taskUid'])['status']).to eq('succeeded')
expect(index.displayed_attributes).to eq(['*'])
expect(index.searchable_attributes).to eq(['*'])
@@ -678,6 +678,6 @@
def update_synonyms(index, synonyms)
task = index.update_synonyms(synonyms)
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
end
end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 93ffab34..59a8070a 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -35,8 +35,7 @@
'hits',
'offset',
'limit',
- 'nbHits',
- 'exhaustiveNbHits',
+ 'estimatedTotalHits',
'processingTimeMs',
'query'
].freeze
@@ -132,5 +131,5 @@
config.include IndexesHelpers
config.include TaskHelpers
config.include ExceptionsHelpers
- config.include DumpsHelpers
+ config.include KeysHelpers
end
diff --git a/spec/support/books_contexts.rb b/spec/support/books_contexts.rb
index fb67a676..5af79c28 100644
--- a/spec/support/books_contexts.rb
+++ b/spec/support/books_contexts.rb
@@ -16,7 +16,7 @@
before do
response = index.add_documents(documents)
- index.wait_for_task(response['uid'])
+ index.wait_for_task(response['taskUid'])
end
end
@@ -91,7 +91,7 @@
before do
response = index.add_documents(documents)
- index.wait_for_task(response['uid'])
+ index.wait_for_task(response['taskUid'])
end
end
@@ -156,6 +156,6 @@
before do
response = index.add_documents(documents)
- index.wait_for_task(response['uid'])
+ index.wait_for_task(response['taskUid'])
end
end
diff --git a/spec/support/default_shared_context.rb b/spec/support/default_shared_context.rb
index c7eee7b8..7cc04769 100644
--- a/spec/support/default_shared_context.rb
+++ b/spec/support/default_shared_context.rb
@@ -5,6 +5,7 @@
before do
clear_all_indexes(client)
+ clear_all_keys(client)
end
def random_uid
diff --git a/spec/support/dumps_helpers.rb b/spec/support/dumps_helpers.rb
deleted file mode 100644
index 13ed1034..00000000
--- a/spec/support/dumps_helpers.rb
+++ /dev/null
@@ -1,16 +0,0 @@
-# frozen_string_literal: true
-
-module DumpsHelpers
- def wait_for_dump_creation(client, dump_uid, timeout_in_ms = 5000, interval_in_ms = 50)
- Timeout.timeout(timeout_in_ms.to_f / 1000) do
- loop do
- dump_status = client.dump_status(dump_uid)
- return dump_status if dump_status['status'] != 'in_progress'
-
- sleep interval_in_ms.to_f / 1000
- end
- end
- rescue Timeout::Error
- raise MeiliSearch::TimeoutError
- end
-end
diff --git a/spec/support/indexes_helpers.rb b/spec/support/indexes_helpers.rb
index a981ec16..f562ae81 100644
--- a/spec/support/indexes_helpers.rb
+++ b/spec/support/indexes_helpers.rb
@@ -3,7 +3,7 @@
module IndexesHelpers
def clear_all_indexes(client)
indexes = client.indexes
- uids = indexes.map(&:uid)
+ uids = indexes['results'].map(&:uid)
uids.each do |uid|
client.delete_index(uid)
end
diff --git a/spec/support/keys_helpers.rb b/spec/support/keys_helpers.rb
new file mode 100644
index 00000000..f0f939aa
--- /dev/null
+++ b/spec/support/keys_helpers.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+module KeysHelpers
+ def clear_all_keys(client)
+ client.keys['results']
+ .map { |h| h['uid'] }
+ .each { |uid| client.delete_key(uid) }
+ end
+end
diff --git a/spec/support/task_helpers.rb b/spec/support/task_helpers.rb
index 0ebc4a90..6d399618 100644
--- a/spec/support/task_helpers.rb
+++ b/spec/support/task_helpers.rb
@@ -2,8 +2,8 @@
module TaskHelpers
def wait_for_it(task)
- raise('The param `task` does not have an uid key.') unless task.key?('uid')
+ raise('The param `task` does not have a taskUid key.') unless task.key?('taskUid')
- client.wait_for_task(task['uid'])
+ client.wait_for_task(task['taskUid'])
end
end