
Adding upstream version 0.31.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-05-18 21:42:39 +02:00
parent 091495b2f3
commit 5d4914ed7f
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
61 changed files with 30627 additions and 0 deletions

@@ -0,0 +1,957 @@
# This code-samples file is used by the Meilisearch documentation
# Every example written here will be automatically fetched by
# the documentation on build
# You can read more on https://github.com/meilisearch/documentation/tree/master/.vuepress/code-samples
---
create_snapshot_1: |-
client.CreateSnapshot()
get_one_index_1: |-
client.GetIndex("movies")
list_all_indexes_1: |-
client.GetIndexes(&meilisearch.IndexesQuery{
Limit: 3,
})
create_an_index_1: |-
client.CreateIndex(&meilisearch.IndexConfig{
Uid: "movies",
PrimaryKey: "id",
})
update_an_index_1: |-
client.Index("movies").UpdateIndex("id")
delete_an_index_1: |-
client.DeleteIndex("movies")
// OR
client.Index("movies").Delete()
swap_indexes_1: |-
client.SwapIndexes([]meilisearch.SwapIndexesParams{
{Indexes: []string{"indexA", "indexB"}},
{Indexes: []string{"indexX", "indexY"}},
})
get_one_document_1: |-
var a interface{}
client.Index("movies").GetDocument("25684",&meilisearch.DocumentQuery{
Fields: []string{"id", "title", "poster", "release_date"},
}, &a)
get_documents_1: |-
var result meilisearch.DocumentsResult
client.Index("movies").GetDocuments(&meilisearch.DocumentsQuery{
Limit: 2,
Filter: "genres = action",
}, &result)
get_documents_post_1: |-
var result meilisearch.DocumentsResult
client.Index("books").GetDocuments(&meilisearch.DocumentsQuery{
Fields: []string{"title", "genres", "rating", "language"},
Filter: "(rating > 3 AND (genres = Adventure OR genres = Fiction)) AND language = English",
}, &result)
add_or_replace_documents_1: |-
documents := []map[string]interface{}{
{
"id": 287947,
"title": "Shazam",
"poster": "https://image.tmdb.org/t/p/w1280/xnopI5Xtky18MPhK40cZAGAOVeV.jpg",
"overview": "A boy is given the ability to become an adult superhero in times of need with a single magic word.",
"release_date": "2019-03-23",
},
}
client.Index("movies").AddDocuments(documents)
add_or_update_documents_1: |-
documents := []map[string]interface{}{
{
"id": 287947,
"title": "Shazam ⚡️",
"genres": "comedy",
},
}
client.Index("movies").UpdateDocuments(documents)
delete_all_documents_1: |-
client.Index("movies").DeleteAllDocuments()
delete_one_document_1: |-
client.Index("movies").DeleteDocument("25684")
delete_documents_by_batch_1: |-
client.Index("movies").DeleteDocuments([]string{
"23488",
"153738",
"437035",
"363869",
})
delete_documents_by_filter_1: |-
client.Index("movies").DeleteDocumentsByFilter("genres=action OR genres=adventure")
search_post_1: |-
client.Index("movies").Search("american ninja", &meilisearch.SearchRequest{})
multi_search_1: |-
client.MultiSearch(&MultiSearchRequest{
Queries: []SearchRequest{
{
IndexUID: "movies",
Query: "pooh",
Limit: 5,
},
{
IndexUID: "movies",
Query: "nemo",
Limit: 5,
},
{
IndexUID: "movie_ratings",
Query: "us",
},
},
})
get_experimental_features_1: |-
client.ExperimentalFeatures().Get()
update_experimental_features_1: |-
client.ExperimentalFeatures().SetMetrics(true).Update()
facet_search_1: |-
client.Index("books").FacetSearch(&meilisearch.FacetSearchRequest{
FacetQuery: "fiction",
FacetName: "genres",
Filter: "rating > 3",
})
facet_search_3: |-
client.Index("books").FacetSearch(&meilisearch.FacetSearchRequest{
FacetQuery: "c",
FacetName: "genres",
})
delete_tasks_1: |-
client.DeleteTasks(&meilisearch.DeleteTasksQuery{
UIDS: []int64{1, 2},
})
cancel_tasks_1: |-
client.CancelTasks(&meilisearch.CancelTasksQuery{
UIDS: []int64{1, 2},
})
get_task_1: |-
client.GetTask(1)
get_all_tasks_1: |-
client.GetTasks(nil)
get_all_tasks_paginating_1: |-
client.GetTasks(&meilisearch.TasksQuery{
Limit: 2,
From: 10,
})
get_all_tasks_paginating_2: |-
client.GetTasks(&meilisearch.TasksQuery{
Limit: 2,
From: 8,
})
async_guide_filter_by_date_1: |-
client.GetTasks(&meilisearch.TasksQuery{
AfterEnqueuedAt: time.Date(2020, time.October, 11, 11, 49, 53, 0, time.UTC),
})
async_guide_multiple_filters_1: |-
client.GetTasks(&meilisearch.TasksQuery{
IndexUIDS: []string{"movie"},
Types: []meilisearch.TaskType{
meilisearch.TaskTypeDocumentAdditionOrUpdate,
meilisearch.TaskTypeDocumentDeletion,
},
Statuses: []meilisearch.TaskStatus{
meilisearch.TaskStatusProcessing,
},
})
async_guide_filter_by_ids_1: |-
client.GetTasks(&meilisearch.TasksQuery{
UIDS: []int64{5, 10, 13},
})
async_guide_filter_by_statuses_1: |-
client.GetTasks(&meilisearch.TasksQuery{
Statuses: []meilisearch.TaskStatus{
meilisearch.TaskStatusFailed,
meilisearch.TaskStatusCanceled,
},
})
async_guide_filter_by_types_1: |-
client.GetTasks(&meilisearch.TasksQuery{
Types: []meilisearch.TaskType{
meilisearch.TaskTypeDumpCreation,
meilisearch.TaskTypeIndexSwap,
},
})
async_guide_filter_by_index_uids_1: |-
client.GetTasks(&meilisearch.TasksQuery{
IndexUIDS: []string{"movie"},
})
async_guide_canceled_by_1: |-
client.GetTasks(&meilisearch.TasksQuery{
CanceledBy: []int64{9, 15},
})
get_one_key_1: |-
client.GetKey("6062abda-a5aa-4414-ac91-ecd7944c0f8d")
get_all_keys_1: |-
client.GetKeys(&meilisearch.KeysQuery{
Limit: 3,
})
create_a_key_1: |-
client.CreateKey(&meilisearch.Key{
Description: "Add documents: Products API key",
Actions: []string{"documents.add"},
Indexes: []string{"products"},
ExpiresAt: time.Date(2042, time.April, 02, 0, 42, 42, 0, time.UTC),
})
update_a_key_1: |-
client.UpdateKey("6062abda-a5aa-4414-ac91-ecd7944c0f8d", &meilisearch.Key{
Description: "Manage documents: Products/Reviews API key",
Actions: []string{"documents.add", "document.delete"},
Indexes: []string{"products", "reviews"},
ExpiresAt: time.Date(2042, time.April, 02, 0, 42, 42, 0, time.UTC),
})
delete_a_key_1: |-
client.DeleteKey("6062abda-a5aa-4414-ac91-ecd7944c0f8d")
get_settings_1: |-
client.Index("movies").GetSettings()
update_settings_1: |-
distinctAttribute := "movie_id"
settings := meilisearch.Settings{
RankingRules: []string{
"words",
"typo",
"proximity",
"attribute",
"sort",
"exactness",
"release_date:desc",
"rank:desc",
},
DistinctAttribute: &distinctAttribute,
SearchableAttributes: []string{
"title",
"overview",
"genres",
},
DisplayedAttributes: []string{
"title",
"overview",
"genres",
"release_date",
},
StopWords: []string{
"the",
"a",
"an",
},
SortableAttributes: []string{
"title",
"release_date",
},
Synonyms: map[string][]string{
"wolverine": []string{"xmen", "logan"},
"logan": []string{"wolverine"},
},
TypoTolerance: &meilisearch.TypoTolerance{
MinWordSizeForTypos: meilisearch.MinWordSizeForTypos{
OneTypo: 8,
TwoTypos: 10,
},
DisableOnAttributes: []string{"title"},
},
Pagination: &meilisearch.Pagination{
MaxTotalHits: 5000,
},
Faceting: &meilisearch.Faceting{
MaxValuesPerFacet: 200,
},
SearchCutoffMs: 150,
}
client.Index("movies").UpdateSettings(&settings)
reset_settings_1: |-
client.Index("movies").ResetSettings()
get_synonyms_1: |-
client.Index("movies").GetSynonyms()
update_synonyms_1: |-
synonyms := map[string][]string{
"wolverine": []string{"xmen", "logan"},
"logan": []string{"wolverine", "xmen"},
"wow": []string{"world of warcraft"},
}
client.Index("movies").UpdateSynonyms(&synonyms)
reset_synonyms_1: |-
client.Index("movies").ResetSynonyms()
get_stop_words_1: |-
client.Index("movies").GetStopWords()
update_stop_words_1: |-
stopWords := []string{"of", "the", "to"}
client.Index("movies").UpdateStopWords(&stopWords)
reset_stop_words_1: |-
client.Index("movies").ResetStopWords()
get_ranking_rules_1: |-
client.Index("movies").GetRankingRules()
update_ranking_rules_1: |-
rankingRules := []string{
"words",
"typo",
"proximity",
"attribute",
"sort",
"exactness",
"release_date:asc",
"rank:desc",
}
client.Index("movies").UpdateRankingRules(&rankingRules)
reset_ranking_rules_1: |-
client.Index("movies").ResetRankingRules()
get_distinct_attribute_1: |-
client.Index("shoes").GetDistinctAttribute()
update_distinct_attribute_1: |-
client.Index("shoes").UpdateDistinctAttribute("skuid")
reset_distinct_attribute_1: |-
client.Index("shoes").ResetDistinctAttribute()
get_filterable_attributes_1: |-
client.Index("movies").GetFilterableAttributes()
update_filterable_attributes_1: |-
filterableAttributes := []string{
"genres",
"director",
}
client.Index("movies").UpdateFilterableAttributes(&filterableAttributes)
reset_filterable_attributes_1: |-
client.Index("movies").ResetFilterableAttributes()
get_searchable_attributes_1: |-
client.Index("movies").GetSearchableAttributes()
update_searchable_attributes_1: |-
searchableAttributes := []string{
"title",
"overview",
"genres",
}
client.Index("movies").UpdateSearchableAttributes(&searchableAttributes)
reset_searchable_attributes_1: |-
client.Index("movies").ResetSearchableAttributes()
get_displayed_attributes_1: |-
client.Index("movies").GetDisplayedAttributes()
update_displayed_attributes_1: |-
displayedAttributes := []string{
"title",
"overview",
"genres",
"release_date",
}
client.Index("movies").UpdateDisplayedAttributes(&displayedAttributes)
reset_displayed_attributes_1: |-
client.Index("movies").ResetDisplayedAttributes()
get_sortable_attributes_1: |-
client.Index("books").GetSortableAttributes()
update_sortable_attributes_1: |-
sortableAttributes := []string{
"price",
"author",
}
client.Index("books").UpdateSortableAttributes(&sortableAttributes)
reset_sortable_attributes_1: |-
client.Index("books").ResetSortableAttributes()
getting_started_typo_tolerance: |-
client.index("movies").UpdateTypoTolerance({
MinWordSizeForTypos: meilisearch.MinWordSizeForTypos{
OneTypo: 4,
},
})
get_typo_tolerance_1: |-
client.Index("books").GetTypoTolerance()
update_typo_tolerance_1: |-
client.Index("books").UpdateTypoTolerance(&meilisearch.TypoTolerance{
MinWordSizeForTypos: meilisearch.MinWordSizeForTypos{
OneTypo: 4,
TwoTypos: 10,
},
DisableOnAttributes: []string{"title"},
})
reset_typo_tolerance_1: |-
client.Index("books").ResetTypoTolerance()
get_pagination_settings_1: |-
client.Index("books").GetPagination()
update_pagination_settings_1: |-
client.Index("books").UpdatePagination(&meilisearch.Pagination{
MaxTotalHits: 100,
})
reset_pagination_settings_1: |-
client.Index("books").ResetPagination()
get_faceting_settings_1: |-
client.Index("books").GetFaceting()
update_faceting_settings_1: |-
client.Index("books").UpdateFaceting(&meilisearch.Faceting{
MaxValuesPerFacet: 2,
SortFacetValuesBy: map[string]meilisearch.SortFacetType{
"*": meilisearch.SortFacetTypeAlpha,
"genres": meilisearch.SortFacetTypeCount,
},
})
reset_faceting_settings_1: |-
client.Index("books").ResetFaceting()
facet_search_2: |-
client.Index("books").UpdateFaceting(&meilisearch.Faceting{
SortFacetValuesBy: map[string]meilisearch.SortFacetType{
"genres": meilisearch.SortFacetTypeCount,
},
})
get_index_stats_1: |-
client.Index("movies").GetStats()
get_indexes_stats_1: |-
client.GetStats()
get_health_1: |-
client.Health()
get_version_1: |-
client.GetVersion()
distinct_attribute_guide_1: |-
client.Index("jackets").UpdateDistinctAttribute("product_id")
field_properties_guide_searchable_1: |-
searchableAttributes := []string{
"title",
"overview",
"genres",
}
client.Index("movies").UpdateSearchableAttributes(&searchableAttributes)
field_properties_guide_displayed_1: |-
displayedAttributes := []string{
"title",
"overview",
"genres",
"release_date",
}
client.Index("movies").UpdateDisplayedAttributes(&displayedAttributes)
filtering_guide_1: |-
resp, err := client.Index("movie_ratings").Search("Avengers", &meilisearch.SearchRequest{
Filter: "release_date > \"795484800\"",
})
filtering_guide_2: |-
resp, err := client.Index("movie_ratings").Search("Batman", &meilisearch.SearchRequest{
Filter: "release_date > 795484800 AND (director = \"Tim Burton\" OR director = \"Christopher Nolan\")",
})
filtering_guide_3: |-
resp, err := client.Index("movie_ratings").Search("Planet of the Apes", &meilisearch.SearchRequest{
Filter: "release_date > 1577884550 AND (NOT director = \"Tim Burton\")",
})
filtering_guide_nested_1: |-
resp, err := client.Index("movie_ratings").Search("thriller", &meilisearch.SearchRequest{
Filter: "rating.users >= 90",
})
search_parameter_guide_query_1: |-
resp, err := client.Index("movies").Search("shifu", &meilisearch.SearchRequest{})
search_parameter_guide_offset_1: |-
resp, err := client.Index("movies").Search("shifu", &meilisearch.SearchRequest{
Offset: 1,
})
search_parameter_guide_limit_1: |-
resp, err := client.Index("movies").Search("shifu", &meilisearch.SearchRequest{
Limit: 2,
})
search_parameter_guide_retrieve_1: |-
resp, err := client.Index("movies").Search("shifu", &meilisearch.SearchRequest{
AttributesToRetrieve: []string{"overview", "title"},
})
search_parameter_guide_crop_1: |-
resp, err := client.Index("movies").Search("shifu", &meilisearch.SearchRequest{
AttributesToCrop: []string{"overview"},
CropLength: 5,
})
search_parameter_guide_crop_marker_1: |-
resp, err := client.Index("movies").Search("shifu", &meilisearch.SearchRequest{
AttributesToCrop: []string{"overview"},
CropMarker: "[…]",
})
search_parameter_guide_highlight_1: |-
resp, err := client.Index("movies").Search("winter feast", &meilisearch.SearchRequest{
AttributesToHighlight: []string{"overview"},
})
search_parameter_guide_highlight_tag_1: |-
resp, err := client.Index("movies").Search("winter feast", &meilisearch.SearchRequest{
AttributesToHighlight: []string{"overview"},
HighlightPreTag: "<span class=\"highlight\">",
HighlightPostTag: "</span>",
})
search_parameter_guide_show_matches_position_1: |-
resp, err := client.Index("movies").Search("winter feast", &meilisearch.SearchRequest{
ShowMatchesPosition: true,
})
search_parameter_guide_show_ranking_score_1: |-
resp, err := client.Index("movies").Search("dragon", &meilisearch.SearchRequest{
ShowRankingScore: true,
})
search_parameter_guide_show_ranking_score_details_1: |-
resp, err := client.Index("movies").Search("dragon", &meilisearch.SearchRequest{
ShowRankingScoreDetails: true,
})
search_parameter_guide_matching_strategy_1: |-
resp, err := client.Index("movies").Search("big fat liar", &meilisearch.SearchRequest{
MatchingStrategy: meilisearch.Last,
})
search_parameter_guide_matching_strategy_2: |-
resp, err := client.Index("movies").Search("big fat liar", &meilisearch.SearchRequest{
MatchingStrategy: meilisearch.All,
})
search_parameter_guide_matching_strategy_3: |-
client.Index("movies").Search("white shirt", &meilisearch.SearchRequest{
MatchingStrategy: meilisearch.Frequency,
})
search_parameter_guide_hitsperpage_1: |-
client.Index("movies").Search("", &meilisearch.SearchRequest{
HitsPerPage: 15,
})
search_parameter_guide_page_1: |-
client.Index("movies").Search("", &meilisearch.SearchRequest{
Page: 2,
})
search_parameter_guide_facet_stats_1: |-
client.Index("movie_ratings").Search("Batman", &meilisearch.SearchRequest{
Facets: []string{
"genres",
"rating",
},
})
search_parameter_guide_attributes_to_search_on_1: |-
resp, err := client.Index("movies").Search("adventure", &meilisearch.SearchRequest{
AttributesToSearchOn: []string{"overview"},
})
typo_tolerance_guide_1: |-
client.Index("movies").UpdateTypoTolerance(&meilisearch.TypoTolerance{
Enabled: false,
})
typo_tolerance_guide_2: |-
client.Index("movies").UpdateTypoTolerance(&meilisearch.TypoTolerance{
DisableOnAttributes: []string{"title"},
})
typo_tolerance_guide_3: |-
client.Index("movies").UpdateTypoTolerance(&meilisearch.TypoTolerance{
DisableOnWords: []string{"shrek"},
})
typo_tolerance_guide_4: |-
client.Index("movies").UpdateTypoTolerance(&meilisearch.TypoTolerance{
MinWordSizeForTypos: meilisearch.MinWordSizeForTypos{
OneTypo: 4,
TwoTypos: 10,
},
})
add_movies_json_1: |-
import (
"encoding/json"
"os"
)
file, _ := os.ReadFile("movies.json")
var movies interface{}
json.Unmarshal([]byte(file), &movies)
client.Index("movies").AddDocuments(&movies)
landing_getting_started_1: |-
client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey"))
documents := []map[string]interface{}{
{ "id": 1, "title": "Carol" },
{ "id": 2, "title": "Wonder Woman" },
{ "id": 3, "title": "Life of Pi" },
{ "id": 4, "title": "Mad Max: Fury Road" },
{ "id": 5, "title": "Moana" },
{ "id": 6, "title": "Philadelphia" },
}
client.Index("movies").AddDocuments(documents)
getting_started_check_task_status: |-
client.GetTask(0)
getting_started_add_documents: |-
// In the command line:
// go get -u github.com/meilisearch/meilisearch-go
// In your .go file:
package main
import (
"os"
"encoding/json"
"io"
"github.com/meilisearch/meilisearch-go"
)
func main() {
client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey"))
jsonFile, _ := os.Open("movies.json")
defer jsonFile.Close()
byteValue, _ := io.ReadAll(jsonFile)
var movies []map[string]interface{}
json.Unmarshal(byteValue, &movies)
_, err := client.Index("movies").AddDocuments(movies)
if err != nil {
panic(err)
}
}
getting_started_search: |-
client.Index("movies").Search("botman", &meilisearch.SearchRequest{})
getting_started_add_meteorites: |-
client := meilisearch.New("http://localhost:7700")
jsonFile, _ := os.Open("meteorites.json")
defer jsonFile.Close()
byteValue, _ := io.ReadAll(jsonFile)
var meteorites []map[string]interface{}
json.Unmarshal(byteValue, &meteorites)
client.Index("meteorites").AddDocuments(meteorites)
getting_started_update_ranking_rules: |-
rankingRules := []string{
"exactness",
"words",
"typo",
"proximity",
"attribute",
"sort",
"release_date:asc",
"rank:desc",
}
client.Index("movies").UpdateRankingRules(&rankingRules)
getting_started_update_displayed_attributes: |-
displayedAttributes := []string{
"title",
"overview",
"poster",
}
client.Index("movies").UpdateDisplayedAttributes(&displayedAttributes)
getting_started_update_searchable_attributes: |-
searchableAttributes := []string{
"title",
}
client.Index("movies").UpdateSearchableAttributes(&searchableAttributes)
getting_started_update_stop_words: |-
stopWords := []string{"the"}
client.Index("movies").UpdateStopWords(&stopWords)
getting_started_synonyms: |-
synonyms := map[string][]string{
"winnie": []string{"piglet"},
"piglet": []string{"winnie"},
}
client.Index("movies").UpdateSynonyms(&synonyms)
getting_started_filtering: |-
resp, err := client.Index("meteorites").Search("", &meilisearch.SearchRequest{
Filter: "mass < 200",
})
getting_started_geo_radius: |-
resp, err := client.Index("meteorites").Search("", &meilisearch.SearchRequest{
Filter: "_geoRadius(46.9480, 7.4474, 210000)",
})
getting_started_geo_point: |-
resp, err := client.Index("meteorites").Search("", &meilisearch.SearchRequest{
Sort: []string{
"_geoPoint(48.8583701,2.2922926):asc",
},
})
getting_started_sorting: |-
resp, err := client.Index("meteorites").Search("", &meilisearch.SearchRequest{
Filter: "mass < 200",
Sort: []string{
"mass:asc",
},
})
getting_started_configure_settings: |-
settings := meilisearch.Settings{
FilterableAttributes: []string{
"mass",
"_geo",
},
SortableAttributes: []string{
"mass",
"_geo",
},
}
client.Index("meteorites").UpdateSettings(&settings)
getting_started_faceting: |-
client.Index("movies").UpdateFaceting(&meilisearch.Faceting{
MaxValuesPerFacet: 2,
SortFacetValuesBy: map[string]meilisearch.SortFacetType{
"*": meilisearch.SortFacetTypeCount,
},
})
getting_started_pagination: |-
client.Index("movies").UpdatePagination(&meilisearch.Pagination{
MaxTotalHits: 500,
})
filtering_update_settings_1: |-
resp, err := client.Index("movies").UpdateFilterableAttributes(&[]string{
"director",
"genres",
})
faceted_search_walkthrough_filter_1: |-
resp, err := client.Index("movies").Search("thriller", &meilisearch.SearchRequest{
Filter: [][]string{
[]string{"genres = Horror", "genres = Mystery"},
[]string{"director = \"Jordan Peele\""},
},
})
faceted_search_update_settings_1: |-
filterableAttributes := []string{
"genres",
"rating",
"language",
}
client.Index("movie_ratings").UpdateFilterableAttributes(&filterableAttributes)
faceted_search_1: |-
resp, err := client.Index("books").Search("classic", &meilisearch.SearchRequest{
Facets: []string{
"genres",
"rating",
"language",
},
})
post_dump_1: |-
resp, err := client.CreateDump()
phrase_search_1: |-
resp, err := client.Index("movies").Search("\"african american\" horror", &meilisearch.SearchRequest{})
sorting_guide_update_sortable_attributes_1: |-
sortableAttributes := []string{
"author",
"price",
}
client.Index("books").UpdateSortableAttributes(&sortableAttributes)
sorting_guide_update_ranking_rules_1: |-
rankingRules := []string{
"words",
"sort",
"typo",
"proximity",
"attribute",
"exactness",
}
client.Index("books").UpdateRankingRules(&rankingRules)
sorting_guide_sort_parameter_1: |-
resp, err := client.Index("books").Search("science fiction", &meilisearch.SearchRequest{
Sort: []string{
"price:asc",
},
})
sorting_guide_sort_parameter_2: |-
resp, err := client.Index("books").Search("butler", &meilisearch.SearchRequest{
Sort: []string{
"author:desc",
},
})
sorting_guide_sort_nested_1: |-
resp, err := client.Index("books").Search("science fiction", &meilisearch.SearchRequest{
Sort: []string{
"rating.users:asc",
},
})
search_parameter_guide_sort_1: |-
resp, err := client.Index("books").Search("science fiction", &meilisearch.SearchRequest{
Sort: []string{
"price:asc",
},
})
geosearch_guide_filter_settings_1: |-
filterableAttributes := []string{
"_geo",
}
client.Index("restaurants").UpdateFilterableAttributes(&filterableAttributes)
geosearch_guide_filter_usage_1: |-
resp, err := client.Index("restaurants").Search("", &meilisearch.SearchRequest{
Filter: "_geoRadius(45.472735, 9.184019, 2000)",
})
geosearch_guide_filter_usage_2: |-
resp, err := client.Index("restaurants").Search("", &meilisearch.SearchRequest{
Filter: "_geoRadius(45.472735, 9.184019, 2000) AND type = pizza",
})
geosearch_guide_sort_settings_1: |-
sortableAttributes := []string{
"_geo",
}
client.Index("restaurants").UpdateSortableAttributes(&sortableAttributes)
geosearch_guide_sort_usage_1: |-
resp, err := client.Index("restaurants").Search("", &meilisearch.SearchRequest{
Sort: []string{
"_geoPoint(48.8561446,2.2978204):asc",
},
})
geosearch_guide_sort_usage_2: |-
resp, err := client.Index("restaurants").Search("", &meilisearch.SearchRequest{
Sort: []string{
"_geoPoint(48.8561446,2.2978204):asc",
"rating:desc",
},
})
geosearch_guide_filter_usage_3: |-
client.Index("restaurants").Search("", &meilisearch.SearchRequest{
Filter: "_geoBoundingBox([45.494181, 9.214024], [45.449484, 9.179175])",
})
primary_field_guide_create_index_primary_key: |-
client.CreateIndex(&meilisearch.IndexConfig{
Uid: "books",
PrimaryKey: "reference_number",
})
primary_field_guide_add_document_primary_key: |-
documents := []map[string]interface{}{
{
"reference_number": 287947,
"title": "Diary of a Wimpy Kid",
"author": "Jeff Kinney",
"genres": []string{"comedy", "humor"},
"price": 5.00,
},
}
client.Index("books").AddDocuments(documents, "reference_number")
primary_field_guide_update_document_primary_key: |-
client.Index("books").UpdateIndex("title")
security_guide_search_key_1: |-
client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey"))
client.Index("patient_medical_records").Search();
security_guide_update_key_1: |-
client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey"))
client.UpdateKey("74c9c733-3368-4738-bbe5-1d18a5fecb37", &meilisearch.Key{
Indexes: []string{"doctors"},
})
security_guide_create_key_1: |-
client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey"))
client.CreateKey(&meilisearch.Key{
Description: "Search patient records key",
Actions: []string{"search"},
Indexes: []string{"patient_medical_records"},
ExpiresAt: time.Date(2042, time.April, 02, 0, 42, 42, 0, time.UTC),
})
security_guide_list_keys_1: |-
client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey"))
client.GetKeys(nil)
security_guide_delete_key_1: |-
client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey"))
client.DeleteKey("74c9c733-3368-4738-bbe5-1d18a5fecb37")
authorization_header_1: |-
client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey"))
client.GetKeys(nil)
tenant_token_guide_generate_sdk_1: |-
searchRules := map[string]interface{}{
"patient_medical_records": map[string]string{
"filter": "user_id = 1",
},
}
options := &meilisearch.TenantTokenOptions{
APIKey: "B5KdX2MY2jV6EXfUs6scSfmC...",
ExpiresAt: time.Date(2025, time.December, 20, 0, 0, 0, 0, time.UTC),
}
token, err := client.GenerateTenantToken(searchRules, options)
tenant_token_guide_search_sdk_1: |-
client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey"))
client.Index("patient_medical_records").Search("blood test", &meilisearch.SearchRequest{});
synonyms_guide_1: |-
synonyms := map[string][]string{
"great": []string{"fantastic"},
"fantastic": []string{"great"},
}
client.Index("movies").UpdateSynonyms(&synonyms)
date_guide_index_1: |-
jsonFile, _ := os.Open("games.json")
defer jsonFile.Close()
byteValue, _ := io.ReadAll(jsonFile)
var games []map[string]interface{}
json.Unmarshal(byteValue, &games)
client.Index("games").AddDocuments(games)
date_guide_filterable_attributes_1: |-
filterableAttributes := []string{"release_timestamp"}
client.Index("games").UpdateFilterableAttributes(&filterableAttributes)
date_guide_filter_1: |-
client.Index("games").Search("", &meilisearch.SearchRequest{
Filter: "release_timestamp >= 1514761200 AND release_timestamp < 1672527600",
})
date_guide_sortable_attributes_1: |-
sortableAttributes := []string{"release_timestamp","author"}
client.Index("games").UpdateSortableAttributes(&sortableAttributes)
date_guide_sort_1: |-
client.Index("games").Search("", &meilisearch.SearchRequest{
Sort: []string{
"release_timestamp:desc",
},
})
search_parameter_reference_distinct_1: |-
client.Index("INDEX_NAME").Search("QUERY TERMS", &meilisearch.SearchRequest{
Distinct: "ATTRIBUTE_A",
})
distinct_attribute_guide_distinct_parameter_1: |-
client.Index("products").Search("white shirt", &meilisearch.SearchRequest{
Distinct: "sku",
})
distinct_attribute_guide_filterable_1: |-
filterableAttributes := []string{
"product_id",
"sku",
"url",
}
client.Index("products").UpdateFilterableAttributes(&filterableAttributes)
get_similar_post_1: |-
resp := new(meilisearch.SimilarDocumentResult)
client.Index("INDEX_NAME").SearchSimilarDocuments(&meilisearch.SimilarDocumentQuery{
Id: "TARGET_DOCUMENT_ID",
Embedder: "default",
}, resp)
search_parameter_reference_ranking_score_threshold_1: |-
client.Index("INDEX_NAME").Search("badman", &meilisearch.SearchRequest{
RankingScoreThreshold: 0.2,
})
get_search_cutoff_1: |-
client.Index("movies").GetSearchCutoffMs()
update_search_cutoff_1: |-
client.Index("movies").UpdateSearchCutoffMs(150)
reset_search_cutoff_1: |-
client.Index("books").ResetSearchCutoffMs()
get_dictionary_1: |-
client.Index("books").GetDictionary()
update_dictionary_1: |-
client.Index("books").UpdateDictionary([]string{
"J. R. R.",
"W. E. B.",
})
reset_dictionary_1: |-
client.Index("books").ResetDictionary()
get_separator_tokens_1: |-
client.Index("articles").GetSeparatorTokens()
update_separator_tokens_1: |-
client.Index("articles").UpdateSeparatorTokens([]string{
"|",
"&hellip;",
})
reset_separator_tokens_1: |-
client.Index("articles").ResetSeparatorTokens()
get_non_separator_tokens_1: |-
client.Index("articles").GetNonSeparatorTokens()
update_non_separator_tokens_1: |-
client.Index("articles").UpdateNonSeparatorTokens([]string{
"@",
"#",
})
reset_non_separator_tokens_1: |-
client.Index("articles").ResetNonSeparatorTokens()
get_proximity_precision_settings_1: |-
client.Index("books").GetProximityPrecision()
update_proximity_precision_settings_1: |-
client.Index("books").UpdateProximityPrecision(ByAttribute)
reset_proximity_precision_settings_1: |-
client.Index("books").ResetProximityPrecision()
search_parameter_reference_locales_1: |-
client.index("INDEX_NAME").Search("QUERY TEXT IN JAPANESE", &meilisearch.SearchRequest{
Locates: []string{"jpn"}
})
get_localized_attribute_settings_1: |-
client.index("INDEX_NAME").GetLocalizedAttributes()
update_localized_attribute_settings_1: |-
client.index("INDEX_NAME").UpdateLocalizedAttributes([]*LocalizedAttributes{
{ AttributePatterns: ["*_ja"], Locales: ["jpn"] },
})
reset_localized_attribute_settings_1: |-
client.index("INDEX_NAME").ResetLocalizedAttributes()
get_facet_search_settings_1: |-
client.Index("books").GetFacetSearch()
update_facet_search_settings_1: |-
client.Index("books").UpdateFacetSearch(false)
reset_facet_search_settings_1: |-
client.Index("books").ResetFacetSearch()
get_prefix_search_settings_1: |-
client.Index("books").GetPrefixSearch()
update_prefix_search_settings_1: |-
client.Index("books").UpdatePrefixSearch("disabled")
reset_prefix_search_settings_1: |-
client.Index("books").ResetPrefixSearch()

.editorconfig Normal file

@@ -0,0 +1,24 @@
# see http://editorconfig.org/
root = true
[*]
trim_trailing_whitespace = true
insert_final_newline = true
end_of_line = lf
charset = utf-8
tab_width = 4
indent_style = space
[{Makefile,go.mod,go.sum,*.go,.gitmodules}]
indent_style = tab
indent_size = 8
[*.{yaml,yml}]
indent_size = 2
indent_style = space
[*.md]
indent_style = space
max_line_length = 80
indent_size = 4

.github/ISSUE_TEMPLATE/bug_report.md vendored Normal file

@@ -0,0 +1,26 @@
---
name: Bug Report 🐞
about: Create a report to help us improve.
title: ''
labels: ''
assignees: ''
---
<!-- This is not an exhaustive model but a help. No step is mandatory. -->
**Description**
Description of what the bug is about.
**Expected behavior**
What you expected to happen.
**Current behavior**
What happened.
**Screenshots or Logs**
If applicable, add screenshots or logs to help explain your problem.
**Environment (please complete the following information):**
- OS: [e.g. Debian GNU/Linux]
- Meilisearch version: [e.g. v0.20.0]
- meilisearch-go version: [e.g v0.14.1]

.github/ISSUE_TEMPLATE/config.yml vendored Normal file

@@ -0,0 +1,5 @@
blank_issues_enabled: false
contact_links:
- name: Support questions & other
url: https://discord.meilisearch.com/
about: Support is not handled here but on our Discord

@@ -0,0 +1,18 @@
---
name: Feature Request & Enhancement 💡
about: Suggest a new idea for the project.
title: ''
labels: ''
assignees: ''
---
<!-- This is not an exhaustive model but a help. No step is mandatory. -->
**Description**
Brief explanation of the feature.
**Basic example**
If the proposal involves something new or a change, include a basic example. How would you use the feature? In which context?
**Other**
Any other things you want to add.

.github/dependabot.yml vendored Normal file

@@ -0,0 +1,21 @@
version: 2
updates:
- package-ecosystem: 'github-actions'
directory: '/'
schedule:
interval: "monthly"
labels:
- 'skip-changelog'
- 'dependencies'
rebase-strategy: disabled
- package-ecosystem: gomod
directory: '/'
schedule:
interval: "monthly"
time: '04:00'
open-pull-requests-limit: 10
labels:
- skip-changelog
- dependencies
rebase-strategy: disabled

.github/release-draft-template.yml vendored Normal file

@@ -0,0 +1,39 @@
name-template: 'v$RESOLVED_VERSION 🐹'
tag-template: 'v$RESOLVED_VERSION'
exclude-labels:
- 'skip-changelog'
version-resolver:
minor:
labels:
- 'breaking-change'
default: patch
categories:
- title: '⚠️ Breaking changes'
label: 'breaking-change'
- title: '🚀 Enhancements'
label: 'enhancement'
- title: '🐛 Bug Fixes'
label: 'bug'
- title: '🔒 Security'
label: 'security'
- title: '⚙️ Maintenance/misc'
label:
- 'maintenance'
- 'documentation'
template: |
$CHANGES
Thanks again to $CONTRIBUTORS! 🎉
no-changes-template: 'Changes are coming soon 😎'
sort-direction: 'ascending'
replacers:
- search: '/(?:and )?@dependabot-preview(?:\[bot\])?,?/g'
replace: ''
- search: '/(?:and )?@dependabot(?:\[bot\])?,?/g'
replace: ''
- search: '/(?:and )?@bors(?:\[bot\])?,?/g'
replace: ''
- search: '/(?:and )?@meili-bot,?/g'
replace: ''
- search: '/(?:and )?@meili-bors(?:\[bot\])?,?/g'
replace: ''

.github/scripts/check-release.sh vendored Normal file

@@ -0,0 +1,16 @@
#!/bin/sh
# Checking if current tag matches the package version
current_tag=$(echo $GITHUB_REF | cut -d '/' -f 3 | sed -r 's/^v//')
file1='version.go'
file_tag1=$(grep 'const VERSION' -A 0 $file1 | cut -d '=' -f2 | tr -d '"' | tr -d ' ')
if [ "$current_tag" != "$file_tag1" ]; then
echo "Error: the current tag does not match the version in package file(s)."
echo "$file1: found $file_tag1 - expected $current_tag"
exit 1
fi
echo 'OK'
exit 0

.github/workflows/pre-release-tests.yml vendored Normal file

@@ -0,0 +1,44 @@
# Testing the code base against the Meilisearch pre-releases
name: Pre-Release Tests
# Will only run for PRs and pushes to bump-meilisearch-v*
on:
push:
branches: bump-meilisearch-v*
pull_request:
branches: bump-meilisearch-v*
jobs:
integration_tests:
runs-on: ubuntu-latest
strategy:
matrix:
# Current go.mod version and latest stable go version
go: [1.16, 1.17]
include:
- go: 1.16
tag: current
- go: 1.17
tag: latest
name: integration-tests-against-rc (go ${{ matrix.tag }} version)
steps:
- name: Set up Go ${{ matrix.go }}
uses: actions/setup-go@v5
with:
go-version: ${{ matrix.go }}
- name: Check out code into the Go module directory
uses: actions/checkout@v4
- name: Get dependencies
run: |
go get -v -t -d ./...
if [ -f Gopkg.toml ]; then
curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh
dep ensure
fi
- name: Get the latest Meilisearch RC
run: echo "MEILISEARCH_VERSION=$(curl https://raw.githubusercontent.com/meilisearch/integration-guides/main/scripts/get-latest-meilisearch-rc.sh | bash)" >> $GITHUB_ENV
- name: Meilisearch (${{ env.MEILISEARCH_VERSION }}) setup with Docker
run: docker run -d -p 7700:7700 getmeili/meilisearch:${{ env.MEILISEARCH_VERSION }} meilisearch --master-key=masterKey --no-analytics
- name: Run integration tests
run: go test -v ./...

.github/workflows/release-check.yml vendored Normal file

@@ -0,0 +1,15 @@
name: Validate release version
on:
push:
tags:
- '[0-9]+.[0-9]+.[0-9]+'
jobs:
publish:
name: Validate release version
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Check release validity
run: sh .github/scripts/check-release.sh

.github/workflows/release-drafter.yml vendored Normal file

@@ -0,0 +1,16 @@
name: Release Drafter
on:
push:
branches:
- main
jobs:
update_release_draft:
runs-on: ubuntu-latest
steps:
- uses: release-drafter/release-drafter@v6
with:
config-name: release-draft-template.yml
env:
GITHUB_TOKEN: ${{ secrets.RELEASE_DRAFTER_TOKEN }}

.github/workflows/tests.yml vendored Normal file

@@ -0,0 +1,69 @@
name: Tests
on:
pull_request:
push:
# trying and staging branches are for BORS config
branches:
- trying
- staging
- main
jobs:
linter:
name: linter
runs-on: ubuntu-latest
steps:
- uses: actions/setup-go@v5
with:
go-version: 1.17
- uses: actions/checkout@v4
- name: golangci-lint
uses: golangci/golangci-lint-action@v6
with:
version: v1.45.2
- name: Run go vet
run: go vet
- name: Yaml linter
uses: ibiqlik/action-yamllint@v3
with:
config_file: .yamllint.yml
integration_tests:
runs-on: ubuntu-latest
# Will not run if the event is a PR to bump-meilisearch-v* (so a pre-release PR)
# Will still run for each push to bump-meilisearch-v*
if: github.event_name != 'pull_request' || !startsWith(github.base_ref, 'bump-meilisearch-v')
strategy:
matrix:
# Current go.mod version and latest stable go version
go: [1.16, 1.17]
include:
- go: 1.16
tag: current
- go: 1.17
tag: latest
name: integration-tests (go ${{ matrix.tag }} version)
steps:
- name: Set up Go ${{ matrix.go }}
uses: actions/setup-go@v5
with:
go-version: ${{ matrix.go }}
- name: Check out code into the Go module directory
uses: actions/checkout@v4
- name: Get dependencies
run: |
go get -v -t -d ./...
if [ -f Gopkg.toml ]; then
curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh
dep ensure
fi
- name: Meilisearch setup (latest version) with Docker
run: docker run -d -p 7700:7700 getmeili/meilisearch:latest meilisearch --master-key=masterKey --no-analytics
- name: Run integration tests
run: go test -v -race -coverprofile=coverage.txt -covermode=atomic ./...
- name: Upload coverage report
uses: codecov/codecov-action@v5
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

.gitignore vendored Normal file

@@ -0,0 +1,3 @@
.idea
meilisearch-go.iml
vendor

.yamllint.yml Normal file

@@ -0,0 +1,9 @@
extends: default
ignore: |
node_modules
rules:
line-length: disable
document-start: disable
brackets: disable
truthy: disable

CONTRIBUTING.md Normal file

@@ -0,0 +1,112 @@
# Contributing <!-- omit in TOC -->
First of all, thank you for contributing to Meilisearch! The goal of this document is to provide everything you need to know in order to contribute to Meilisearch and its different integrations.
- [Assumptions](#assumptions)
- [How to Contribute](#how-to-contribute)
- [Development Workflow](#development-workflow)
- [Git Guidelines](#git-guidelines)
- [Release Process (for the internal team only)](#release-process-for-the-internal-team-only)
## Assumptions
1. **You're familiar with [GitHub](https://github.com) and the [Pull Request](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests) (PR) workflow.**
2. **You've read the Meilisearch [documentation](https://www.meilisearch.com/docs) and the [README](/README.md).**
3. **You know about the [Meilisearch community](https://discord.meilisearch.com). Please use this for help.**
## How to Contribute
1. Make sure that the contribution you want to make is explained or detailed in a GitHub issue! Find an [existing issue](https://github.com/meilisearch/meilisearch-go/issues/) or [open a new one](https://github.com/meilisearch/meilisearch-go/issues/new).
2. Once done, [fork the meilisearch-go repository](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) in your own GitHub account. Ask a maintainer if you want your issue to be checked before making a PR.
3. [Create a new Git branch](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-and-deleting-branches-within-your-repository).
4. Review the [Development Workflow](#development-workflow) section that describes the steps to maintain the repository.
5. Make the changes on your branch.
6. [Submit the branch as a PR](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request-from-a-fork) pointing to the `main` branch of the main meilisearch-go repository. A maintainer should comment on and/or review your Pull Request within a few days, although it may take longer depending on the circumstances.<br>
We do not enforce a naming convention for the PRs, but **please use something descriptive of your changes**, having in mind that the title of your PR will be automatically added to the next [release changelog](https://github.com/meilisearch/meilisearch-go/releases/).
## Development Workflow
### Requirements <!-- omit in TOC -->
- `docker`: for running integration tests and linting
- `easyjson`: for generating type marshalers and unmarshalers
- The SDK dependencies
You can install these tools and dependencies with the `make requirements` command.
### Test <!-- omit in TOC -->
You can run the integration tests and linter checks with the following command:
```shell
make test
```
### EasyJson <!-- omit in TOC -->
[`easyjson`](https://github.com/mailru/easyjson) is a package used to optimize marshaling and unmarshaling of Go structs to/from JSON.
It takes the `types.go` file as an input, and auto-generates `types_easyjson.go` with optimized
marshalling and unmarshalling methods for this SDK.
If for any reason `types.go` is modified, this file should be regenerated by running easyjson again.
```shell
make easyjson
```
## Git Guidelines
### Git Branches <!-- omit in TOC -->
All changes must be made in a branch and submitted as a PR.
We do not enforce any branch naming style, but please use something descriptive of your changes.
### Git Commits <!-- omit in TOC -->
As minimal requirements, your commit message should:
- be capitalized
- not end with a dot or any other punctuation character (!, ?)
- start with a verb so that we can read your commit message this way: "This commit will ...", where "..." is the commit message.
e.g.: "Fix the home page button" or "Add more tests for create_index method"
We don't follow any other convention, but if you want to use one, we recommend [this one](https://chris.beams.io/posts/git-commit/).
### GitHub Pull Requests <!-- omit in TOC -->
Some notes on GitHub PRs:
- [Convert your PR to a draft](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/changing-the-stage-of-a-pull-request) if your changes are a work in progress: no one will review it until you mark your PR as ready for review.<br>
The draft PR can be very useful if you want to show that you are working on something and make your work visible.
- The branch related to the PR must be **up-to-date with `main`** before merging. Fortunately, this project [integrates a bot](https://github.com/meilisearch/integration-guides/blob/main/resources/bors.md) to automatically enforce this requirement without the PR author having to do it manually.
- All PRs must be reviewed and approved by at least one maintainer.
- The PR title should be accurate and descriptive of the changes, as it will be automatically added to the next [release changelogs](https://github.com/meilisearch/meilisearch-go/releases/).
## Release Process (for the internal team only)
Meilisearch tools follow the [Semantic Versioning Convention](https://semver.org/).
### Automation to Rebase and Merge the PRs <!-- omit in TOC -->
This project integrates a bot that helps us manage pull requests merging.<br>
_[Read more about this](https://github.com/meilisearch/integration-guides/blob/main/resources/bors.md)._
### Automated Changelogs <!-- omit in TOC -->
This project integrates a tool to create automated changelogs.<br>
_[Read more about this](https://github.com/meilisearch/integration-guides/blob/main/resources/release-drafter.md)._
### How to Publish the Release <!-- omit in TOC -->
⚠️ Before doing anything, make sure you have gone through the guide about [Releasing an Integration](https://github.com/meilisearch/integration-guides/blob/main/resources/integration-release.md).
Make a PR that updates the version to the new one in this file:
- [`version.go`](/version.go):
```go
const VERSION = "X.X.X"
```
Once the changes are merged on `main`, you can publish the current draft release via the [GitHub interface](https://github.com/meilisearch/meilisearch-go/releases): on this page, click on `Edit` (related to the draft release) > update the description (be sure you apply [these recommendations](https://github.com/meilisearch/integration-guides/blob/main/resources/integration-release.md#writting-the-release-description)) > when you are ready, click on `Publish release`.
<hr>
Thank you again for reading this through. We cannot wait to start working with you once you make your way through this contributing guide ❤️

Dockerfile Normal file

@@ -0,0 +1,11 @@
FROM golang:1.17.11-buster
WORKDIR /home/package
COPY go.mod .
COPY go.sum .
COPY --from=golangci/golangci-lint:v1.42.0 /usr/bin/golangci-lint /usr/local/bin/golangci-lint
RUN go mod download
RUN go mod verify

LICENSE Normal file

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2020-2025 Meili SAS
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

README.md Normal file

@@ -0,0 +1,297 @@
<p align="center">
<img src="https://raw.githubusercontent.com/meilisearch/integration-guides/main/assets/logos/meilisearch_go.svg" alt="Meilisearch-Go" width="200" height="200" />
</p>
<h1 align="center">Meilisearch Go</h1>
<h4 align="center">
<a href="https://github.com/meilisearch/meilisearch">Meilisearch</a> |
<a href="https://www.meilisearch.com/cloud?utm_campaign=oss&utm_source=github&utm_medium=meilisearch-go">Meilisearch Cloud</a> |
<a href="https://www.meilisearch.com/docs">Documentation</a> |
<a href="https://discord.meilisearch.com">Discord</a> |
<a href="https://roadmap.meilisearch.com/tabs/1-under-consideration">Roadmap</a> |
<a href="https://www.meilisearch.com">Website</a> |
<a href="https://www.meilisearch.com/docs/faq">FAQ</a>
</h4>
<p align="center">
<a href="https://github.com/meilisearch/meilisearch-go/actions"><img src="https://github.com/meilisearch/meilisearch-go/workflows/Tests/badge.svg" alt="GitHub Workflow Status"></a>
<a href="https://goreportcard.com/report/github.com/meilisearch/meilisearch-go"><img src="https://goreportcard.com/badge/github.com/meilisearch/meilisearch-go" alt="Test"></a>
<a href="https://codecov.io/gh/meilisearch/meilisearch-go"><img src="https://codecov.io/gh/meilisearch/meilisearch-go/branch/main/graph/badge.svg?token=8N6N60D5UI" alt="CodeCov"></a>
<a href="https://pkg.go.dev/github.com/meilisearch/meilisearch-go"><img src="https://pkg.go.dev/badge/github.com/meilisearch/meilisearch-go.svg" alt="Go Reference"></a>
<a href="https://github.com/meilisearch/meilisearch-go/blob/main/LICENSE"><img src="https://img.shields.io/badge/license-MIT-informational" alt="License"></a>
<a href="https://ms-bors.herokuapp.com/repositories/58"><img src="https://bors.tech/images/badge_small.svg" alt="Bors enabled"></a>
</p>
<p align="center">⚡ The Meilisearch API client written for Golang</p>
**Meilisearch Go** is the Meilisearch API client for Go developers.
**Meilisearch** is an open-source search engine. [Learn more about Meilisearch.](https://github.com/meilisearch/Meilisearch)
## Table of Contents
- [📖 Documentation](#-documentation)
- [🔧 Installation](#-installation)
- [🚀 Getting started](#-getting-started)
- [Add documents](#add-documents)
- [Basic search](#basic-search)
- [Custom search](#custom-search)
- [Custom search with filters](#custom-search-with-filters)
- [Customize client](#customize-client)
- [🤖 Compatibility with Meilisearch](#-compatibility-with-meilisearch)
- [⚡️ Benchmark performance](#-benchmark-performance)
- [💡 Learn more](#-learn-more)
- [⚙️ Contributing](#-contributing)
## 📖 Documentation
This readme contains all the documentation you need to start using this Meilisearch SDK.
For general information on how to use Meilisearch—such as our API reference, tutorials, guides, and in-depth articles—refer to our [main documentation website](https://www.meilisearch.com/docs/).
## 🔧 Installation
With `go get` in command line:
```bash
go get github.com/meilisearch/meilisearch-go
```
### Run Meilisearch <!-- omit in toc -->
⚡️ **Launch, scale, and streamline in minutes with Meilisearch Cloud**—no maintenance, no commitment, cancel anytime. [Try it free now](https://cloud.meilisearch.com/login?utm_campaign=oss&utm_source=github&utm_medium=meilisearch-go).
🪨 Prefer to self-host? [Download and deploy](https://www.meilisearch.com/docs/learn/self_hosted/getting_started_with_self_hosted_meilisearch?utm_campaign=oss&utm_source=github&utm_medium=meilisearch-go) our fast, open-source search engine on your own infrastructure.
## 🚀 Getting started
#### Add documents
```go
package main
import (
"fmt"
"os"
"github.com/meilisearch/meilisearch-go"
)
func main() {
client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("foobar"))
// An index is where the documents are stored.
index := client.Index("movies")
// If the index 'movies' does not exist, Meilisearch creates it when you first add the documents.
documents := []map[string]interface{}{
{ "id": 1, "title": "Carol", "genres": []string{"Romance", "Drama"} },
{ "id": 2, "title": "Wonder Woman", "genres": []string{"Action", "Adventure"} },
{ "id": 3, "title": "Life of Pi", "genres": []string{"Adventure", "Drama"} },
{ "id": 4, "title": "Mad Max: Fury Road", "genres": []string{"Adventure", "Science Fiction"} },
{ "id": 5, "title": "Moana", "genres": []string{"Fantasy", "Action"} },
{ "id": 6, "title": "Philadelphia", "genres": []string{"Drama"} },
}
task, err := index.AddDocuments(documents)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
fmt.Println(task.TaskUID)
}
```
With the `taskUID`, you can check the status (`enqueued`, `canceled`, `processing`, `succeeded` or `failed`) of your documents addition using the [task endpoint](https://www.meilisearch.com/docs/reference/api/tasks).
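For example, here is a minimal polling sketch using this SDK's `GetTask` method. It reuses the `client` and `task` values from the snippet above and assumes an extra `"time"` import; the 50 ms interval is an arbitrary choice for illustration:

```go
// Poll the task until Meilisearch reaches a terminal status.
for {
	t, err := client.GetTask(task.TaskUID)
	if err != nil {
		fmt.Println(err)
		os.Exit(1)
	}
	// Terminal statuses: succeeded, failed, or canceled.
	if t.Status == meilisearch.TaskStatusSucceeded ||
		t.Status == meilisearch.TaskStatusFailed ||
		t.Status == meilisearch.TaskStatusCanceled {
		fmt.Println(t.Status)
		break
	}
	time.Sleep(50 * time.Millisecond)
}
```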
#### Basic Search
```go
package main
import (
"fmt"
"os"
"github.com/meilisearch/meilisearch-go"
)
func main() {
client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("foobar"))
// Meilisearch is typo-tolerant:
searchRes, err := client.Index("movies").Search("philoudelphia",
&meilisearch.SearchRequest{
Limit: 10,
})
if err != nil {
fmt.Println(err)
os.Exit(1)
}
fmt.Println(searchRes.Hits)
}
```
JSON output:
```json
{
"hits": [{
"id": 6,
"title": "Philadelphia",
"genres": ["Drama"]
}],
"offset": 0,
"limit": 10,
"processingTimeMs": 1,
"query": "philoudelphia"
}
```
#### Custom Search
All the supported options are described in the [search parameters](https://www.meilisearch.com/docs/reference/api/search#search-parameters) section of the documentation.
```go
func main() {
searchRes, err := client.Index("movies").Search("wonder",
&meilisearch.SearchRequest{
AttributesToHighlight: []string{"*"},
})
if err != nil {
fmt.Println(err)
os.Exit(1)
}
fmt.Println(searchRes.Hits)
}
```
JSON output:
```json
{
"hits": [
{
"id": 2,
"title": "Wonder Woman",
"genres": ["Action", "Adventure"],
"_formatted": {
"id": 2,
"title": "<em>Wonder</em> Woman"
}
}
],
"offset": 0,
"limit": 20,
"processingTimeMs": 0,
"query": "wonder"
}
```
#### Custom Search With Filters
If you want to enable filtering, you must add your attributes to the `filterableAttributes` index setting.
```go
task, err := index.UpdateFilterableAttributes(&[]string{"id", "genres"})
```
You only need to perform this operation once.
Note that Meilisearch will rebuild your index whenever you update `filterableAttributes`. Depending on the size of your dataset, this might take time. You can track the process using the [task status](https://www.meilisearch.com/docs/learn/advanced/asynchronous_operations).
Then, you can perform the search:
```go
searchRes, err := index.Search("wonder",
&meilisearch.SearchRequest{
Filter: "id > 1 AND genres = Action",
})
```
```json
{
"hits": [
{
"id": 2,
"title": "Wonder Woman",
"genres": ["Action","Adventure"]
}
],
"offset": 0,
"limit": 20,
"estimatedTotalHits": 1,
"processingTimeMs": 0,
"query": "wonder"
}
```
#### Customize Client
The client supports many customization options:
- `WithCustomClient` sets a custom `http.Client`.
- `WithCustomClientWithTLS` enables TLS for the HTTP client.
- `WithAPIKey` sets the API key or master [key](https://www.meilisearch.com/docs/reference/api/keys).
- `WithContentEncoding` configures [content encoding](https://www.meilisearch.com/docs/reference/api/overview#content-encoding) for requests and responses. Currently, gzip, deflate, and brotli are supported.
- `WithCustomRetries` customizes retry behavior based on specific HTTP status codes (`retryOnStatus`, defaults to 502, 503, and 504) and allows setting the maximum number of retries.
- `DisableRetries` disables the retry logic. By default, retries are enabled.
```go
package main
import (
"net/http"
"github.com/meilisearch/meilisearch-go"
)
func main() {
client := meilisearch.New("http://localhost:7700",
meilisearch.WithAPIKey("foobar"),
meilisearch.WithCustomClient(http.DefaultClient),
meilisearch.WithContentEncoding(meilisearch.GzipEncoding, meilisearch.BestCompression),
meilisearch.WithCustomRetries([]int{502}, 20),
)
}
```
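As a sketch, assuming `DisableRetries()` is exposed as an option constructor like the options shown above, turning the retry logic off entirely would look like this:

```go
// Retries are enabled by default; this client makes each request exactly once.
client := meilisearch.New("http://localhost:7700",
	meilisearch.WithAPIKey("foobar"),
	meilisearch.DisableRetries(),
)
```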
## 🤖 Compatibility with Meilisearch
This package guarantees compatibility with [version v1.x of Meilisearch](https://github.com/meilisearch/meilisearch/releases/latest), but some features may not be present. Please check the [issues](https://github.com/meilisearch/meilisearch-go/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22+label%3Aenhancement) for more info.
## ⚡️ Benchmark Performance
The Meilisearch client performance was tested in [client_bench_test.go](/client_bench_test.go).
```shell
goos: linux
goarch: amd64
pkg: github.com/meilisearch/meilisearch-go
cpu: AMD Ryzen 7 5700U with Radeon Graphics
```
**Results**
```shell
Benchmark_ExecuteRequest-16 10000 105880 ns/op 7241 B/op 87 allocs/op
Benchmark_ExecuteRequestWithEncoding-16 2716 455548 ns/op 1041998 B/op 169 allocs/op
Benchmark_ExecuteRequestWithoutRetries-16 1 3002787257 ns/op 56528 B/op 332 allocs/op
```
## 💡 Learn more
The following sections in our main documentation website may interest you:
- **Manipulate documents**: see the [API references](https://www.meilisearch.com/docs/reference/api/documents) or read more about [documents](https://www.meilisearch.com/docs/learn/core_concepts/documents).
- **Search**: see the [API references](https://www.meilisearch.com/docs/reference/api/search) or follow our guide on [search parameters](https://www.meilisearch.com/docs/reference/api/search#search-parameters).
- **Manage the indexes**: see the [API references](https://www.meilisearch.com/docs/reference/api/indexes) or read more about [indexes](https://www.meilisearch.com/docs/learn/core_concepts/indexes).
- **Configure the index settings**: see the [API references](https://www.meilisearch.com/docs/reference/api/settings) or follow our guide on [settings parameters](https://www.meilisearch.com/docs/reference/api/settings#settings_parameters).
## ⚙️ Contributing
Any new contribution is more than welcome in this project!
If you want to know more about the development workflow or want to contribute, please visit our [contributing guidelines](/CONTRIBUTING.md) for detailed instructions!
<hr>
**Meilisearch** provides and maintains many **SDKs and Integration tools** like this one. We want to provide everyone with an **amazing search experience for any kind of project**. If you want to contribute, make suggestions, or just know what's going on right now, visit us in the [integration-guides](https://github.com/meilisearch/integration-guides) repository.

bors.toml Normal file

@@ -0,0 +1,3 @@
status = ['integration-tests (go current version)', 'integration-tests (go latest version)', 'linter']
# 1 hour timeout
timeout-sec = 3600

client.go Normal file

@@ -0,0 +1,357 @@
package meilisearch
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"net/url"
"sync"
"time"
)
type client struct {
client *http.Client
host string
apiKey string
bufferPool *sync.Pool
encoder encoder
contentEncoding ContentEncoding
retryOnStatus map[int]bool
disableRetry bool
maxRetries uint8
retryBackoff func(attempt uint8) time.Duration
}
type clientConfig struct {
contentEncoding ContentEncoding
encodingCompressionLevel EncodingCompressionLevel
retryOnStatus map[int]bool
disableRetry bool
maxRetries uint8
}
type internalRequest struct {
endpoint string
method string
contentType string
withRequest interface{}
withResponse interface{}
withQueryParams map[string]string
acceptedStatusCodes []int
functionName string
}
func newClient(cli *http.Client, host, apiKey string, cfg clientConfig) *client {
c := &client{
client: cli,
host: host,
apiKey: apiKey,
bufferPool: &sync.Pool{
New: func() interface{} {
return new(bytes.Buffer)
},
},
disableRetry: cfg.disableRetry,
maxRetries: cfg.maxRetries,
retryOnStatus: cfg.retryOnStatus,
}
if c.retryOnStatus == nil {
c.retryOnStatus = map[int]bool{
502: true,
503: true,
504: true,
}
}
if !c.disableRetry && c.retryBackoff == nil {
c.retryBackoff = func(attempt uint8) time.Duration {
return time.Second * time.Duration(attempt)
}
}
if !cfg.contentEncoding.IsZero() {
c.contentEncoding = cfg.contentEncoding
c.encoder = newEncoding(cfg.contentEncoding, cfg.encodingCompressionLevel)
}
return c
}
func (c *client) executeRequest(ctx context.Context, req *internalRequest) error {
internalError := &Error{
Endpoint: req.endpoint,
Method: req.method,
Function: req.functionName,
RequestToString: "empty request",
ResponseToString: "empty response",
MeilisearchApiError: meilisearchApiError{
Message: "empty meilisearch message",
},
StatusCodeExpected: req.acceptedStatusCodes,
encoder: c.encoder,
}
resp, err := c.sendRequest(ctx, req, internalError)
if err != nil {
return err
}
defer func() {
_ = resp.Body.Close()
}()
internalError.StatusCode = resp.StatusCode
b, err := io.ReadAll(resp.Body)
if err != nil {
return err
}
err = c.handleStatusCode(req, resp.StatusCode, b, internalError)
if err != nil {
return err
}
err = c.handleResponse(req, b, internalError)
if err != nil {
return err
}
return nil
}
func (c *client) sendRequest(
ctx context.Context,
req *internalRequest,
internalError *Error,
) (*http.Response, error) {
apiURL, err := url.Parse(c.host + req.endpoint)
if err != nil {
return nil, fmt.Errorf("unable to parse url: %w", err)
}
if req.withQueryParams != nil {
query := apiURL.Query()
for key, value := range req.withQueryParams {
query.Set(key, value)
}
apiURL.RawQuery = query.Encode()
}
// Create request body
var body io.Reader = nil
if req.withRequest != nil {
if req.method == http.MethodGet || req.method == http.MethodHead {
return nil, ErrInvalidRequestMethod
}
if req.contentType == "" {
return nil, ErrRequestBodyWithoutContentType
}
rawRequest := req.withRequest
buf := c.bufferPool.Get().(*bytes.Buffer)
buf.Reset()
if b, ok := rawRequest.([]byte); ok {
buf.Write(b)
body = buf
} else if reader, ok := rawRequest.(io.Reader); ok {
// If the request body is an io.Reader then stream it directly
body = reader
} else {
// Otherwise convert it to JSON
var (
data []byte
err error
)
if marshaler, ok := rawRequest.(json.Marshaler); ok {
data, err = marshaler.MarshalJSON()
if err != nil {
return nil, internalError.WithErrCode(ErrCodeMarshalRequest,
fmt.Errorf("failed to marshal with MarshalJSON: %w", err))
}
if data == nil {
return nil, internalError.WithErrCode(ErrCodeMarshalRequest,
errors.New("MarshalJSON returned nil data"))
}
} else {
data, err = json.Marshal(rawRequest)
if err != nil {
return nil, internalError.WithErrCode(ErrCodeMarshalRequest,
fmt.Errorf("failed to marshal with json.Marshal: %w", err))
}
}
buf.Write(data)
body = buf
}
if !c.contentEncoding.IsZero() {
// Encode the buffered request body, returning the plain buffer
// to the pool once the encoded copy exists.
if buf, ok := body.(*bytes.Buffer); ok {
encodedBuf, err := c.encoder.Encode(bytes.NewReader(buf.Bytes()))
c.bufferPool.Put(buf)
if err != nil {
return nil, internalError.WithErrCode(ErrCodeMarshalRequest,
fmt.Errorf("failed to encode request body: %w", err))
}
body = encodedBuf
}
}
}
// Create the HTTP request
request, err := http.NewRequestWithContext(ctx, req.method, apiURL.String(), body)
if err != nil {
return nil, fmt.Errorf("unable to create request: %w", err)
}
// adding request headers
if req.contentType != "" {
request.Header.Set("Content-Type", req.contentType)
}
if c.apiKey != "" {
request.Header.Set("Authorization", "Bearer "+c.apiKey)
}
if req.withResponse != nil && !c.contentEncoding.IsZero() {
request.Header.Set("Accept-Encoding", c.contentEncoding.String())
}
if req.withRequest != nil && !c.contentEncoding.IsZero() {
request.Header.Set("Content-Encoding", c.contentEncoding.String())
}
request.Header.Set("User-Agent", GetQualifiedVersion())
resp, err := c.do(request, internalError)
if err != nil {
return nil, err
}
if body != nil {
if buf, ok := body.(*bytes.Buffer); ok {
c.bufferPool.Put(buf)
}
}
return resp, nil
}
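// do performs the HTTP round trip, retrying on the configured status codes
// with backoff until maxRetries is reached or the request context is done.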
func (c *client) do(req *http.Request, internalError *Error) (resp *http.Response, err error) {
retriesCount := uint8(0)
for {
resp, err = c.client.Do(req)
if err != nil {
if errors.Is(err, context.DeadlineExceeded) {
return nil, internalError.WithErrCode(MeilisearchTimeoutError, err)
}
return nil, internalError.WithErrCode(MeilisearchCommunicationError, err)
}
// Exit if retries are disabled
if c.disableRetry {
break
}
// Check if response status is retryable and we haven't exceeded max retries
if c.retryOnStatus[resp.StatusCode] && retriesCount < c.maxRetries {
retriesCount++
// Close response body to prevent memory leaks
resp.Body.Close()
// Handle backoff with context cancellation support
backoff := c.retryBackoff(retriesCount)
timer := time.NewTimer(backoff)
select {
case <-req.Context().Done():
err := req.Context().Err()
timer.Stop()
return nil, internalError.WithErrCode(MeilisearchTimeoutError, err)
case <-timer.C:
// Retry after backoff
timer.Stop()
}
continue
}
break
}
// Return an error only when the retry budget is exhausted and the last
// response still has a retryable status code; a request that eventually
// succeeded must not be reported as a failure.
if !c.disableRetry && retriesCount >= c.maxRetries && c.retryOnStatus[resp.StatusCode] {
return nil, internalError.WithErrCode(MeilisearchMaxRetriesExceeded, nil)
}
return resp, nil
}
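// handleStatusCode checks the response status against the accepted status
// codes of the request and builds a Meilisearch API error otherwise.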
func (c *client) handleStatusCode(req *internalRequest, statusCode int, body []byte, internalError *Error) error {
if req.acceptedStatusCodes != nil {
// A successful status code is required so check if the response status code is in the
// expected status code list.
for _, acceptedCode := range req.acceptedStatusCodes {
if statusCode == acceptedCode {
return nil
}
}
internalError.ErrorBody(body)
if internalError.MeilisearchApiError.Code == "" {
return internalError.WithErrCode(MeilisearchApiErrorWithoutMessage)
}
return internalError.WithErrCode(MeilisearchApiError)
}
return nil
}
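// handleResponse unmarshals the response body into req.withResponse,
// decoding it first when a content encoding is configured.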
func (c *client) handleResponse(req *internalRequest, body []byte, internalError *Error) (err error) {
if req.withResponse != nil {
if !c.contentEncoding.IsZero() {
if err := c.encoder.Decode(body, req.withResponse); err != nil {
return internalError.WithErrCode(ErrCodeResponseUnmarshalBody, err)
}
} else {
internalError.ResponseToString = string(body)
if internalError.ResponseToString == nullBody {
req.withResponse = nil
return nil
}
var err error
if resp, ok := req.withResponse.(json.Unmarshaler); ok {
err = resp.UnmarshalJSON(body)
req.withResponse = resp
} else {
err = json.Unmarshal(body, req.withResponse)
}
if err != nil {
return internalError.WithErrCode(ErrCodeResponseUnmarshalBody, err)
}
}
}
return nil
}
146
client_bench_test.go Normal file
View file
@ -0,0 +1,146 @@
package meilisearch
import (
"bytes"
"context"
"encoding/json"
"io"
"net/http"
"net/http/httptest"
"testing"
)
func Benchmark_ExecuteRequest(b *testing.B) {
b.ReportAllocs()
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.Method == http.MethodGet && r.URL.Path == "/test" {
w.WriteHeader(http.StatusOK)
_, _ = w.Write([]byte(`{"message":"get successful"}`))
} else {
w.WriteHeader(http.StatusNotFound)
}
}))
defer ts.Close()
c := newClient(&http.Client{}, ts.URL, "testApiKey", clientConfig{
disableRetry: true,
})
b.ResetTimer()
for i := 0; i < b.N; i++ {
err := c.executeRequest(context.Background(), &internalRequest{
endpoint: "/test",
method: http.MethodGet,
withResponse: &mockResponse{},
acceptedStatusCodes: []int{http.StatusOK},
})
if err != nil {
b.Fatal(err)
}
}
}
func Benchmark_ExecuteRequestWithEncoding(b *testing.B) {
b.ReportAllocs()
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.Method == http.MethodPost && r.URL.Path == "/test" {
accept := r.Header.Get("Accept-Encoding")
ce := r.Header.Get("Content-Encoding")
reqEnc := newEncoding(ContentEncoding(ce), DefaultCompression)
respEnc := newEncoding(ContentEncoding(accept), DefaultCompression)
req := new(mockData)
if len(ce) != 0 {
body, err := io.ReadAll(r.Body)
if err != nil {
b.Fatal(err)
}
err = reqEnc.Decode(body, req)
if err != nil {
b.Fatal(err)
}
}
if len(accept) != 0 {
d, err := json.Marshal(req)
if err != nil {
b.Fatal(err)
}
res, err := respEnc.Encode(bytes.NewReader(d))
if err != nil {
b.Fatal(err)
}
_, _ = w.Write(res.Bytes())
w.WriteHeader(http.StatusOK)
}
} else {
w.WriteHeader(http.StatusNotFound)
}
}))
defer ts.Close()
c := newClient(&http.Client{}, ts.URL, "testApiKey", clientConfig{
disableRetry: true,
contentEncoding: GzipEncoding,
encodingCompressionLevel: DefaultCompression,
})
b.ResetTimer()
for i := 0; i < b.N; i++ {
err := c.executeRequest(context.Background(), &internalRequest{
endpoint: "/test",
method: http.MethodPost,
contentType: contentTypeJSON,
withRequest: &mockData{Name: "foo", Age: 30},
withResponse: &mockData{},
acceptedStatusCodes: []int{http.StatusOK},
})
if err != nil {
b.Fatal(err)
}
}
}
func Benchmark_ExecuteRequestWithRetries(b *testing.B) {
b.ReportAllocs()
retryCount := 0
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.Method == http.MethodGet && r.URL.Path == "/test" {
if retryCount == 2 {
w.WriteHeader(http.StatusOK)
return
}
w.WriteHeader(http.StatusBadGateway)
retryCount++
} else {
w.WriteHeader(http.StatusNotFound)
}
}))
defer ts.Close()
c := newClient(&http.Client{}, ts.URL, "testApiKey", clientConfig{
disableRetry: false,
maxRetries: 3,
retryOnStatus: map[int]bool{
502: true,
503: true,
504: true,
},
})
b.ResetTimer()
for i := 0; i < b.N; i++ {
err := c.executeRequest(context.Background(), &internalRequest{
endpoint: "/test",
method: http.MethodGet,
withResponse: nil,
withRequest: nil,
acceptedStatusCodes: []int{http.StatusOK},
})
if err != nil {
b.Fatal(err)
}
}
}
504
client_test.go Normal file
View file
@ -0,0 +1,504 @@
package meilisearch
import (
"bytes"
"context"
"encoding/json"
"errors"
"io"
"net/http"
"net/http/httptest"
"testing"
"time"
"github.com/stretchr/testify/require"
)
// Mock structures for testing
type mockResponse struct {
Message string `json:"message"`
}
type mockJsonMarshaller struct {
valid bool
null bool
Foo string `json:"foo"`
Bar string `json:"bar"`
}
// failingEncoder is used to simulate encoder failure
type failingEncoder struct{}
func (fe failingEncoder) Encode(r io.Reader) (*bytes.Buffer, error) {
return nil, errors.New("dummy encoding failure")
}
// Implement Decode method to satisfy the encoder interface, though it won't be used here
func (fe failingEncoder) Decode(b []byte, v interface{}) error {
return errors.New("dummy decode failure")
}
func TestExecuteRequest(t *testing.T) {
retryCount := 0
// Create a mock server
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.Method == http.MethodGet && r.URL.Path == "/test-get" {
w.WriteHeader(http.StatusOK)
_, _ = w.Write([]byte(`{"message":"get successful"}`))
} else if r.Method == http.MethodGet && r.URL.Path == "/test-get-encoding" {
encode := r.Header.Get("Accept-Encoding")
if len(encode) != 0 {
enc := newEncoding(ContentEncoding(encode), DefaultCompression)
d := &mockData{Name: "foo", Age: 30}
b, err := json.Marshal(d)
require.NoError(t, err)
res, err := enc.Encode(bytes.NewReader(b))
require.NoError(t, err)
_, _ = w.Write(res.Bytes())
w.WriteHeader(http.StatusOK)
return
}
_, _ = w.Write([]byte("invalid message"))
w.WriteHeader(http.StatusInternalServerError)
} else if r.Method == http.MethodPost && r.URL.Path == "/test-req-resp-encoding" {
accept := r.Header.Get("Accept-Encoding")
ce := r.Header.Get("Content-Encoding")
reqEnc := newEncoding(ContentEncoding(ce), DefaultCompression)
respEnc := newEncoding(ContentEncoding(accept), DefaultCompression)
req := new(mockData)
if len(ce) != 0 {
b, err := io.ReadAll(r.Body)
require.NoError(t, err)
err = reqEnc.Decode(b, req)
require.NoError(t, err)
}
if len(accept) != 0 {
d, err := json.Marshal(req)
require.NoError(t, err)
res, err := respEnc.Encode(bytes.NewReader(d))
require.NoError(t, err)
_, _ = w.Write(res.Bytes())
w.WriteHeader(http.StatusOK)
}
} else if r.Method == http.MethodPost && r.URL.Path == "/test-post" {
w.WriteHeader(http.StatusCreated)
msg := []byte(`{"message":"post successful"}`)
_, _ = w.Write(msg)
} else if r.Method == http.MethodGet && r.URL.Path == "/test-null-body" {
w.WriteHeader(http.StatusOK)
msg := []byte(`null`)
_, _ = w.Write(msg)
} else if r.Method == http.MethodPost && r.URL.Path == "/test-post-encoding" {
w.WriteHeader(http.StatusCreated)
msg := []byte(`{"message":"post successful"}`)
enc := r.Header.Get("Accept-Encoding")
if len(enc) != 0 {
e := newEncoding(ContentEncoding(enc), DefaultCompression)
b, err := e.Encode(bytes.NewReader(msg))
require.NoError(t, err)
_, _ = w.Write(b.Bytes())
return
}
_, _ = w.Write(msg)
} else if r.URL.Path == "/test-bad-request" {
w.WriteHeader(http.StatusBadRequest)
_, _ = w.Write([]byte(`{"message":"bad request"}`))
} else if r.URL.Path == "/invalid-response-body" {
w.WriteHeader(http.StatusInternalServerError)
_, _ = w.Write([]byte(`{"message":"bad response body"}`))
} else if r.URL.Path == "/io-reader" {
w.WriteHeader(http.StatusOK)
_, _ = w.Write([]byte(`{"message":"io reader"}`))
} else if r.URL.Path == "/failed-retry" {
w.WriteHeader(http.StatusBadGateway)
} else if r.URL.Path == "/success-retry" {
if retryCount == 2 {
w.WriteHeader(http.StatusOK)
return
}
w.WriteHeader(http.StatusBadGateway)
retryCount++
} else if r.URL.Path == "/dummy" {
w.WriteHeader(http.StatusOK)
} else {
w.WriteHeader(http.StatusNotFound)
}
}))
defer ts.Close()
tests := []struct {
name string
internalReq *internalRequest
expectedResp interface{}
contentEncoding ContentEncoding
withTimeout bool
disableRetry bool
wantErr bool
}{
{
name: "Successful GET request",
internalReq: &internalRequest{
endpoint: "/test-get",
method: http.MethodGet,
withResponse: &mockResponse{},
acceptedStatusCodes: []int{http.StatusOK},
},
expectedResp: &mockResponse{Message: "get successful"},
wantErr: false,
},
{
name: "Successful POST request",
internalReq: &internalRequest{
endpoint: "/test-post",
method: http.MethodPost,
withRequest: map[string]string{"key": "value"},
contentType: contentTypeJSON,
withResponse: &mockResponse{},
acceptedStatusCodes: []int{http.StatusCreated},
},
expectedResp: &mockResponse{Message: "post successful"},
wantErr: false,
},
{
name: "404 Not Found",
internalReq: &internalRequest{
endpoint: "/not-found",
method: http.MethodGet,
withResponse: &mockResponse{},
acceptedStatusCodes: []int{http.StatusOK},
},
expectedResp: nil,
wantErr: true,
},
{
name: "Invalid URL",
internalReq: &internalRequest{
endpoint: "/invalid-url$%^*()*#",
method: http.MethodGet,
withResponse: &mockResponse{},
acceptedStatusCodes: []int{http.StatusOK},
},
expectedResp: nil,
wantErr: true,
},
{
name: "Invalid response body",
internalReq: &internalRequest{
endpoint: "/invalid-response-body",
method: http.MethodGet,
withResponse: struct{}{},
acceptedStatusCodes: []int{http.StatusInternalServerError},
},
expectedResp: nil,
wantErr: true,
},
{
name: "Invalid request method",
internalReq: &internalRequest{
endpoint: "/invalid-request-method",
method: http.MethodGet,
withResponse: nil,
withRequest: struct{}{},
acceptedStatusCodes: []int{http.StatusBadRequest},
},
expectedResp: nil,
wantErr: true,
},
{
name: "Invalid request content type",
internalReq: &internalRequest{
endpoint: "/invalid-request-content-type",
method: http.MethodPost,
withResponse: nil,
contentType: "",
withRequest: struct{}{},
acceptedStatusCodes: []int{http.StatusBadRequest},
},
expectedResp: nil,
wantErr: true,
},
{
name: "Invalid json marshaler",
internalReq: &internalRequest{
endpoint: "/invalid-marshaler",
method: http.MethodPost,
withResponse: nil,
withRequest: &mockJsonMarshaller{
valid: false,
},
contentType: "application/json",
},
expectedResp: nil,
wantErr: true,
},
{
name: "Null data marshaler",
internalReq: &internalRequest{
endpoint: "/null-data-marshaler",
method: http.MethodPost,
withResponse: nil,
withRequest: &mockJsonMarshaller{
valid: true,
null: true,
},
contentType: "application/json",
},
expectedResp: nil,
wantErr: true,
},
{
name: "Test null body response",
internalReq: &internalRequest{
endpoint: "/test-null-body",
method: http.MethodGet,
withResponse: make([]byte, 0),
contentType: "application/json",
acceptedStatusCodes: []int{http.StatusOK},
},
expectedResp: nil,
wantErr: false,
},
{
name: "400 Bad Request",
internalReq: &internalRequest{
endpoint: "/test-bad-request",
method: http.MethodGet,
withResponse: &mockResponse{},
acceptedStatusCodes: []int{http.StatusOK},
},
expectedResp: nil,
wantErr: true,
},
{
name: "Test request encoding gzip",
internalReq: &internalRequest{
endpoint: "/test-post-encoding",
method: http.MethodPost,
withRequest: map[string]string{"key": "value"},
contentType: contentTypeJSON,
withResponse: &mockResponse{},
acceptedStatusCodes: []int{http.StatusCreated},
},
expectedResp: &mockResponse{Message: "post successful"},
contentEncoding: GzipEncoding,
wantErr: false,
},
{
name: "Test request encoding deflate",
internalReq: &internalRequest{
endpoint: "/test-post-encoding",
method: http.MethodPost,
withRequest: map[string]string{"key": "value"},
contentType: contentTypeJSON,
withResponse: &mockResponse{},
acceptedStatusCodes: []int{http.StatusCreated},
},
expectedResp: &mockResponse{Message: "post successful"},
contentEncoding: DeflateEncoding,
wantErr: false,
},
{
name: "Test request encoding brotli",
internalReq: &internalRequest{
endpoint: "/test-post-encoding",
method: http.MethodPost,
withRequest: map[string]string{"key": "value"},
contentType: contentTypeJSON,
withResponse: &mockResponse{},
acceptedStatusCodes: []int{http.StatusCreated},
},
expectedResp: &mockResponse{Message: "post successful"},
contentEncoding: BrotliEncoding,
wantErr: false,
},
{
name: "Test response decoding gzip",
internalReq: &internalRequest{
endpoint: "/test-get-encoding",
method: http.MethodGet,
withRequest: nil,
withResponse: &mockData{},
acceptedStatusCodes: []int{http.StatusOK},
},
expectedResp: &mockData{Name: "foo", Age: 30},
contentEncoding: GzipEncoding,
wantErr: false,
},
{
name: "Test response decoding deflate",
internalReq: &internalRequest{
endpoint: "/test-get-encoding",
method: http.MethodGet,
withRequest: nil,
withResponse: &mockData{},
acceptedStatusCodes: []int{http.StatusOK},
},
expectedResp: &mockData{Name: "foo", Age: 30},
contentEncoding: DeflateEncoding,
wantErr: false,
},
{
name: "Test response decoding brotli",
internalReq: &internalRequest{
endpoint: "/test-get-encoding",
method: http.MethodGet,
withRequest: nil,
withResponse: &mockData{},
acceptedStatusCodes: []int{http.StatusOK},
},
expectedResp: &mockData{Name: "foo", Age: 30},
contentEncoding: BrotliEncoding,
wantErr: false,
},
{
name: "Test request and response encoding",
internalReq: &internalRequest{
endpoint: "/test-req-resp-encoding",
method: http.MethodPost,
contentType: contentTypeJSON,
withRequest: &mockData{Name: "foo", Age: 30},
withResponse: &mockData{},
acceptedStatusCodes: []int{http.StatusOK},
},
expectedResp: &mockData{Name: "foo", Age: 30},
contentEncoding: GzipEncoding,
wantErr: false,
},
{
name: "Test successful retries",
internalReq: &internalRequest{
endpoint: "/success-retry",
method: http.MethodGet,
withResponse: nil,
withRequest: nil,
acceptedStatusCodes: []int{http.StatusOK},
},
expectedResp: nil,
wantErr: false,
},
{
name: "Test failed retries",
internalReq: &internalRequest{
endpoint: "/failed-retry",
method: http.MethodGet,
withResponse: nil,
withRequest: nil,
acceptedStatusCodes: []int{http.StatusOK},
},
expectedResp: nil,
wantErr: true,
},
{
name: "Test disable retries",
internalReq: &internalRequest{
endpoint: "/test-get",
method: http.MethodGet,
withResponse: nil,
withRequest: nil,
acceptedStatusCodes: []int{http.StatusOK},
},
expectedResp: nil,
disableRetry: true,
wantErr: false,
},
{
name: "Test request timeout on retries",
internalReq: &internalRequest{
endpoint: "/failed-retry",
method: http.MethodGet,
withResponse: nil,
withRequest: nil,
acceptedStatusCodes: []int{http.StatusOK},
},
expectedResp: nil,
withTimeout: true,
wantErr: true,
},
{
name: "Test request encoding with []byte encode failure",
internalReq: &internalRequest{
endpoint: "/dummy",
method: http.MethodPost,
withRequest: []byte("test data"),
contentType: "text/plain",
acceptedStatusCodes: []int{http.StatusOK},
},
expectedResp: nil,
contentEncoding: GzipEncoding,
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
c := newClient(&http.Client{}, ts.URL, "testApiKey", clientConfig{
contentEncoding: tt.contentEncoding,
encodingCompressionLevel: DefaultCompression,
maxRetries: 3,
disableRetry: tt.disableRetry,
retryOnStatus: map[int]bool{
502: true,
503: true,
504: true,
},
})
// For the specific test case, override the encoder to force an error
if tt.name == "Test request encoding with []byte encode failure" {
c.encoder = failingEncoder{}
}
ctx := context.Background()
if tt.withTimeout {
timeoutCtx, cancel := context.WithTimeout(ctx, 1*time.Second)
ctx = timeoutCtx
defer cancel()
}
err := c.executeRequest(ctx, tt.internalReq)
if tt.wantErr {
require.Error(t, err)
} else {
require.NoError(t, err)
require.Equal(t, tt.expectedResp, tt.internalReq.withResponse)
}
})
}
}
func TestNewClientNilRetryOnStatus(t *testing.T) {
c := newClient(&http.Client{}, "", "", clientConfig{
maxRetries: 3,
retryOnStatus: nil,
})
require.NotNil(t, c.retryOnStatus)
}
func (m mockJsonMarshaller) MarshalJSON() ([]byte, error) {
type Alias mockJsonMarshaller
if !m.valid {
return nil, errors.New("mockJsonMarshaller not valid")
}
if m.null {
return nil, nil
}
return json.Marshal(&struct {
Alias
}{
Alias: Alias(m),
})
}
10
codecov.yml Normal file
View file
@ -0,0 +1,10 @@
ignore:
- "types_easyjson.go"
coverage:
range: 60..80
round: down
precision: 2
status:
project:
default:
threshold: 1%
30
doc.go Normal file
View file
@ -0,0 +1,30 @@
// Package meilisearch is the official Meilisearch SDK for the Go programming language.
//
// The meilisearch-go SDK provides APIs and utilities that developers can use
// to build Go applications that use the Meilisearch service.
//
// See the meilisearch package documentation for more information.
// https://www.meilisearch.com/docs/reference
//
// Example:
//
// meili := New("http://localhost:7700", WithAPIKey("foobar"))
//
// idx := meili.Index("movies")
//
// documents := []map[string]interface{}{
// {"id": 1, "title": "Carol", "genres": []string{"Romance", "Drama"}},
// {"id": 2, "title": "Wonder Woman", "genres": []string{"Action", "Adventure"}},
// {"id": 3, "title": "Life of Pi", "genres": []string{"Adventure", "Drama"}},
// {"id": 4, "title": "Mad Max: Fury Road", "genres": []string{"Adventure", "Science Fiction"}},
// {"id": 5, "title": "Moana", "genres": []string{"Fantasy", "Action"}},
// {"id": 6, "title": "Philadelphia", "genres": []string{"Drama"}},
// }
// task, err := idx.AddDocuments(documents)
// if err != nil {
// fmt.Println(err)
// os.Exit(1)
// }
//
// fmt.Println(task.TaskUID)
package meilisearch
28
docker-compose.yml Normal file
View file
@ -0,0 +1,28 @@
version: "3.8"
volumes:
gopkg:
services:
package:
build: .
tty: true
stdin_open: true
working_dir: /home/package
environment:
- MEILISEARCH_URL=http://meilisearch:7700
depends_on:
- meilisearch
links:
- meilisearch
volumes:
- gopkg:/go/pkg/mod
- ./:/home/package
meilisearch:
image: getmeili/meilisearch:latest
ports:
- "7700"
environment:
- MEILI_MASTER_KEY=masterKey
- MEILI_NO_ANALYTICS=true
223
encoding.go Normal file
View file
@ -0,0 +1,223 @@
package meilisearch
import (
"bytes"
"compress/gzip"
"compress/zlib"
"encoding/json"
"github.com/andybalholm/brotli"
"io"
"sync"
)
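// encoder compresses request bodies and decompresses response bodies
// for a single content encoding (gzip, deflate, or brotli).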
type encoder interface {
Encode(rc io.Reader) (*bytes.Buffer, error)
Decode(data []byte, vPtr interface{}) error
}
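// newEncoding returns the encoder matching the given content encoding,
// backed by sync.Pool-ed writers and buffers, or nil when the encoding
// is unknown.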
func newEncoding(ce ContentEncoding, level EncodingCompressionLevel) encoder {
switch ce {
case GzipEncoding:
return &gzipEncoder{
gzWriterPool: &sync.Pool{
New: func() interface{} {
w, err := gzip.NewWriterLevel(io.Discard, level.Int())
return &gzipWriter{
writer: w,
err: err,
}
},
},
bufferPool: &sync.Pool{
New: func() interface{} {
return new(bytes.Buffer)
},
},
}
case DeflateEncoding:
return &flateEncoder{
flWriterPool: &sync.Pool{
New: func() interface{} {
w, err := zlib.NewWriterLevel(io.Discard, level.Int())
return &flateWriter{
writer: w,
err: err,
}
},
},
bufferPool: &sync.Pool{
New: func() interface{} {
return new(bytes.Buffer)
},
},
}
case BrotliEncoding:
return &brotliEncoder{
brWriterPool: &sync.Pool{
New: func() interface{} {
return brotli.NewWriterLevel(io.Discard, level.Int())
},
},
bufferPool: &sync.Pool{
New: func() interface{} {
return new(bytes.Buffer)
},
},
}
default:
return nil
}
}
type gzipEncoder struct {
gzWriterPool *sync.Pool
bufferPool *sync.Pool
}
type gzipWriter struct {
writer *gzip.Writer
err error
}
func (g *gzipEncoder) Encode(rc io.Reader) (*bytes.Buffer, error) {
w := g.gzWriterPool.Get().(*gzipWriter)
defer g.gzWriterPool.Put(w)
if w.err != nil {
return nil, w.err
}
defer func() {
_ = w.writer.Close()
}()
buf := g.bufferPool.Get().(*bytes.Buffer)
// Ownership of buf passes to the caller, so it must not be returned to
// the pool here; doing so would let another goroutine reset it while
// the caller is still reading the encoded bytes.
buf.Reset()
w.writer.Reset(buf)
if _, err := copyZeroAlloc(w.writer, rc); err != nil {
return nil, err
}
return buf, nil
}
func (g *gzipEncoder) Decode(data []byte, vPtr interface{}) error {
r, err := gzip.NewReader(bytes.NewReader(data))
if err != nil {
return err
}
defer func() {
_ = r.Close()
}()
if err := json.NewDecoder(r).Decode(vPtr); err != nil {
return err
}
return nil
}
type flateEncoder struct {
flWriterPool *sync.Pool
bufferPool *sync.Pool
}
type flateWriter struct {
writer *zlib.Writer
err error
}
func (d *flateEncoder) Encode(rc io.Reader) (*bytes.Buffer, error) {
w := d.flWriterPool.Get().(*flateWriter)
defer d.flWriterPool.Put(w)
if w.err != nil {
return nil, w.err
}
defer func() {
_ = w.writer.Close()
}()
buf := d.bufferPool.Get().(*bytes.Buffer)
buf.Reset()
w.writer.Reset(buf)
if _, err := copyZeroAlloc(w.writer, rc); err != nil {
return nil, err
}
return buf, nil
}
func (d *flateEncoder) Decode(data []byte, vPtr interface{}) error {
r, err := zlib.NewReader(bytes.NewBuffer(data))
if err != nil {
return err
}
defer func() {
_ = r.Close()
}()
if err := json.NewDecoder(r).Decode(vPtr); err != nil {
return err
}
return nil
}
type brotliEncoder struct {
brWriterPool *sync.Pool
bufferPool *sync.Pool
}
func (b *brotliEncoder) Encode(rc io.Reader) (*bytes.Buffer, error) {
w := b.brWriterPool.Get().(*brotli.Writer)
defer func() {
_ = w.Close()
b.brWriterPool.Put(w)
}()
buf := b.bufferPool.Get().(*bytes.Buffer)
buf.Reset()
w.Reset(buf)
if _, err := copyZeroAlloc(w, rc); err != nil {
return nil, err
}
return buf, nil
}
func (b *brotliEncoder) Decode(data []byte, vPtr interface{}) error {
r := brotli.NewReader(bytes.NewBuffer(data))
if err := json.NewDecoder(r).Decode(vPtr); err != nil {
return err
}
return nil
}
var copyBufPool = sync.Pool{
New: func() interface{} {
return make([]byte, 4096)
},
}
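// copyZeroAlloc copies r into w while avoiding allocations: it prefers
// the io.WriterTo / io.ReaderFrom fast paths and otherwise falls back
// to a pooled 4 KiB copy buffer.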
func copyZeroAlloc(w io.Writer, r io.Reader) (int64, error) {
if wt, ok := r.(io.WriterTo); ok {
return wt.WriteTo(w)
}
if rt, ok := w.(io.ReaderFrom); ok {
return rt.ReadFrom(r)
}
vbuf := copyBufPool.Get()
buf := vbuf.([]byte)
n, err := io.CopyBuffer(w, r, buf)
copyBufPool.Put(vbuf)
return n, err
}
148
encoding_bench_test.go Normal file
View file
@ -0,0 +1,148 @@
package meilisearch
import (
"bytes"
"encoding/json"
"testing"
)
func BenchmarkGzipEncoder(b *testing.B) {
encoder := newEncoding(GzipEncoding, DefaultCompression)
data := bytes.NewReader(make([]byte, 1024*1024)) // 1 MB of data
b.ResetTimer()
b.ReportAllocs()
for i := 0; i < b.N; i++ {
buf, err := encoder.Encode(data)
if err != nil {
b.Fatalf("Encode failed: %v", err)
}
_ = buf
}
}
func BenchmarkDeflateEncoder(b *testing.B) {
encoder := newEncoding(DeflateEncoding, DefaultCompression)
data := bytes.NewReader(make([]byte, 1024*1024)) // 1 MB of data
b.ResetTimer()
b.ReportAllocs()
for i := 0; i < b.N; i++ {
buf, err := encoder.Encode(data)
if err != nil {
b.Fatalf("Encode failed: %v", err)
}
_ = buf
}
}
func BenchmarkBrotliEncoder(b *testing.B) {
encoder := newEncoding(BrotliEncoding, DefaultCompression)
data := bytes.NewReader(make([]byte, 1024*1024)) // 1 MB of data
b.ResetTimer()
b.ReportAllocs()
for i := 0; i < b.N; i++ {
buf, err := encoder.Encode(data)
if err != nil {
b.Fatalf("Encode failed: %v", err)
}
_ = buf
}
}
func BenchmarkGzipDecoder(b *testing.B) {
encoder := newEncoding(GzipEncoding, DefaultCompression)
// Prepare a valid JSON input
data := map[string]interface{}{
"key1": "value1",
"key2": 12345,
"key3": []string{"item1", "item2", "item3"},
}
jsonData, err := json.Marshal(data)
if err != nil {
b.Fatalf("JSON marshal failed: %v", err)
}
// Encode the valid JSON data
input := bytes.NewReader(jsonData)
encoded, err := encoder.Encode(input)
if err != nil {
b.Fatalf("Encode failed: %v", err)
}
b.ResetTimer()
b.ReportAllocs()
for i := 0; i < b.N; i++ {
var result map[string]interface{}
if err := encoder.Decode(encoded.Bytes(), &result); err != nil {
b.Fatalf("Decode failed: %v", err)
}
}
}
func BenchmarkFlateDecoder(b *testing.B) {
encoder := newEncoding(DeflateEncoding, DefaultCompression)
// Prepare valid JSON input
data := map[string]interface{}{
"key1": "value1",
"key2": 12345,
"key3": []string{"item1", "item2", "item3"},
}
jsonData, err := json.Marshal(data)
if err != nil {
b.Fatalf("JSON marshal failed: %v", err)
}
// Encode the valid JSON data
input := bytes.NewReader(jsonData)
encoded, err := encoder.Encode(input)
if err != nil {
b.Fatalf("Encode failed: %v", err)
}
b.ResetTimer()
b.ReportAllocs()
for i := 0; i < b.N; i++ {
var result map[string]interface{}
if err := encoder.Decode(encoded.Bytes(), &result); err != nil {
b.Fatalf("Decode failed: %v", err)
}
}
}
func BenchmarkBrotliDecoder(b *testing.B) {
encoder := newEncoding(BrotliEncoding, DefaultCompression)
// Prepare valid JSON input
data := map[string]interface{}{
"key1": "value1",
"key2": 12345,
"key3": []string{"item1", "item2", "item3"},
}
jsonData, err := json.Marshal(data)
if err != nil {
b.Fatalf("JSON marshal failed: %v", err)
}
// Encode the valid JSON data
input := bytes.NewReader(jsonData)
encoded, err := encoder.Encode(input)
if err != nil {
b.Fatalf("Encode failed: %v", err)
}
b.ResetTimer()
b.ReportAllocs()
for i := 0; i < b.N; i++ {
var result map[string]interface{}
if err := encoder.Decode(encoded.Bytes(), &result); err != nil {
b.Fatalf("Decode failed: %v", err)
}
}
}
380
encoding_test.go Normal file
View file
@ -0,0 +1,380 @@
package meilisearch
import (
"bytes"
"compress/flate"
"compress/gzip"
"compress/zlib"
"encoding/json"
"errors"
"github.com/andybalholm/brotli"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"io"
"strings"
"sync"
"testing"
)
type mockData struct {
Name string
Age int
}
type errorWriter struct{}
func (e *errorWriter) Write(p []byte) (int, error) {
return 0, errors.New("write error")
}
type errorReader struct{}
func (e *errorReader) Read(p []byte) (int, error) {
return 0, errors.New("read error")
}
func Test_Encode_ErrorOnNewWriter(t *testing.T) {
g := &gzipEncoder{
gzWriterPool: &sync.Pool{
New: func() interface{} {
return &gzipWriter{
writer: nil,
err: errors.New("new writer error"),
}
},
},
bufferPool: &sync.Pool{
New: func() interface{} {
return new(bytes.Buffer)
},
},
}
d := &flateEncoder{
flWriterPool: &sync.Pool{
New: func() interface{} {
return &flateWriter{
writer: nil,
err: errors.New("new writer error"),
}
},
},
bufferPool: &sync.Pool{
New: func() interface{} {
return new(bytes.Buffer)
},
},
}
_, err := g.Encode(bytes.NewReader([]byte("test")))
require.Error(t, err)
_, err = d.Encode(bytes.NewReader([]byte("test")))
require.Error(t, err)
}
func Test_Encode_ErrorInCopyZeroAlloc(t *testing.T) {
g := &gzipEncoder{
gzWriterPool: &sync.Pool{
New: func() interface{} {
w, _ := gzip.NewWriterLevel(io.Discard, gzip.DefaultCompression)
return &gzipWriter{
writer: w,
err: nil,
}
},
},
bufferPool: &sync.Pool{
New: func() interface{} {
return new(bytes.Buffer)
},
},
}
d := &flateEncoder{
flWriterPool: &sync.Pool{
New: func() interface{} {
w, err := zlib.NewWriterLevel(io.Discard, flate.DefaultCompression)
return &flateWriter{
writer: w,
err: err,
}
},
},
bufferPool: &sync.Pool{
New: func() interface{} {
return new(bytes.Buffer)
},
},
}
b := &brotliEncoder{
brWriterPool: &sync.Pool{
New: func() interface{} {
return brotli.NewWriterLevel(io.Discard, brotli.DefaultCompression)
},
},
bufferPool: &sync.Pool{
New: func() interface{} {
return new(bytes.Buffer)
},
},
}
_, err := g.Encode(&errorReader{})
require.Error(t, err)
_, err = d.Encode(&errorReader{})
require.Error(t, err)
_, err = b.Encode(&errorReader{})
require.Error(t, err)
}
func Test_InvalidContentType(t *testing.T) {
enc := newEncoding("invalid", DefaultCompression)
require.Nil(t, enc)
}
func TestGzipEncoder(t *testing.T) {
encoder := newEncoding(GzipEncoding, DefaultCompression)
assert.NotNil(t, encoder, "gzip encoder should not be nil")
original := &mockData{Name: "John Doe", Age: 30}
originalJSON, err := json.Marshal(original)
assert.NoError(t, err, "marshalling original data should not produce an error")
readCloser := io.NopCloser(bytes.NewReader(originalJSON))
encodedData, err := encoder.Encode(readCloser)
assert.NoError(t, err, "encoding should not produce an error")
assert.NotNil(t, encodedData, "encoded data should not be nil")
var decoded mockData
err = encoder.Decode(encodedData.Bytes(), &decoded)
assert.NoError(t, err, "decoding should not produce an error")
assert.Equal(t, original, &decoded, "decoded data should match the original")
var invalidType int
err = encoder.Decode(encodedData.Bytes(), &invalidType)
assert.Error(t, err)
}
func TestDeflateEncoder(t *testing.T) {
encoder := newEncoding(DeflateEncoding, DefaultCompression)
assert.NotNil(t, encoder, "deflate encoder should not be nil")
original := &mockData{Name: "Jane Doe", Age: 25}
originalJSON, err := json.Marshal(original)
assert.NoError(t, err, "marshalling original data should not produce an error")
readCloser := io.NopCloser(bytes.NewReader(originalJSON))
encodedData, err := encoder.Encode(readCloser)
assert.NoError(t, err, "encoding should not produce an error")
assert.NotNil(t, encodedData, "encoded data should not be nil")
var decoded mockData
err = encoder.Decode(encodedData.Bytes(), &decoded)
assert.NoError(t, err, "decoding should not produce an error")
assert.Equal(t, original, &decoded, "decoded data should match the original")
var invalidType int
err = encoder.Decode(encodedData.Bytes(), &invalidType)
assert.Error(t, err)
}
func TestBrotliEncoder(t *testing.T) {
encoder := newEncoding(BrotliEncoding, DefaultCompression)
assert.NotNil(t, encoder, "brotli encoder should not be nil")
original := &mockData{Name: "Jane Doe", Age: 25}
originalJSON, err := json.Marshal(original)
assert.NoError(t, err, "marshalling original data should not produce an error")
readCloser := io.NopCloser(bytes.NewReader(originalJSON))
encodedData, err := encoder.Encode(readCloser)
assert.NoError(t, err, "encoding should not produce an error")
assert.NotNil(t, encodedData, "encoded data should not be nil")
var decoded mockData
err = encoder.Decode(encodedData.Bytes(), &decoded)
assert.NoError(t, err, "decoding should not produce an error")
assert.Equal(t, original, &decoded, "decoded data should match the original")
var invalidType int
err = encoder.Decode(encodedData.Bytes(), &invalidType)
assert.Error(t, err)
}
func TestGzipEncoder_EmptyData(t *testing.T) {
encoder := newEncoding(GzipEncoding, DefaultCompression)
assert.NotNil(t, encoder, "gzip encoder should not be nil")
original := &mockData{}
originalJSON, err := json.Marshal(original)
assert.NoError(t, err, "marshalling original data should not produce an error")
readCloser := io.NopCloser(bytes.NewReader(originalJSON))
encodedData, err := encoder.Encode(readCloser)
assert.NoError(t, err, "encoding should not produce an error")
assert.NotNil(t, encodedData, "encoded data should not be nil")
var decoded mockData
err = encoder.Decode(encodedData.Bytes(), &decoded)
assert.NoError(t, err, "decoding should not produce an error")
assert.Equal(t, original, &decoded, "decoded data should match the original")
}
func TestDeflateEncoder_EmptyData(t *testing.T) {
encoder := newEncoding(DeflateEncoding, DefaultCompression)
assert.NotNil(t, encoder, "deflate encoder should not be nil")
original := &mockData{}
originalJSON, err := json.Marshal(original)
assert.NoError(t, err, "marshalling original data should not produce an error")
readCloser := io.NopCloser(bytes.NewReader(originalJSON))
encodedData, err := encoder.Encode(readCloser)
assert.NoError(t, err, "encoding should not produce an error")
assert.NotNil(t, encodedData, "encoded data should not be nil")
var decoded mockData
err = encoder.Decode(encodedData.Bytes(), &decoded)
assert.NoError(t, err, "decoding should not produce an error")
assert.Equal(t, original, &decoded, "decoded data should match the original")
}
func TestBrotliEncoder_EmptyData(t *testing.T) {
encoder := newEncoding(BrotliEncoding, DefaultCompression)
assert.NotNil(t, encoder, "brotli encoder should not be nil")
original := &mockData{}
originalJSON, err := json.Marshal(original)
assert.NoError(t, err, "marshalling original data should not produce an error")
readCloser := io.NopCloser(bytes.NewReader(originalJSON))
encodedData, err := encoder.Encode(readCloser)
assert.NoError(t, err, "encoding should not produce an error")
assert.NotNil(t, encodedData, "encoded data should not be nil")
var decoded mockData
err = encoder.Decode(encodedData.Bytes(), &decoded)
assert.NoError(t, err, "decoding should not produce an error")
assert.Equal(t, original, &decoded, "decoded data should match the original")
}
func TestGzipEncoder_InvalidData(t *testing.T) {
encoder := newEncoding(GzipEncoding, DefaultCompression)
assert.NotNil(t, encoder, "gzip encoder should not be nil")
var decoded mockData
err := encoder.Decode([]byte("invalid data"), &decoded)
assert.Error(t, err, "decoding invalid data should produce an error")
}
func TestDeflateEncoder_InvalidData(t *testing.T) {
encoder := newEncoding(DeflateEncoding, DefaultCompression)
assert.NotNil(t, encoder, "deflate encoder should not be nil")
var decoded mockData
err := encoder.Decode([]byte("invalid data"), &decoded)
assert.Error(t, err, "decoding invalid data should produce an error")
}
func TestBrotliEncoder_InvalidData(t *testing.T) {
encoder := newEncoding(BrotliEncoding, DefaultCompression)
assert.NotNil(t, encoder, "brotli encoder should not be nil")
var decoded mockData
err := encoder.Decode([]byte("invalid data"), &decoded)
assert.Error(t, err, "decoding invalid data should produce an error")
}
func TestCopyZeroAlloc(t *testing.T) {
t.Run("RegularCopy", func(t *testing.T) {
src := strings.NewReader("hello world")
dst := &bytes.Buffer{}
n, err := copyZeroAlloc(dst, src)
assert.NoError(t, err, "copy should not produce an error")
assert.Equal(t, int64(11), n, "copy length should be 11")
assert.Equal(t, "hello world", dst.String(), "destination should contain the copied data")
})
t.Run("EmptySource", func(t *testing.T) {
src := strings.NewReader("")
dst := &bytes.Buffer{}
n, err := copyZeroAlloc(dst, src)
assert.NoError(t, err, "copy should not produce an error")
assert.Equal(t, int64(0), n, "copy length should be 0")
assert.Equal(t, "", dst.String(), "destination should be empty")
})
t.Run("LargeDataCopy", func(t *testing.T) {
data := strings.Repeat("a", 10000)
src := strings.NewReader(data)
dst := &bytes.Buffer{}
n, err := copyZeroAlloc(dst, src)
assert.NoError(t, err, "copy should not produce an error")
assert.Equal(t, int64(len(data)), n, "copy length should match the source data length")
assert.Equal(t, data, dst.String(), "destination should contain the copied data")
})
t.Run("ErrorOnWrite", func(t *testing.T) {
src := strings.NewReader("hello world")
dst := &errorWriter{}
n, err := copyZeroAlloc(dst, src)
assert.Error(t, err, "copy should produce an error")
assert.Equal(t, int64(0), n, "copy length should be 0 due to the error")
assert.Equal(t, "write error", err.Error(), "error should match expected error")
})
t.Run("ErrorOnRead", func(t *testing.T) {
src := &errorReader{}
dst := &bytes.Buffer{}
n, err := copyZeroAlloc(dst, src)
assert.Error(t, err, "copy should produce an error")
assert.Equal(t, int64(0), n, "copy length should be 0 due to the error")
assert.Equal(t, "read error", err.Error(), "error should match expected error")
})
t.Run("ConcurrentAccess", func(t *testing.T) {
var wg sync.WaitGroup
data := "concurrent data"
var mu sync.Mutex
dst := &bytes.Buffer{}
for i := 0; i < 10; i++ {
wg.Add(1)
go func() {
defer wg.Done()
src := strings.NewReader(data) // each goroutine gets its own reader
buf := &bytes.Buffer{} // each goroutine uses a separate buffer
_, _ = copyZeroAlloc(buf, src)
mu.Lock()
defer mu.Unlock()
dst.Write(buf.Bytes()) // safely combine results
}()
}
wg.Wait()
mu.Lock()
assert.Equal(t, strings.Repeat(data, 10), dst.String(), "destination should contain the copied data")
mu.Unlock()
})
}
189
error.go Normal file
View file
@ -0,0 +1,189 @@
package meilisearch
import (
"encoding/json"
"errors"
"fmt"
"strings"
)
// ErrCode are all possible errors found during requests
type ErrCode int
const (
// ErrCodeUnknown default error code, undefined
ErrCodeUnknown ErrCode = 0
// ErrCodeMarshalRequest impossible to serialize the request body
ErrCodeMarshalRequest ErrCode = iota + 1
// ErrCodeResponseUnmarshalBody impossible to deserialize the response body
ErrCodeResponseUnmarshalBody
// MeilisearchApiError error sent by the meilisearch api
MeilisearchApiError
// MeilisearchApiErrorWithoutMessage error sent by the meilisearch api without a message
MeilisearchApiErrorWithoutMessage
// MeilisearchTimeoutError the request timed out
MeilisearchTimeoutError
// MeilisearchCommunicationError impossible to execute a request
MeilisearchCommunicationError
// MeilisearchMaxRetriesExceeded the maximum number of retries was exceeded
MeilisearchMaxRetriesExceeded
)
const (
rawStringCtx = `(path "${method} ${endpoint}" with method "${function}")`
rawStringMarshalRequest = `unable to marshal body from request: '${request}'`
rawStringResponseUnmarshalBody = `unable to unmarshal body from response: '${response}' status code: ${statusCode}`
rawStringMeilisearchApiError = `unaccepted status code found: ${statusCode} expected: ${statusCodeExpected}, MeilisearchApiError Message: ${message}, Code: ${code}, Type: ${type}, Link: ${link}`
rawStringMeilisearchApiErrorWithoutMessage = `unaccepted status code found: ${statusCode} expected: ${statusCodeExpected}, MeilisearchApiError Message: ${message}`
rawStringMeilisearchTimeoutError = `MeilisearchTimeoutError`
rawStringMeilisearchCommunicationError = `MeilisearchCommunicationError unable to execute request`
rawStringMeilisearchMaxRetriesExceeded = "failed to request and max retries exceeded"
)
func (e ErrCode) rawMessage() string {
switch e {
case ErrCodeMarshalRequest:
return rawStringMarshalRequest + " " + rawStringCtx
case ErrCodeResponseUnmarshalBody:
return rawStringResponseUnmarshalBody + " " + rawStringCtx
case MeilisearchApiError:
return rawStringMeilisearchApiError + " " + rawStringCtx
case MeilisearchApiErrorWithoutMessage:
return rawStringMeilisearchApiErrorWithoutMessage + " " + rawStringCtx
case MeilisearchTimeoutError:
return rawStringMeilisearchTimeoutError + " " + rawStringCtx
case MeilisearchCommunicationError:
return rawStringMeilisearchCommunicationError + " " + rawStringCtx
case MeilisearchMaxRetriesExceeded:
return rawStringMeilisearchMaxRetriesExceeded + " " + rawStringCtx
default:
return rawStringCtx
}
}
type meilisearchApiError struct {
Message string `json:"message"`
Code string `json:"code"`
Type string `json:"type"`
Link string `json:"link"`
}
// Error is the internal error structure that all exposed methods use.
// So ALL errors returned by this library can be cast to this struct (as a pointer).
type Error struct {
// Endpoint is the path of the request (host is not in)
Endpoint string
// Method is the HTTP verb of the request
Method string
// Function name used
Function string
// RequestToString is the raw request as a string ('empty request' if not present)
RequestToString string
// ResponseToString is the raw response as a string ('empty response' if not present)
ResponseToString string
// MeilisearchApiError is the error information returned by the meilisearch api
// (its Message is 'empty meilisearch message' if not present)
MeilisearchApiError meilisearchApiError
// StatusCode of the request
StatusCode int
// StatusCode expected by the endpoint to be considered as a success
StatusCodeExpected []int
rawMessage string
// OriginError is the origin error that produce the current Error. It can be nil in case of a bad status code.
OriginError error
// ErrCode is the internal error code that represent the different step when executing a request that can produce
// an error.
ErrCode ErrCode
encoder
}
// Error returns a well-formatted, human-readable message.
func (e *Error) Error() string {
message := namedSprintf(e.rawMessage, map[string]interface{}{
"endpoint": e.Endpoint,
"method": e.Method,
"function": e.Function,
"request": e.RequestToString,
"response": e.ResponseToString,
"statusCodeExpected": e.StatusCodeExpected,
"statusCode": e.StatusCode,
"message": e.MeilisearchApiError.Message,
"code": e.MeilisearchApiError.Code,
"type": e.MeilisearchApiError.Type,
"link": e.MeilisearchApiError.Link,
})
if e.OriginError != nil {
return fmt.Sprintf("%s: %s", message, e.OriginError.Error())
}
return message
}
// WithErrCode adds an error code to an error
func (e *Error) WithErrCode(err ErrCode, errs ...error) *Error {
if errs != nil {
e.OriginError = errs[0]
}
e.rawMessage = err.rawMessage()
e.ErrCode = err
return e
}
// ErrorBody adds a response body to an error
func (e *Error) ErrorBody(body []byte) {
msg := meilisearchApiError{}
if e.encoder != nil {
err := e.encoder.Decode(body, &msg)
if err == nil {
e.MeilisearchApiError.Message = msg.Message
e.MeilisearchApiError.Code = msg.Code
e.MeilisearchApiError.Type = msg.Type
e.MeilisearchApiError.Link = msg.Link
}
return
}
e.ResponseToString = string(body)
err := json.Unmarshal(body, &msg)
if err == nil {
e.MeilisearchApiError.Message = msg.Message
e.MeilisearchApiError.Code = msg.Code
e.MeilisearchApiError.Type = msg.Type
e.MeilisearchApiError.Link = msg.Link
}
}
// VersionErrorHintMessage appends a hint to the error message when the failure
// may come from a version incompatibility with meilisearch
func VersionErrorHintMessage(err error, req *internalRequest) error {
return fmt.Errorf("%w. Hint: It might not be working because you're not up to date with the "+
"Meilisearch version that %s call requires", err, req.functionName)
}
func namedSprintf(format string, params map[string]interface{}) string {
for key, val := range params {
format = strings.ReplaceAll(format, "${"+key+"}", fmt.Sprintf("%v", val))
}
return format
}
// General errors
var (
ErrInvalidRequestMethod = errors.New("request body is not expected for GET and HEAD requests")
ErrRequestBodyWithoutContentType = errors.New("request body without Content-Type is not allowed")
ErrNoSearchRequest = errors.New("no search request provided")
ErrNoFacetSearchRequest = errors.New("no search facet request provided")
ErrConnectingFailed = errors.New("meilisearch is not connected")
)
49
error_test.go Normal file
View file
@ -0,0 +1,49 @@
package meilisearch
import (
"fmt"
"net/http"
"testing"
"github.com/stretchr/testify/require"
)
func TestError_VersionErrorHintMessage(t *testing.T) {
type args struct {
request *internalRequest
mockedError error
}
tests := []struct {
name string
args args
}{
{
name: "VersionErrorHintMessageGetDocument",
args: args{
request: &internalRequest{
functionName: "GetDocuments",
},
mockedError: &Error{
Endpoint: "endpointForGetDocuments",
Method: http.MethodPost,
Function: "GetDocuments",
RequestToString: "empty request",
ResponseToString: "empty response",
MeilisearchApiError: meilisearchApiError{
Message: "empty Meilisearch message",
},
StatusCode: 1,
rawMessage: "empty message",
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
err := VersionErrorHintMessage(tt.args.mockedError, tt.args.request)
require.Error(t, err)
fmt.Println(err)
require.Equal(t, tt.args.mockedError.Error()+". Hint: It might not be working because you're not up to date with the Meilisearch version that "+tt.args.request.functionName+" call requires", err.Error())
})
}
}
47
example_test.go Normal file
View file
@ -0,0 +1,47 @@
package meilisearch
import (
"fmt"
"os"
)
func ExampleNew() {
// WithAPIKey is optional
meili := New("http://localhost:7700", WithAPIKey("foobar"))
// An index is where the documents are stored.
idx := meili.Index("movies")
// If the index 'movies' does not exist, Meilisearch creates it when you first add the documents.
documents := []map[string]interface{}{
{"id": 1, "title": "Carol", "genres": []string{"Romance", "Drama"}},
{"id": 2, "title": "Wonder Woman", "genres": []string{"Action", "Adventure"}},
{"id": 3, "title": "Life of Pi", "genres": []string{"Adventure", "Drama"}},
{"id": 4, "title": "Mad Max: Fury Road", "genres": []string{"Adventure", "Science Fiction"}},
{"id": 5, "title": "Moana", "genres": []string{"Fantasy", "Action"}},
{"id": 6, "title": "Philadelphia", "genres": []string{"Drama"}},
}
task, err := idx.AddDocuments(documents)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
fmt.Println(task.TaskUID)
}
func ExampleConnect() {
meili, err := Connect("http://localhost:7700", WithAPIKey("foobar"))
if err != nil {
fmt.Println(err)
return
}
ver, err := meili.Version()
if err != nil {
fmt.Println(err)
return
}
fmt.Println(ver.PkgVersion)
}
87
features.go Normal file
View file
@ -0,0 +1,87 @@
package meilisearch
import (
"context"
"net/http"
)
// ExperimentalFeatures is a handle on the experimental features of a
// meilisearch instance, extending ExperimentalFeaturesBase with the client
// used to fetch and update them.
type ExperimentalFeatures struct {
client *client
ExperimentalFeaturesBase
}
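// ExperimentalFeatures returns a handle that can read and update the
// experimental features of the meilisearch instance.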
func (m *meilisearch) ExperimentalFeatures() *ExperimentalFeatures {
return &ExperimentalFeatures{client: m.client}
}
func (ef *ExperimentalFeatures) SetLogsRoute(logsRoute bool) *ExperimentalFeatures {
ef.LogsRoute = &logsRoute
return ef
}
func (ef *ExperimentalFeatures) SetMetrics(metrics bool) *ExperimentalFeatures {
ef.Metrics = &metrics
return ef
}
func (ef *ExperimentalFeatures) SetEditDocumentsByFunction(editDocumentsByFunction bool) *ExperimentalFeatures {
ef.EditDocumentsByFunction = &editDocumentsByFunction
return ef
}
func (ef *ExperimentalFeatures) SetContainsFilter(containsFilter bool) *ExperimentalFeatures {
ef.ContainsFilter = &containsFilter
return ef
}
func (ef *ExperimentalFeatures) Get() (*ExperimentalFeaturesResult, error) {
return ef.GetWithContext(context.Background())
}
func (ef *ExperimentalFeatures) GetWithContext(ctx context.Context) (*ExperimentalFeaturesResult, error) {
resp := new(ExperimentalFeaturesResult)
req := &internalRequest{
endpoint: "/experimental-features",
method: http.MethodGet,
withRequest: nil,
withResponse: &resp,
withQueryParams: map[string]string{},
acceptedStatusCodes: []int{http.StatusOK},
functionName: "GetExperimentalFeatures",
}
if err := ef.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (ef *ExperimentalFeatures) Update() (*ExperimentalFeaturesResult, error) {
return ef.UpdateWithContext(context.Background())
}
func (ef *ExperimentalFeatures) UpdateWithContext(ctx context.Context) (*ExperimentalFeaturesResult, error) {
request := ExperimentalFeaturesBase{
LogsRoute: ef.LogsRoute,
Metrics: ef.Metrics,
EditDocumentsByFunction: ef.EditDocumentsByFunction,
ContainsFilter: ef.ContainsFilter,
}
resp := new(ExperimentalFeaturesResult)
req := &internalRequest{
endpoint: "/experimental-features",
method: http.MethodPatch,
contentType: contentTypeJSON,
withRequest: request,
withResponse: resp,
withQueryParams: nil,
acceptedStatusCodes: []int{http.StatusOK},
functionName: "UpdateExperimentalFeatures",
}
if err := ef.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
75
features_test.go Normal file
View file
@ -0,0 +1,75 @@
package meilisearch
import (
"crypto/tls"
"testing"
"github.com/stretchr/testify/require"
)
func TestGet_ExperimentalFeatures(t *testing.T) {
sv := setup(t, "")
customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{
InsecureSkipVerify: true,
}))
tests := []struct {
name string
client ServiceManager
}{
{
name: "TestGetStats",
client: sv,
},
{
name: "TestGetStatsWithCustomClient",
client: customSv,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
ef := tt.client.ExperimentalFeatures()
gotResp, err := ef.Get()
require.NoError(t, err)
require.NotNil(t, gotResp, "ExperimentalFeatures.Get() should not return nil value")
})
}
}
func TestUpdate_ExperimentalFeatures(t *testing.T) {
sv := setup(t, "")
customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{
InsecureSkipVerify: true,
}))
tests := []struct {
name string
client ServiceManager
}{
{
name: "TestUpdateStats",
client: sv,
},
{
name: "TestUpdateStatsWithCustomClient",
client: customSv,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
ef := tt.client.ExperimentalFeatures()
ef.SetLogsRoute(true)
ef.SetMetrics(true)
ef.SetEditDocumentsByFunction(true)
ef.SetContainsFilter(true)
gotResp, err := ef.Update()
require.NoError(t, err)
require.Equal(t, true, gotResp.LogsRoute, "ExperimentalFeatures.Update() should return logsRoute as true")
require.Equal(t, true, gotResp.Metrics, "ExperimentalFeatures.Update() should return metrics as true")
require.Equal(t, true, gotResp.EditDocumentsByFunction, "ExperimentalFeatures.Update() should return editDocumentsByFunction as true")
require.Equal(t, true, gotResp.ContainsFilter, "ExperimentalFeatures.Update() should return containsFilter as true")
})
}
}
10
go.mod Normal file
View file
@ -0,0 +1,10 @@
module github.com/meilisearch/meilisearch-go
go 1.16
require (
github.com/andybalholm/brotli v1.1.1
github.com/golang-jwt/jwt/v4 v4.5.1
github.com/mailru/easyjson v0.9.0
github.com/stretchr/testify v1.8.2
)
27
go.sum Normal file
View file
@ -0,0 +1,27 @@
github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=
github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/golang-jwt/jwt/v4 v4.5.1 h1:JdqV9zKUdtaa9gdPlywC3aeoEsR681PlKC+4F5gQgeo=
github.com/golang-jwt/jwt/v4 v4.5.1/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/mailru/easyjson v0.9.0 h1:PrnmzHw7262yW8sTBwxi1PdJA3Iw/EKBa8psRf7d9a4=
github.com/mailru/easyjson v0.9.0/go.mod h1:1+xMtQp2MRNVL/V1bOzuP3aP8VNwRW55fQUto+XFtTU=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8=
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
136
helper.go Normal file
View file
@ -0,0 +1,136 @@
package meilisearch
import (
"bytes"
"context"
"encoding/csv"
"encoding/json"
"fmt"
"net/url"
"regexp"
"strconv"
"strings"
"time"
)
// IsValidUUID reports whether the given string is a canonical version-4 UUID.
// The variant nibble must be one of 8, 9, a, or b.
func IsValidUUID(uuid string) bool {
	r := regexp.MustCompile("^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-4[a-fA-F0-9]{3}-[89abAB][a-fA-F0-9]{3}-[a-fA-F0-9]{12}$")
	return r.MatchString(uuid)
}
// convertKeyToParsedKey transforms a Key into a KeyParsed so that an
// ExpiresAt provided as a time.Time is sent in the ISO-8601 format
// expected by meilisearch
func convertKeyToParsedKey(key Key) (resp KeyParsed) {
resp = KeyParsed{
Name: key.Name,
Description: key.Description,
UID: key.UID,
Actions: key.Actions,
Indexes: key.Indexes,
}
// Convert time.Time to *string to fit the exact ISO-8601
// format of meilisearch
if !key.ExpiresAt.IsZero() {
resp.ExpiresAt = formatDate(key.ExpiresAt, true)
}
return resp
}
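// encodeTasksQuery maps every non-zero field of a TasksQuery onto the
// query parameters of the request.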
func encodeTasksQuery(param *TasksQuery, req *internalRequest) {
if param.Limit != 0 {
req.withQueryParams["limit"] = strconv.FormatInt(param.Limit, 10)
}
if param.From != 0 {
req.withQueryParams["from"] = strconv.FormatInt(param.From, 10)
}
if len(param.Statuses) != 0 {
var statuses []string
for _, status := range param.Statuses {
statuses = append(statuses, string(status))
}
req.withQueryParams["statuses"] = strings.Join(statuses, ",")
}
if len(param.Types) != 0 {
var types []string
for _, t := range param.Types {
types = append(types, string(t))
}
req.withQueryParams["types"] = strings.Join(types, ",")
}
if len(param.IndexUIDS) != 0 {
req.withQueryParams["indexUids"] = strings.Join(param.IndexUIDS, ",")
}
if len(param.UIDS) != 0 {
req.withQueryParams["uids"] = strings.Trim(strings.Join(strings.Fields(fmt.Sprint(param.UIDS)), ","), "[]")
}
if len(param.CanceledBy) != 0 {
req.withQueryParams["canceledBy"] = strings.Trim(strings.Join(strings.Fields(fmt.Sprint(param.CanceledBy)), ","), "[]")
}
if !param.BeforeEnqueuedAt.IsZero() {
req.withQueryParams["beforeEnqueuedAt"] = *formatDate(param.BeforeEnqueuedAt, false)
}
if !param.AfterEnqueuedAt.IsZero() {
req.withQueryParams["afterEnqueuedAt"] = *formatDate(param.AfterEnqueuedAt, false)
}
if !param.BeforeStartedAt.IsZero() {
req.withQueryParams["beforeStartedAt"] = *formatDate(param.BeforeStartedAt, false)
}
if !param.AfterStartedAt.IsZero() {
req.withQueryParams["afterStartedAt"] = *formatDate(param.AfterStartedAt, false)
}
if !param.BeforeFinishedAt.IsZero() {
req.withQueryParams["beforeFinishedAt"] = *formatDate(param.BeforeFinishedAt, false)
}
if !param.AfterFinishedAt.IsZero() {
req.withQueryParams["afterFinishedAt"] = *formatDate(param.AfterFinishedAt, false)
}
}
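// For example (assumed values), &TasksQuery{Limit: 10, Statuses: []TaskStatus{"succeeded"}}
// sets withQueryParams to {"limit": "10", "statuses": "succeeded"}.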
// formatDate serializes date as an RFC 3339 / ISO-8601 UTC string.
// The boolean parameter is currently unused.
func formatDate(date time.Time, _ bool) *string {
const format = "2006-01-02T15:04:05Z"
timeParsedToString := date.Format(format)
return &timeParsedToString
}
// transformStringVariadicToMap maps an optional variadic primary key to its
// query-option form; only the first value is used, extras are ignored.
func transformStringVariadicToMap(primaryKey ...string) (options map[string]string) {
if len(primaryKey) > 0 {
return map[string]string{
"primaryKey": primaryKey[0],
}
}
return nil
}
func transformCsvDocumentsQueryToMap(options *CsvDocumentsQuery) map[string]string {
var optionsMap map[string]string
data, _ := json.Marshal(options)
_ = json.Unmarshal(data, &optionsMap)
return optionsMap
}
func generateQueryForOptions(options map[string]string) (urlQuery string) {
q := url.Values{}
for key, val := range options {
q.Add(key, val)
}
return q.Encode()
}
func sendCsvRecords(ctx context.Context, documentsCsvFunc func(ctx context.Context, recs []byte, op *CsvDocumentsQuery) (resp *TaskInfo, err error), records [][]string, options *CsvDocumentsQuery) (*TaskInfo, error) {
b := new(bytes.Buffer)
w := csv.NewWriter(b)
w.UseCRLF = true
err := w.WriteAll(records)
if err != nil {
return nil, fmt.Errorf("could not write CSV records: %w", err)
}
resp, err := documentsCsvFunc(ctx, b.Bytes(), options)
if err != nil {
return nil, err
}
return resp, nil
}
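// As used by saveDocumentsFromReaderInBatches, each batch passed here carries
// the CSV header as its first record, e.g. (assumed data):
//
//	records := [][]string{{"id", "title"}, {"1", "Alice"}, {"2", "Bob"}}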

helper_test.go Normal file

@ -0,0 +1,118 @@
package meilisearch
import (
"net/url"
"reflect"
"strconv"
"testing"
"time"
)
func (req *internalRequest) init() {
req.withQueryParams = make(map[string]string)
}
func formatDateForComparison(date time.Time) string {
const format = "2006-01-02T15:04:05Z"
return date.Format(format)
}
func TestConvertKeyToParsedKey(t *testing.T) {
key := Key{
Name: "test",
Description: "test description",
UID: "123",
Actions: []string{"read", "write"},
Indexes: []string{"index1", "index2"},
ExpiresAt: time.Now(),
}
expectedExpiresAt := formatDateForComparison(key.ExpiresAt)
parsedKey := convertKeyToParsedKey(key)
if parsedKey.Name != key.Name ||
parsedKey.Description != key.Description ||
parsedKey.UID != key.UID ||
!reflect.DeepEqual(parsedKey.Actions, key.Actions) ||
!reflect.DeepEqual(parsedKey.Indexes, key.Indexes) ||
parsedKey.ExpiresAt == nil || *parsedKey.ExpiresAt != expectedExpiresAt {
t.Errorf("convertKeyToParsedKey(%v) = %v; want %v", key, parsedKey, key)
}
}
func TestEncodeTasksQuery(t *testing.T) {
param := &TasksQuery{
Limit: 10,
From: 5,
Statuses: []TaskStatus{"queued", "running"},
Types: []TaskType{"type1", "type2"},
IndexUIDS: []string{"uid1", "uid2"},
UIDS: []int64{1, 2, 3},
CanceledBy: []int64{4, 5},
BeforeEnqueuedAt: time.Now().Add(-10 * time.Hour),
AfterEnqueuedAt: time.Now().Add(-20 * time.Hour),
BeforeStartedAt: time.Now().Add(-30 * time.Hour),
AfterStartedAt: time.Now().Add(-40 * time.Hour),
BeforeFinishedAt: time.Now().Add(-50 * time.Hour),
AfterFinishedAt: time.Now().Add(-60 * time.Hour),
}
req := &internalRequest{}
req.init()
encodeTasksQuery(param, req)
expectedParams := map[string]string{
"limit": strconv.FormatInt(param.Limit, 10),
"from": strconv.FormatInt(param.From, 10),
"statuses": "queued,running",
"types": "type1,type2",
"indexUids": "uid1,uid2",
"uids": "1,2,3",
"canceledBy": "4,5",
"beforeEnqueuedAt": formatDateForComparison(param.BeforeEnqueuedAt),
"afterEnqueuedAt": formatDateForComparison(param.AfterEnqueuedAt),
"beforeStartedAt": formatDateForComparison(param.BeforeStartedAt),
"afterStartedAt": formatDateForComparison(param.AfterStartedAt),
"beforeFinishedAt": formatDateForComparison(param.BeforeFinishedAt),
"afterFinishedAt": formatDateForComparison(param.AfterFinishedAt),
}
for k, v := range expectedParams {
if req.withQueryParams[k] != v {
t.Errorf("encodeTasksQuery() param %v = %v; want %v", k, req.withQueryParams[k], v)
}
}
}
func TestTransformStringVariadicToMap(t *testing.T) {
tests := []struct {
input []string
expect map[string]string
}{
{[]string{"primaryKey1"}, map[string]string{"primaryKey": "primaryKey1"}},
{nil, nil},
}
for _, test := range tests {
result := transformStringVariadicToMap(test.input...)
if !reflect.DeepEqual(result, test.expect) {
t.Errorf("transformStringVariadicToMap(%v) = %v; want %v", test.input, result, test.expect)
}
}
}
func TestGenerateQueryForOptions(t *testing.T) {
options := map[string]string{
"key1": "value1",
"key2": "value2",
}
expected := url.Values{}
expected.Add("key1", "value1")
expected.Add("key2", "value2")
result := generateQueryForOptions(options)
if result != expected.Encode() {
t.Errorf("generateQueryForOptions(%v) = %v; want %v", options, result, expected.Encode())
}
}

index.go Normal file

@ -0,0 +1,154 @@
package meilisearch
import (
"context"
"net/http"
)
// index is the type that represents an index in Meilisearch.
type index struct {
uid string
primaryKey string
client *client
}
func newIndex(cli *client, uid string) IndexManager {
return &index{
client: cli,
uid: uid,
}
}
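// newIndex is internal; callers obtain an IndexManager through the client,
// e.g. (sketch, assuming a configured client value and an example UID):
//
//	idx := client.Index("movies")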
func (i *index) GetTaskReader() TaskReader {
return i
}
func (i *index) GetDocumentManager() DocumentManager {
return i
}
func (i *index) GetDocumentReader() DocumentReader {
return i
}
func (i *index) GetSettingsManager() SettingsManager {
return i
}
func (i *index) GetSettingsReader() SettingsReader {
return i
}
func (i *index) GetSearch() SearchReader {
return i
}
func (i *index) GetIndexReader() IndexReader {
return i
}
func (i *index) FetchInfo() (*IndexResult, error) {
return i.FetchInfoWithContext(context.Background())
}
func (i *index) FetchInfoWithContext(ctx context.Context) (*IndexResult, error) {
resp := new(IndexResult)
req := &internalRequest{
endpoint: "/indexes/" + i.uid,
method: http.MethodGet,
withRequest: nil,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusOK},
functionName: "FetchInfo",
}
if err := i.client.executeRequest(ctx, req); err != nil {
return nil, err
}
if resp.PrimaryKey != "" {
i.primaryKey = resp.PrimaryKey
}
resp.IndexManager = i
return resp, nil
}
func (i *index) FetchPrimaryKey() (*string, error) {
return i.FetchPrimaryKeyWithContext(context.Background())
}
func (i *index) FetchPrimaryKeyWithContext(ctx context.Context) (*string, error) {
idx, err := i.FetchInfoWithContext(ctx)
if err != nil {
return nil, err
}
i.primaryKey = idx.PrimaryKey
return &idx.PrimaryKey, nil
}
func (i *index) UpdateIndex(primaryKey string) (*TaskInfo, error) {
return i.UpdateIndexWithContext(context.Background(), primaryKey)
}
func (i *index) UpdateIndexWithContext(ctx context.Context, primaryKey string) (*TaskInfo, error) {
request := &UpdateIndexRequest{
PrimaryKey: primaryKey,
}
i.primaryKey = primaryKey
resp := new(TaskInfo)
req := &internalRequest{
endpoint: "/indexes/" + i.uid,
method: http.MethodPatch,
contentType: contentTypeJSON,
withRequest: request,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusAccepted},
functionName: "UpdateIndex",
}
if err := i.client.executeRequest(ctx, req); err != nil {
return nil, err
}
i.primaryKey = primaryKey
return resp, nil
}
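// Usage sketch (example index UID and primary key):
//
//	task, err := client.Index("movies").UpdateIndex("id")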
func (i *index) Delete(uid string) (bool, error) {
return i.DeleteWithContext(context.Background(), uid)
}
func (i *index) DeleteWithContext(ctx context.Context, uid string) (bool, error) {
resp := new(TaskInfo)
req := &internalRequest{
endpoint: "/indexes/" + uid,
method: http.MethodDelete,
withRequest: nil,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusAccepted},
functionName: "Delete",
}
// err is not nil if the status code is not 202 StatusAccepted
if err := i.client.executeRequest(ctx, req); err != nil {
return false, err
}
i.primaryKey = ""
return true, nil
}
func (i *index) GetStats() (*StatsIndex, error) {
return i.GetStatsWithContext(context.Background())
}
func (i *index) GetStatsWithContext(ctx context.Context) (*StatsIndex, error) {
resp := new(StatsIndex)
req := &internalRequest{
endpoint: "/indexes/" + i.uid + "/stats",
method: http.MethodGet,
withRequest: nil,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusOK},
functionName: "GetStats",
}
if err := i.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}

index_document.go Normal file

@ -0,0 +1,615 @@
package meilisearch
import (
"bufio"
"bytes"
"context"
"encoding/csv"
"fmt"
"io"
"math"
"net/http"
"reflect"
"strconv"
"strings"
)
func (i *index) AddDocuments(documentsPtr interface{}, primaryKey ...string) (*TaskInfo, error) {
return i.AddDocumentsWithContext(context.Background(), documentsPtr, primaryKey...)
}
func (i *index) AddDocumentsWithContext(ctx context.Context, documentsPtr interface{}, primaryKey ...string) (*TaskInfo, error) {
return i.addDocuments(ctx, documentsPtr, contentTypeJSON, transformStringVariadicToMap(primaryKey...))
}
func (i *index) AddDocumentsInBatches(documentsPtr interface{}, batchSize int, primaryKey ...string) ([]TaskInfo, error) {
return i.AddDocumentsInBatchesWithContext(context.Background(), documentsPtr, batchSize, primaryKey...)
}
func (i *index) AddDocumentsInBatchesWithContext(ctx context.Context, documentsPtr interface{}, batchSize int, primaryKey ...string) ([]TaskInfo, error) {
return i.saveDocumentsInBatches(ctx, documentsPtr, batchSize, i.AddDocumentsWithContext, primaryKey...)
}
func (i *index) AddDocumentsCsv(documents []byte, options *CsvDocumentsQuery) (*TaskInfo, error) {
return i.AddDocumentsCsvWithContext(context.Background(), documents, options)
}
func (i *index) AddDocumentsCsvWithContext(ctx context.Context, documents []byte, options *CsvDocumentsQuery) (*TaskInfo, error) {
// []byte avoids JSON conversion in Client.sendRequest()
return i.addDocuments(ctx, documents, contentTypeCSV, transformCsvDocumentsQueryToMap(options))
}
func (i *index) AddDocumentsCsvInBatches(documents []byte, batchSize int, options *CsvDocumentsQuery) ([]TaskInfo, error) {
return i.AddDocumentsCsvInBatchesWithContext(context.Background(), documents, batchSize, options)
}
func (i *index) AddDocumentsCsvInBatchesWithContext(ctx context.Context, documents []byte, batchSize int, options *CsvDocumentsQuery) ([]TaskInfo, error) {
// Reuse io.Reader implementation
return i.AddDocumentsCsvFromReaderInBatchesWithContext(ctx, bytes.NewReader(documents), batchSize, options)
}
func (i *index) AddDocumentsCsvFromReaderInBatches(documents io.Reader, batchSize int, options *CsvDocumentsQuery) (resp []TaskInfo, err error) {
return i.AddDocumentsCsvFromReaderInBatchesWithContext(context.Background(), documents, batchSize, options)
}
func (i *index) AddDocumentsCsvFromReaderInBatchesWithContext(ctx context.Context, documents io.Reader, batchSize int, options *CsvDocumentsQuery) (resp []TaskInfo, err error) {
return i.saveDocumentsFromReaderInBatches(ctx, documents, batchSize, i.AddDocumentsCsvWithContext, options)
}
func (i *index) AddDocumentsCsvFromReader(documents io.Reader, options *CsvDocumentsQuery) (resp *TaskInfo, err error) {
return i.AddDocumentsCsvFromReaderWithContext(context.Background(), documents, options)
}
func (i *index) AddDocumentsCsvFromReaderWithContext(ctx context.Context, documents io.Reader, options *CsvDocumentsQuery) (resp *TaskInfo, err error) {
// Passing an io.Reader through would avoid the JSON conversion in
// Client.sendRequest(), but the content is read into memory anyway because
// of problems with streamed request bodies.
data, err := io.ReadAll(documents)
if err != nil {
return nil, fmt.Errorf("could not read documents: %w", err)
}
return i.addDocuments(ctx, data, contentTypeCSV, transformCsvDocumentsQueryToMap(options))
}
func (i *index) AddDocumentsNdjson(documents []byte, primaryKey ...string) (*TaskInfo, error) {
return i.AddDocumentsNdjsonWithContext(context.Background(), documents, primaryKey...)
}
func (i *index) AddDocumentsNdjsonWithContext(ctx context.Context, documents []byte, primaryKey ...string) (*TaskInfo, error) {
// []byte avoids JSON conversion in Client.sendRequest()
return i.addDocuments(ctx, documents, contentTypeNDJSON, transformStringVariadicToMap(primaryKey...))
}
func (i *index) AddDocumentsNdjsonInBatches(documents []byte, batchSize int, primaryKey ...string) ([]TaskInfo, error) {
return i.AddDocumentsNdjsonInBatchesWithContext(context.Background(), documents, batchSize, primaryKey...)
}
func (i *index) AddDocumentsNdjsonInBatchesWithContext(ctx context.Context, documents []byte, batchSize int, primaryKey ...string) ([]TaskInfo, error) {
// Reuse io.Reader implementation
return i.AddDocumentsNdjsonFromReaderInBatchesWithContext(ctx, bytes.NewReader(documents), batchSize, primaryKey...)
}
func (i *index) AddDocumentsNdjsonFromReaderInBatches(documents io.Reader, batchSize int, primaryKey ...string) (resp []TaskInfo, err error) {
return i.AddDocumentsNdjsonFromReaderInBatchesWithContext(context.Background(), documents, batchSize, primaryKey...)
}
func (i *index) AddDocumentsNdjsonFromReaderInBatchesWithContext(ctx context.Context, documents io.Reader, batchSize int, primaryKey ...string) (resp []TaskInfo, err error) {
// NDJSON files are supposed to contain a valid JSON document on each line,
// so it's safe to split by lines.
// Lines are read and sent continuously to avoid reading all content into
// memory. However, this means that only part of the documents might be
// added successfully.
sendNdjsonLines := func(lines []string) (*TaskInfo, error) {
b := new(bytes.Buffer)
for _, line := range lines {
_, err := b.WriteString(line)
if err != nil {
return nil, fmt.Errorf("could not write NDJSON line: %w", err)
}
err = b.WriteByte('\n')
if err != nil {
return nil, fmt.Errorf("could not write NDJSON line: %w", err)
}
}
resp, err := i.AddDocumentsNdjsonWithContext(ctx, b.Bytes(), primaryKey...)
if err != nil {
return nil, err
}
return resp, nil
}
var (
responses []TaskInfo
lines []string
)
scanner := bufio.NewScanner(documents)
for scanner.Scan() {
line := strings.TrimSpace(scanner.Text())
// Skip empty lines (NDJSON might not allow this, but just to be sure)
if line == "" {
continue
}
lines = append(lines, line)
// After reaching batchSize send NDJSON lines
if len(lines) == batchSize {
resp, err := sendNdjsonLines(lines)
if err != nil {
return nil, err
}
responses = append(responses, *resp)
lines = nil
}
}
if err := scanner.Err(); err != nil {
return nil, fmt.Errorf("could not read NDJSON: %w", err)
}
// Send remaining records as the last batch if there is any
if len(lines) > 0 {
resp, err := sendNdjsonLines(lines)
if err != nil {
return nil, err
}
responses = append(responses, *resp)
}
return responses, nil
}
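// Usage sketch (assumed file name, batch size, and an IndexManager idx):
//
//	f, err := os.Open("movies.ndjson")
//	if err != nil { /* handle */ }
//	defer f.Close()
//	tasks, err := idx.AddDocumentsNdjsonFromReaderInBatches(f, 1000)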
func (i *index) AddDocumentsNdjsonFromReader(documents io.Reader, primaryKey ...string) (resp *TaskInfo, err error) {
return i.AddDocumentsNdjsonFromReaderWithContext(context.Background(), documents, primaryKey...)
}
func (i *index) AddDocumentsNdjsonFromReaderWithContext(ctx context.Context, documents io.Reader, primaryKey ...string) (resp *TaskInfo, err error) {
// Passing an io.Reader through would avoid the JSON conversion in
// Client.sendRequest(), but the content is read into memory anyway because
// of problems with streamed request bodies.
data, err := io.ReadAll(documents)
if err != nil {
return nil, fmt.Errorf("could not read documents: %w", err)
}
return i.addDocuments(ctx, data, contentTypeNDJSON, transformStringVariadicToMap(primaryKey...))
}
func (i *index) UpdateDocuments(documentsPtr interface{}, primaryKey ...string) (*TaskInfo, error) {
return i.UpdateDocumentsWithContext(context.Background(), documentsPtr, primaryKey...)
}
func (i *index) UpdateDocumentsWithContext(ctx context.Context, documentsPtr interface{}, primaryKey ...string) (*TaskInfo, error) {
return i.updateDocuments(ctx, documentsPtr, contentTypeJSON, transformStringVariadicToMap(primaryKey...))
}
func (i *index) UpdateDocumentsInBatches(documentsPtr interface{}, batchSize int, primaryKey ...string) ([]TaskInfo, error) {
return i.UpdateDocumentsInBatchesWithContext(context.Background(), documentsPtr, batchSize, primaryKey...)
}
func (i *index) UpdateDocumentsInBatchesWithContext(ctx context.Context, documentsPtr interface{}, batchSize int, primaryKey ...string) ([]TaskInfo, error) {
return i.saveDocumentsInBatches(ctx, documentsPtr, batchSize, i.UpdateDocumentsWithContext, primaryKey...)
}
func (i *index) UpdateDocumentsCsv(documents []byte, options *CsvDocumentsQuery) (*TaskInfo, error) {
return i.UpdateDocumentsCsvWithContext(context.Background(), documents, options)
}
func (i *index) UpdateDocumentsCsvWithContext(ctx context.Context, documents []byte, options *CsvDocumentsQuery) (*TaskInfo, error) {
return i.updateDocuments(ctx, documents, contentTypeCSV, transformCsvDocumentsQueryToMap(options))
}
func (i *index) UpdateDocumentsCsvInBatches(documents []byte, batchSize int, options *CsvDocumentsQuery) ([]TaskInfo, error) {
return i.UpdateDocumentsCsvInBatchesWithContext(context.Background(), documents, batchSize, options)
}
func (i *index) UpdateDocumentsCsvInBatchesWithContext(ctx context.Context, documents []byte, batchSize int, options *CsvDocumentsQuery) ([]TaskInfo, error) {
// Reuse io.Reader implementation
return i.updateDocumentsCsvFromReaderInBatches(ctx, bytes.NewReader(documents), batchSize, options)
}
func (i *index) UpdateDocumentsNdjson(documents []byte, primaryKey ...string) (*TaskInfo, error) {
return i.UpdateDocumentsNdjsonWithContext(context.Background(), documents, primaryKey...)
}
func (i *index) UpdateDocumentsNdjsonWithContext(ctx context.Context, documents []byte, primaryKey ...string) (*TaskInfo, error) {
return i.updateDocuments(ctx, documents, contentTypeNDJSON, transformStringVariadicToMap(primaryKey...))
}
func (i *index) UpdateDocumentsNdjsonInBatches(documents []byte, batchSize int, primaryKey ...string) ([]TaskInfo, error) {
return i.UpdateDocumentsNdjsonInBatchesWithContext(context.Background(), documents, batchSize, primaryKey...)
}
func (i *index) UpdateDocumentsNdjsonInBatchesWithContext(ctx context.Context, documents []byte, batchSize int, primaryKey ...string) ([]TaskInfo, error) {
return i.updateDocumentsNdjsonFromReaderInBatches(ctx, bytes.NewReader(documents), batchSize, primaryKey...)
}
func (i *index) UpdateDocumentsByFunction(req *UpdateDocumentByFunctionRequest) (*TaskInfo, error) {
return i.UpdateDocumentsByFunctionWithContext(context.Background(), req)
}
func (i *index) UpdateDocumentsByFunctionWithContext(ctx context.Context, req *UpdateDocumentByFunctionRequest) (*TaskInfo, error) {
resp := new(TaskInfo)
r := &internalRequest{
endpoint: "/indexes/" + i.uid + "/documents/edit",
method: http.MethodPost,
withRequest: req,
withResponse: resp,
contentType: contentTypeJSON,
acceptedStatusCodes: []int{http.StatusAccepted},
functionName: "UpdateDocumentsByFunction",
}
if err := i.client.executeRequest(ctx, r); err != nil {
return nil, err
}
return resp, nil
}
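// Usage sketch (the function expression below is an assumed example; see the
// UpdateDocumentByFunctionRequest type for its full set of fields):
//
//	task, err := idx.UpdateDocumentsByFunction(&UpdateDocumentByFunctionRequest{
//		Function: "doc.rating += 1",
//	})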
func (i *index) GetDocument(identifier string, request *DocumentQuery, documentPtr interface{}) error {
return i.GetDocumentWithContext(context.Background(), identifier, request, documentPtr)
}
func (i *index) GetDocumentWithContext(ctx context.Context, identifier string, request *DocumentQuery, documentPtr interface{}) error {
req := &internalRequest{
endpoint: "/indexes/" + i.uid + "/documents/" + identifier,
method: http.MethodGet,
withRequest: nil,
withResponse: documentPtr,
withQueryParams: map[string]string{},
acceptedStatusCodes: []int{http.StatusOK},
functionName: "GetDocument",
}
if request != nil {
if len(request.Fields) != 0 {
req.withQueryParams["fields"] = strings.Join(request.Fields, ",")
}
}
if err := i.client.executeRequest(ctx, req); err != nil {
return err
}
return nil
}
func (i *index) GetDocuments(param *DocumentsQuery, resp *DocumentsResult) error {
return i.GetDocumentsWithContext(context.Background(), param, resp)
}
func (i *index) GetDocumentsWithContext(ctx context.Context, param *DocumentsQuery, resp *DocumentsResult) error {
req := &internalRequest{
endpoint: "/indexes/" + i.uid + "/documents",
method: http.MethodGet,
contentType: contentTypeJSON,
withRequest: nil,
withResponse: resp,
withQueryParams: nil,
acceptedStatusCodes: []int{http.StatusOK},
functionName: "GetDocuments",
}
if param != nil && param.Filter == nil {
req.withQueryParams = map[string]string{}
if param.Limit != 0 {
req.withQueryParams["limit"] = strconv.FormatInt(param.Limit, 10)
}
if param.Offset != 0 {
req.withQueryParams["offset"] = strconv.FormatInt(param.Offset, 10)
}
if len(param.Fields) != 0 {
req.withQueryParams["fields"] = strings.Join(param.Fields, ",")
}
} else if param != nil && param.Filter != nil {
req.withRequest = param
req.method = http.MethodPost
req.endpoint = req.endpoint + "/fetch"
}
if err := i.client.executeRequest(ctx, req); err != nil {
return VersionErrorHintMessage(err, req)
}
return nil
}
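// Usage sketch: a nil Filter issues a GET with query parameters, while a
// non-nil Filter switches to POST /documents/fetch (example values):
//
//	var res DocumentsResult
//	err := idx.GetDocuments(&DocumentsQuery{Limit: 2, Filter: "genres = action"}, &res)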
func (i *index) DeleteDocument(identifier string) (*TaskInfo, error) {
return i.DeleteDocumentWithContext(context.Background(), identifier)
}
func (i *index) DeleteDocumentWithContext(ctx context.Context, identifier string) (*TaskInfo, error) {
resp := new(TaskInfo)
req := &internalRequest{
endpoint: "/indexes/" + i.uid + "/documents/" + identifier,
method: http.MethodDelete,
withRequest: nil,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusAccepted},
functionName: "DeleteDocument",
}
if err := i.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (i *index) DeleteDocuments(identifiers []string) (*TaskInfo, error) {
return i.DeleteDocumentsWithContext(context.Background(), identifiers)
}
func (i *index) DeleteDocumentsWithContext(ctx context.Context, identifiers []string) (*TaskInfo, error) {
resp := new(TaskInfo)
req := &internalRequest{
endpoint: "/indexes/" + i.uid + "/documents/delete-batch",
method: http.MethodPost,
contentType: contentTypeJSON,
withRequest: identifiers,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusAccepted},
functionName: "DeleteDocuments",
}
if err := i.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (i *index) DeleteDocumentsByFilter(filter interface{}) (*TaskInfo, error) {
return i.DeleteDocumentsByFilterWithContext(context.Background(), filter)
}
func (i *index) DeleteDocumentsByFilterWithContext(ctx context.Context, filter interface{}) (*TaskInfo, error) {
resp := new(TaskInfo)
req := &internalRequest{
endpoint: "/indexes/" + i.uid + "/documents/delete",
method: http.MethodPost,
contentType: contentTypeJSON,
withRequest: map[string]interface{}{
"filter": filter,
},
withResponse: resp,
acceptedStatusCodes: []int{http.StatusAccepted},
functionName: "DeleteDocumentsByFilter",
}
if err := i.client.executeRequest(ctx, req); err != nil {
return nil, VersionErrorHintMessage(err, req)
}
return resp, nil
}
func (i *index) DeleteAllDocuments() (*TaskInfo, error) {
return i.DeleteAllDocumentsWithContext(context.Background())
}
func (i *index) DeleteAllDocumentsWithContext(ctx context.Context) (*TaskInfo, error) {
resp := new(TaskInfo)
req := &internalRequest{
endpoint: "/indexes/" + i.uid + "/documents",
method: http.MethodDelete,
withRequest: nil,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusAccepted},
functionName: "DeleteAllDocuments",
}
if err := i.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (i *index) addDocuments(ctx context.Context, documentsPtr interface{}, contentType string, options map[string]string) (resp *TaskInfo, err error) {
resp = new(TaskInfo)
endpoint := ""
if options == nil {
endpoint = "/indexes/" + i.uid + "/documents"
} else {
for key, val := range options {
if key == "primaryKey" {
i.primaryKey = val
}
}
endpoint = "/indexes/" + i.uid + "/documents?" + generateQueryForOptions(options)
}
req := &internalRequest{
endpoint: endpoint,
method: http.MethodPost,
contentType: contentType,
withRequest: documentsPtr,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusAccepted},
functionName: "AddDocuments",
}
if err = i.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (i *index) saveDocumentsFromReaderInBatches(ctx context.Context, documents io.Reader, batchSize int, documentsCsvFunc func(ctx context.Context, recs []byte, op *CsvDocumentsQuery) (resp *TaskInfo, err error), options *CsvDocumentsQuery) (resp []TaskInfo, err error) {
// Because fields may span multiple lines, it is not safe to split CSV input
// into batches by lines; instead the file is parsed and reassembled into
// smaller parts. RFC 4180-compliant input with a header row is expected.
// Records are read and sent continuously to avoid reading all content
// into memory. However, this means that only part of the documents might
// be added successfully.
var (
responses []TaskInfo
header []string
records [][]string
)
r := csv.NewReader(documents)
for {
// Read CSV record (empty lines and comments are already skipped by csv.Reader)
record, err := r.Read()
if err == io.EOF {
break
}
if err != nil {
return nil, fmt.Errorf("could not read CSV record: %w", err)
}
// Store first record as header
if header == nil {
header = record
continue
}
// Add header record to every batch
if len(records) == 0 {
records = append(records, header)
}
records = append(records, record)
// After reaching batchSize (not counting the header record) assemble a CSV file and send records
if len(records) == batchSize+1 {
resp, err := sendCsvRecords(ctx, documentsCsvFunc, records, options)
if err != nil {
return nil, err
}
responses = append(responses, *resp)
records = nil
}
}
// Send remaining records as the last batch if there is any
if len(records) > 0 {
resp, err := sendCsvRecords(ctx, documentsCsvFunc, records, options)
if err != nil {
return nil, err
}
responses = append(responses, *resp)
}
return responses, nil
}
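// For example (assumed sizes): a CSV with one header row and 250 data rows,
// sent with batchSize 100, produces three requests carrying 100, 100, and 50
// data records, each prefixed with the header row.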
func (i *index) saveDocumentsInBatches(ctx context.Context, documentsPtr interface{}, batchSize int, documentFunc func(ctx context.Context, documentsPtr interface{}, primaryKey ...string) (resp *TaskInfo, err error), primaryKey ...string) (resp []TaskInfo, err error) {
arr := reflect.ValueOf(documentsPtr)
lenDocs := arr.Len()
numBatches := int(math.Ceil(float64(lenDocs) / float64(batchSize)))
resp = make([]TaskInfo, numBatches)
for j := 0; j < numBatches; j++ {
end := (j + 1) * batchSize
if end > lenDocs {
end = lenDocs
}
batch := arr.Slice(j*batchSize, end).Interface()
if len(primaryKey) != 0 {
respID, err := documentFunc(ctx, batch, primaryKey[0])
if err != nil {
return nil, err
}
resp[j] = *respID
} else {
respID, err := documentFunc(ctx, batch)
if err != nil {
return nil, err
}
resp[j] = *respID
}
}
return resp, nil
}
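// For example (assumed counts): 2500 documents with batchSize 1000 give
// numBatches = ceil(2500/1000) = 3, i.e. batches of 1000, 1000, and 500.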
func (i *index) updateDocuments(ctx context.Context, documentsPtr interface{}, contentType string, options map[string]string) (resp *TaskInfo, err error) {
resp = &TaskInfo{}
endpoint := ""
if options == nil {
endpoint = "/indexes/" + i.uid + "/documents"
} else {
for key, val := range options {
if key == "primaryKey" {
i.primaryKey = val
}
}
endpoint = "/indexes/" + i.uid + "/documents?" + generateQueryForOptions(options)
}
req := &internalRequest{
endpoint: endpoint,
method: http.MethodPut,
contentType: contentType,
withRequest: documentsPtr,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusAccepted},
functionName: "UpdateDocuments",
}
if err = i.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (i *index) updateDocumentsCsvFromReaderInBatches(ctx context.Context, documents io.Reader, batchSize int, options *CsvDocumentsQuery) (resp []TaskInfo, err error) {
return i.saveDocumentsFromReaderInBatches(ctx, documents, batchSize, i.UpdateDocumentsCsvWithContext, options)
}
func (i *index) updateDocumentsNdjsonFromReaderInBatches(ctx context.Context, documents io.Reader, batchSize int, primaryKey ...string) (resp []TaskInfo, err error) {
// NDJSON files are supposed to contain a valid JSON document on each line,
// so it's safe to split by lines.
// Lines are read and sent continuously to avoid reading all content into
// memory. However, this means that only part of the documents might be
// added successfully.
sendNdjsonLines := func(lines []string) (*TaskInfo, error) {
b := new(bytes.Buffer)
for _, line := range lines {
_, err := b.WriteString(line)
if err != nil {
return nil, fmt.Errorf("could not write NDJSON line: %w", err)
}
err = b.WriteByte('\n')
if err != nil {
return nil, fmt.Errorf("could not write NDJSON line: %w", err)
}
}
resp, err := i.UpdateDocumentsNdjsonWithContext(ctx, b.Bytes(), primaryKey...)
if err != nil {
return nil, err
}
return resp, nil
}
var (
responses []TaskInfo
lines []string
)
scanner := bufio.NewScanner(documents)
for scanner.Scan() {
line := strings.TrimSpace(scanner.Text())
// Skip empty lines (NDJSON might not allow this, but just to be sure)
if line == "" {
continue
}
lines = append(lines, line)
// After reaching batchSize send NDJSON lines
if len(lines) == batchSize {
resp, err := sendNdjsonLines(lines)
if err != nil {
return nil, err
}
responses = append(responses, *resp)
lines = nil
}
}
if err := scanner.Err(); err != nil {
return nil, fmt.Errorf("Could not read NDJSON: %w", err)
}
// Send remaining records as the last batch if there is any
if len(lines) > 0 {
resp, err := sendNdjsonLines(lines)
if err != nil {
return nil, err
}
responses = append(responses, *resp)
}
return responses, nil
}

index_document_test.go Normal file

File diff suppressed because it is too large.

index_interface.go Normal file

@ -0,0 +1,653 @@
package meilisearch
import (
"context"
"encoding/json"
"io"
)
type IndexManager interface {
IndexReader
TaskReader
DocumentManager
SettingsManager
SearchReader
GetIndexReader() IndexReader
GetTaskReader() TaskReader
GetDocumentManager() DocumentManager
GetDocumentReader() DocumentReader
GetSettingsManager() SettingsManager
GetSettingsReader() SettingsReader
GetSearch() SearchReader
// UpdateIndex updates the primary key of the index.
UpdateIndex(primaryKey string) (*TaskInfo, error)
// UpdateIndexWithContext updates the primary key of the index using the provided context for cancellation.
UpdateIndexWithContext(ctx context.Context, primaryKey string) (*TaskInfo, error)
// Delete removes the index identified by the given UID.
Delete(uid string) (bool, error)
// DeleteWithContext removes the index identified by the given UID using the provided context for cancellation.
DeleteWithContext(ctx context.Context, uid string) (bool, error)
}
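// A minimal usage sketch (assuming a client value obtained from this
// package's constructor; the index UID is an example):
//
//	idx := client.Index("movies")      // IndexManager
//	docs := idx.GetDocumentManager()   // narrowed DocumentManager view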
type IndexReader interface {
// FetchInfo retrieves information about the index.
FetchInfo() (*IndexResult, error)
// FetchInfoWithContext retrieves information about the index using the provided context for cancellation.
FetchInfoWithContext(ctx context.Context) (*IndexResult, error)
// FetchPrimaryKey retrieves the primary key of the index.
FetchPrimaryKey() (*string, error)
// FetchPrimaryKeyWithContext retrieves the primary key of the index using the provided context for cancellation.
FetchPrimaryKeyWithContext(ctx context.Context) (*string, error)
// GetStats retrieves statistical information about the index.
GetStats() (*StatsIndex, error)
// GetStatsWithContext retrieves statistical information about the index using the provided context for cancellation.
GetStatsWithContext(ctx context.Context) (*StatsIndex, error)
}
type DocumentManager interface {
DocumentReader
// AddDocuments adds multiple documents to the index.
AddDocuments(documentsPtr interface{}, primaryKey ...string) (*TaskInfo, error)
// AddDocumentsWithContext adds multiple documents to the index using the provided context for cancellation.
AddDocumentsWithContext(ctx context.Context, documentsPtr interface{}, primaryKey ...string) (*TaskInfo, error)
// AddDocumentsInBatches adds documents to the index in batches of specified size.
AddDocumentsInBatches(documentsPtr interface{}, batchSize int, primaryKey ...string) ([]TaskInfo, error)
// AddDocumentsInBatchesWithContext adds documents to the index in batches of specified size using the provided context for cancellation.
AddDocumentsInBatchesWithContext(ctx context.Context, documentsPtr interface{}, batchSize int, primaryKey ...string) ([]TaskInfo, error)
// AddDocumentsCsv adds documents from a CSV byte array to the index.
AddDocumentsCsv(documents []byte, options *CsvDocumentsQuery) (*TaskInfo, error)
// AddDocumentsCsvWithContext adds documents from a CSV byte array to the index using the provided context for cancellation.
AddDocumentsCsvWithContext(ctx context.Context, documents []byte, options *CsvDocumentsQuery) (*TaskInfo, error)
// AddDocumentsCsvInBatches adds documents from a CSV byte array to the index in batches of specified size.
AddDocumentsCsvInBatches(documents []byte, batchSize int, options *CsvDocumentsQuery) ([]TaskInfo, error)
// AddDocumentsCsvInBatchesWithContext adds documents from a CSV byte array to the index in batches of specified size using the provided context for cancellation.
AddDocumentsCsvInBatchesWithContext(ctx context.Context, documents []byte, batchSize int, options *CsvDocumentsQuery) ([]TaskInfo, error)
// AddDocumentsCsvFromReaderInBatches adds documents from a CSV reader to the index in batches of specified size.
AddDocumentsCsvFromReaderInBatches(documents io.Reader, batchSize int, options *CsvDocumentsQuery) ([]TaskInfo, error)
// AddDocumentsCsvFromReaderInBatchesWithContext adds documents from a CSV reader to the index in batches of specified size using the provided context for cancellation.
AddDocumentsCsvFromReaderInBatchesWithContext(ctx context.Context, documents io.Reader, batchSize int, options *CsvDocumentsQuery) ([]TaskInfo, error)
// AddDocumentsCsvFromReader adds documents from a CSV reader to the index.
AddDocumentsCsvFromReader(documents io.Reader, options *CsvDocumentsQuery) (*TaskInfo, error)
// AddDocumentsCsvFromReaderWithContext adds documents from a CSV reader to the index using the provided context for cancellation.
AddDocumentsCsvFromReaderWithContext(ctx context.Context, documents io.Reader, options *CsvDocumentsQuery) (*TaskInfo, error)
// AddDocumentsNdjson adds documents from a NDJSON byte array to the index.
AddDocumentsNdjson(documents []byte, primaryKey ...string) (*TaskInfo, error)
// AddDocumentsNdjsonWithContext adds documents from a NDJSON byte array to the index using the provided context for cancellation.
AddDocumentsNdjsonWithContext(ctx context.Context, documents []byte, primaryKey ...string) (*TaskInfo, error)
// AddDocumentsNdjsonInBatches adds documents from a NDJSON byte array to the index in batches of specified size.
AddDocumentsNdjsonInBatches(documents []byte, batchSize int, primaryKey ...string) ([]TaskInfo, error)
// AddDocumentsNdjsonInBatchesWithContext adds documents from a NDJSON byte array to the index in batches of specified size using the provided context for cancellation.
AddDocumentsNdjsonInBatchesWithContext(ctx context.Context, documents []byte, batchSize int, primaryKey ...string) ([]TaskInfo, error)
// AddDocumentsNdjsonFromReader adds documents from a NDJSON reader to the index.
AddDocumentsNdjsonFromReader(documents io.Reader, primaryKey ...string) (*TaskInfo, error)
// AddDocumentsNdjsonFromReaderWithContext adds documents from a NDJSON reader to the index using the provided context for cancellation.
AddDocumentsNdjsonFromReaderWithContext(ctx context.Context, documents io.Reader, primaryKey ...string) (*TaskInfo, error)
// AddDocumentsNdjsonFromReaderInBatches adds documents from a NDJSON reader to the index in batches of specified size.
AddDocumentsNdjsonFromReaderInBatches(documents io.Reader, batchSize int, primaryKey ...string) ([]TaskInfo, error)
// AddDocumentsNdjsonFromReaderInBatchesWithContext adds documents from a NDJSON reader to the index in batches of specified size using the provided context for cancellation.
AddDocumentsNdjsonFromReaderInBatchesWithContext(ctx context.Context, documents io.Reader, batchSize int, primaryKey ...string) ([]TaskInfo, error)
// UpdateDocuments updates multiple documents in the index.
UpdateDocuments(documentsPtr interface{}, primaryKey ...string) (*TaskInfo, error)
// UpdateDocumentsWithContext updates multiple documents in the index using the provided context for cancellation.
UpdateDocumentsWithContext(ctx context.Context, documentsPtr interface{}, primaryKey ...string) (*TaskInfo, error)
// UpdateDocumentsInBatches updates documents in the index in batches of specified size.
UpdateDocumentsInBatches(documentsPtr interface{}, batchSize int, primaryKey ...string) ([]TaskInfo, error)
// UpdateDocumentsInBatchesWithContext updates documents in the index in batches of specified size using the provided context for cancellation.
UpdateDocumentsInBatchesWithContext(ctx context.Context, documentsPtr interface{}, batchSize int, primaryKey ...string) ([]TaskInfo, error)
// UpdateDocumentsCsv updates documents in the index from a CSV byte array.
UpdateDocumentsCsv(documents []byte, options *CsvDocumentsQuery) (*TaskInfo, error)
// UpdateDocumentsCsvWithContext updates documents in the index from a CSV byte array using the provided context for cancellation.
UpdateDocumentsCsvWithContext(ctx context.Context, documents []byte, options *CsvDocumentsQuery) (*TaskInfo, error)
// UpdateDocumentsCsvInBatches updates documents in the index from a CSV byte array in batches of specified size.
UpdateDocumentsCsvInBatches(documents []byte, batchSize int, options *CsvDocumentsQuery) ([]TaskInfo, error)
// UpdateDocumentsCsvInBatchesWithContext updates documents in the index from a CSV byte array in batches of specified size using the provided context for cancellation.
UpdateDocumentsCsvInBatchesWithContext(ctx context.Context, documents []byte, batchSize int, options *CsvDocumentsQuery) ([]TaskInfo, error)
// UpdateDocumentsNdjson updates documents in the index from a NDJSON byte array.
UpdateDocumentsNdjson(documents []byte, primaryKey ...string) (*TaskInfo, error)
// UpdateDocumentsNdjsonWithContext updates documents in the index from a NDJSON byte array using the provided context for cancellation.
UpdateDocumentsNdjsonWithContext(ctx context.Context, documents []byte, primaryKey ...string) (*TaskInfo, error)
// UpdateDocumentsNdjsonInBatches updates documents in the index from a NDJSON byte array in batches of specified size.
UpdateDocumentsNdjsonInBatches(documents []byte, batchSize int, primaryKey ...string) ([]TaskInfo, error)
// UpdateDocumentsNdjsonInBatchesWithContext updates documents in the index from a NDJSON byte array in batches of specified size using the provided context for cancellation.
UpdateDocumentsNdjsonInBatchesWithContext(ctx context.Context, documents []byte, batchSize int, primaryKey ...string) ([]TaskInfo, error)
// UpdateDocumentsByFunction updates documents by applying a function to them.
UpdateDocumentsByFunction(req *UpdateDocumentByFunctionRequest) (*TaskInfo, error)
// UpdateDocumentsByFunctionWithContext updates documents by applying a function to them, using the provided context for cancellation.
UpdateDocumentsByFunctionWithContext(ctx context.Context, req *UpdateDocumentByFunctionRequest) (*TaskInfo, error)
// DeleteDocument deletes a single document from the index by identifier.
DeleteDocument(identifier string) (*TaskInfo, error)
// DeleteDocumentWithContext deletes a single document from the index by identifier using the provided context for cancellation.
DeleteDocumentWithContext(ctx context.Context, identifier string) (*TaskInfo, error)
// DeleteDocuments deletes multiple documents from the index by identifiers.
DeleteDocuments(identifiers []string) (*TaskInfo, error)
// DeleteDocumentsWithContext deletes multiple documents from the index by identifiers using the provided context for cancellation.
DeleteDocumentsWithContext(ctx context.Context, identifiers []string) (*TaskInfo, error)
// DeleteDocumentsByFilter deletes documents from the index by filter.
DeleteDocumentsByFilter(filter interface{}) (*TaskInfo, error)
// DeleteDocumentsByFilterWithContext deletes documents from the index by filter using the provided context for cancellation.
DeleteDocumentsByFilterWithContext(ctx context.Context, filter interface{}) (*TaskInfo, error)
// DeleteAllDocuments deletes all documents from the index.
DeleteAllDocuments() (*TaskInfo, error)
// DeleteAllDocumentsWithContext deletes all documents from the index using the provided context for cancellation.
DeleteAllDocumentsWithContext(ctx context.Context) (*TaskInfo, error)
}
type DocumentReader interface {
// GetDocument retrieves a single document from the index by identifier.
GetDocument(identifier string, request *DocumentQuery, documentPtr interface{}) error
// GetDocumentWithContext retrieves a single document from the index by identifier using the provided context for cancellation.
GetDocumentWithContext(ctx context.Context, identifier string, request *DocumentQuery, documentPtr interface{}) error
// GetDocuments retrieves multiple documents from the index.
GetDocuments(param *DocumentsQuery, resp *DocumentsResult) error
// GetDocumentsWithContext retrieves multiple documents from the index using the provided context for cancellation.
GetDocumentsWithContext(ctx context.Context, param *DocumentsQuery, resp *DocumentsResult) error
}
type SearchReader interface {
// Search performs a search query on the index.
Search(query string, request *SearchRequest) (*SearchResponse, error)
// SearchWithContext performs a search query on the index using the provided context for cancellation.
SearchWithContext(ctx context.Context, query string, request *SearchRequest) (*SearchResponse, error)
// SearchRaw performs a raw search query on the index, returning a JSON response.
SearchRaw(query string, request *SearchRequest) (*json.RawMessage, error)
// SearchRawWithContext performs a raw search query on the index using the provided context for cancellation, returning a JSON response.
SearchRawWithContext(ctx context.Context, query string, request *SearchRequest) (*json.RawMessage, error)
// FacetSearch performs a facet search query on the index.
FacetSearch(request *FacetSearchRequest) (*json.RawMessage, error)
// FacetSearchWithContext performs a facet search query on the index using the provided context for cancellation.
FacetSearchWithContext(ctx context.Context, request *FacetSearchRequest) (*json.RawMessage, error)
// SearchSimilarDocuments performs a search for similar documents.
SearchSimilarDocuments(param *SimilarDocumentQuery, resp *SimilarDocumentResult) error
// SearchSimilarDocumentsWithContext performs a search for similar documents using the provided context for cancellation.
SearchSimilarDocumentsWithContext(ctx context.Context, param *SimilarDocumentQuery, resp *SimilarDocumentResult) error
}
type SettingsManager interface {
SettingsReader
// UpdateSettings updates the settings of the index.
UpdateSettings(request *Settings) (*TaskInfo, error)
// UpdateSettingsWithContext updates the settings of the index using the provided context for cancellation.
UpdateSettingsWithContext(ctx context.Context, request *Settings) (*TaskInfo, error)
// ResetSettings resets the settings of the index to default values.
ResetSettings() (*TaskInfo, error)
// ResetSettingsWithContext resets the settings of the index to default values using the provided context for cancellation.
ResetSettingsWithContext(ctx context.Context) (*TaskInfo, error)
// UpdateRankingRules updates the ranking rules of the index.
UpdateRankingRules(request *[]string) (*TaskInfo, error)
// UpdateRankingRulesWithContext updates the ranking rules of the index using the provided context for cancellation.
UpdateRankingRulesWithContext(ctx context.Context, request *[]string) (*TaskInfo, error)
// ResetRankingRules resets the ranking rules of the index to default values.
ResetRankingRules() (*TaskInfo, error)
// ResetRankingRulesWithContext resets the ranking rules of the index to default values using the provided context for cancellation.
ResetRankingRulesWithContext(ctx context.Context) (*TaskInfo, error)
// UpdateDistinctAttribute updates the distinct attribute of the index.
UpdateDistinctAttribute(request string) (*TaskInfo, error)
// UpdateDistinctAttributeWithContext updates the distinct attribute of the index using the provided context for cancellation.
UpdateDistinctAttributeWithContext(ctx context.Context, request string) (*TaskInfo, error)
// ResetDistinctAttribute resets the distinct attribute of the index to default value.
ResetDistinctAttribute() (*TaskInfo, error)
// ResetDistinctAttributeWithContext resets the distinct attribute of the index to default value using the provided context for cancellation.
ResetDistinctAttributeWithContext(ctx context.Context) (*TaskInfo, error)
// UpdateSearchableAttributes updates the searchable attributes of the index.
UpdateSearchableAttributes(request *[]string) (*TaskInfo, error)
// UpdateSearchableAttributesWithContext updates the searchable attributes of the index using the provided context for cancellation.
UpdateSearchableAttributesWithContext(ctx context.Context, request *[]string) (*TaskInfo, error)
// ResetSearchableAttributes resets the searchable attributes of the index to default values.
ResetSearchableAttributes() (*TaskInfo, error)
// ResetSearchableAttributesWithContext resets the searchable attributes of the index to default values using the provided context for cancellation.
ResetSearchableAttributesWithContext(ctx context.Context) (*TaskInfo, error)
// UpdateDisplayedAttributes updates the displayed attributes of the index.
UpdateDisplayedAttributes(request *[]string) (*TaskInfo, error)
// UpdateDisplayedAttributesWithContext updates the displayed attributes of the index using the provided context for cancellation.
UpdateDisplayedAttributesWithContext(ctx context.Context, request *[]string) (*TaskInfo, error)
// ResetDisplayedAttributes resets the displayed attributes of the index to default values.
ResetDisplayedAttributes() (*TaskInfo, error)
// ResetDisplayedAttributesWithContext resets the displayed attributes of the index to default values using the provided context for cancellation.
ResetDisplayedAttributesWithContext(ctx context.Context) (*TaskInfo, error)
// UpdateStopWords updates the stop words of the index.
UpdateStopWords(request *[]string) (*TaskInfo, error)
// UpdateStopWordsWithContext updates the stop words of the index using the provided context for cancellation.
UpdateStopWordsWithContext(ctx context.Context, request *[]string) (*TaskInfo, error)
// ResetStopWords resets the stop words of the index to default values.
ResetStopWords() (*TaskInfo, error)
// ResetStopWordsWithContext resets the stop words of the index to default values using the provided context for cancellation.
ResetStopWordsWithContext(ctx context.Context) (*TaskInfo, error)
// UpdateSynonyms updates the synonyms of the index.
UpdateSynonyms(request *map[string][]string) (*TaskInfo, error)
// UpdateSynonymsWithContext updates the synonyms of the index using the provided context for cancellation.
UpdateSynonymsWithContext(ctx context.Context, request *map[string][]string) (*TaskInfo, error)
// ResetSynonyms resets the synonyms of the index to default values.
ResetSynonyms() (*TaskInfo, error)
// ResetSynonymsWithContext resets the synonyms of the index to default values using the provided context for cancellation.
ResetSynonymsWithContext(ctx context.Context) (*TaskInfo, error)
// UpdateFilterableAttributes updates the filterable attributes of the index.
UpdateFilterableAttributes(request *[]string) (*TaskInfo, error)
// UpdateFilterableAttributesWithContext updates the filterable attributes of the index using the provided context for cancellation.
UpdateFilterableAttributesWithContext(ctx context.Context, request *[]string) (*TaskInfo, error)
// ResetFilterableAttributes resets the filterable attributes of the index to default values.
ResetFilterableAttributes() (*TaskInfo, error)
// ResetFilterableAttributesWithContext resets the filterable attributes of the index to default values using the provided context for cancellation.
ResetFilterableAttributesWithContext(ctx context.Context) (*TaskInfo, error)
// UpdateSortableAttributes updates the sortable attributes of the index.
UpdateSortableAttributes(request *[]string) (*TaskInfo, error)
// UpdateSortableAttributesWithContext updates the sortable attributes of the index using the provided context for cancellation.
UpdateSortableAttributesWithContext(ctx context.Context, request *[]string) (*TaskInfo, error)
// ResetSortableAttributes resets the sortable attributes of the index to default values.
ResetSortableAttributes() (*TaskInfo, error)
// ResetSortableAttributesWithContext resets the sortable attributes of the index to default values using the provided context for cancellation.
ResetSortableAttributesWithContext(ctx context.Context) (*TaskInfo, error)
// UpdateTypoTolerance updates the typo tolerance settings of the index.
UpdateTypoTolerance(request *TypoTolerance) (*TaskInfo, error)
// UpdateTypoToleranceWithContext updates the typo tolerance settings of the index using the provided context for cancellation.
UpdateTypoToleranceWithContext(ctx context.Context, request *TypoTolerance) (*TaskInfo, error)
// ResetTypoTolerance resets the typo tolerance settings of the index to default values.
ResetTypoTolerance() (*TaskInfo, error)
// ResetTypoToleranceWithContext resets the typo tolerance settings of the index to default values using the provided context for cancellation.
ResetTypoToleranceWithContext(ctx context.Context) (*TaskInfo, error)
// UpdatePagination updates the pagination settings of the index.
UpdatePagination(request *Pagination) (*TaskInfo, error)
// UpdatePaginationWithContext updates the pagination settings of the index using the provided context for cancellation.
UpdatePaginationWithContext(ctx context.Context, request *Pagination) (*TaskInfo, error)
// ResetPagination resets the pagination settings of the index to default values.
ResetPagination() (*TaskInfo, error)
// ResetPaginationWithContext resets the pagination settings of the index to default values using the provided context for cancellation.
ResetPaginationWithContext(ctx context.Context) (*TaskInfo, error)
// UpdateFaceting updates the faceting settings of the index.
UpdateFaceting(request *Faceting) (*TaskInfo, error)
// UpdateFacetingWithContext updates the faceting settings of the index using the provided context for cancellation.
UpdateFacetingWithContext(ctx context.Context, request *Faceting) (*TaskInfo, error)
// ResetFaceting resets the faceting settings of the index to default values.
ResetFaceting() (*TaskInfo, error)
// ResetFacetingWithContext resets the faceting settings of the index to default values using the provided context for cancellation.
ResetFacetingWithContext(ctx context.Context) (*TaskInfo, error)
// UpdateEmbedders updates the embedders of the index.
UpdateEmbedders(request map[string]Embedder) (*TaskInfo, error)
// UpdateEmbeddersWithContext updates the embedders of the index using the provided context for cancellation.
UpdateEmbeddersWithContext(ctx context.Context, request map[string]Embedder) (*TaskInfo, error)
// ResetEmbedders resets the embedders of the index to default values.
ResetEmbedders() (*TaskInfo, error)
// ResetEmbeddersWithContext resets the embedders of the index to default values using the provided context for cancellation.
ResetEmbeddersWithContext(ctx context.Context) (*TaskInfo, error)
// UpdateSearchCutoffMs updates the search cutoff time in milliseconds.
UpdateSearchCutoffMs(request int64) (*TaskInfo, error)
// UpdateSearchCutoffMsWithContext updates the search cutoff time in milliseconds using the provided context for cancellation.
UpdateSearchCutoffMsWithContext(ctx context.Context, request int64) (*TaskInfo, error)
// ResetSearchCutoffMs resets the search cutoff time in milliseconds to default value.
ResetSearchCutoffMs() (*TaskInfo, error)
// ResetSearchCutoffMsWithContext resets the search cutoff time in milliseconds to default value using the provided context for cancellation.
ResetSearchCutoffMsWithContext(ctx context.Context) (*TaskInfo, error)
// UpdateSeparatorTokens updates the separator tokens of the index.
// https://www.meilisearch.com/docs/reference/api/settings#update-separator-tokens
UpdateSeparatorTokens(tokens []string) (*TaskInfo, error)
// UpdateSeparatorTokensWithContext updates the separator tokens of the index using the provided context for cancellation.
// https://www.meilisearch.com/docs/reference/api/settings#update-separator-tokens
UpdateSeparatorTokensWithContext(ctx context.Context, tokens []string) (*TaskInfo, error)
// ResetSeparatorTokens resets the separator tokens of the index.
// https://www.meilisearch.com/docs/reference/api/settings#reset-separator-tokens
ResetSeparatorTokens() (*TaskInfo, error)
// ResetSeparatorTokensWithContext resets the separator tokens of the index using the provided context for cancellation.
// https://www.meilisearch.com/docs/reference/api/settings#reset-separator-tokens
ResetSeparatorTokensWithContext(ctx context.Context) (*TaskInfo, error)
// UpdateNonSeparatorTokens updates the non-separator tokens of the index.
// https://www.meilisearch.com/docs/reference/api/settings#update-non-separator-tokens
UpdateNonSeparatorTokens(tokens []string) (*TaskInfo, error)
// UpdateNonSeparatorTokensWithContext updates the non-separator tokens of the index using the provided context for cancellation.
// https://www.meilisearch.com/docs/reference/api/settings#update-non-separator-tokens
UpdateNonSeparatorTokensWithContext(ctx context.Context, tokens []string) (*TaskInfo, error)
// ResetNonSeparatorTokens resets the non-separator tokens of the index.
// https://www.meilisearch.com/docs/reference/api/settings#reset-non-separator-tokens
ResetNonSeparatorTokens() (*TaskInfo, error)
// ResetNonSeparatorTokensWithContext resets the non-separator tokens of the index using the provided context for cancellation.
// https://www.meilisearch.com/docs/reference/api/settings#reset-non-separator-tokens
ResetNonSeparatorTokensWithContext(ctx context.Context) (*TaskInfo, error)
// UpdateDictionary updates the user dictionary of the index.
// https://www.meilisearch.com/docs/reference/api/settings#update-dictionary
UpdateDictionary(words []string) (*TaskInfo, error)
// UpdateDictionaryWithContext updates the user dictionary of the index using the provided context for cancellation.
// https://www.meilisearch.com/docs/reference/api/settings#update-dictionary
UpdateDictionaryWithContext(ctx context.Context, words []string) (*TaskInfo, error)
// ResetDictionary resets the user dictionary of the index.
// https://www.meilisearch.com/docs/reference/api/settings#reset-dictionary
ResetDictionary() (*TaskInfo, error)
// ResetDictionaryWithContext resets the user dictionary of the index using the provided context for cancellation.
// https://www.meilisearch.com/docs/reference/api/settings#reset-dictionary
ResetDictionaryWithContext(ctx context.Context) (*TaskInfo, error)
// UpdateProximityPrecision sets the ProximityPrecision value to ByWord or ByAttribute.
// https://www.meilisearch.com/docs/reference/api/settings#update-proximity-precision-settings
UpdateProximityPrecision(proximityType ProximityPrecisionType) (*TaskInfo, error)
// UpdateProximityPrecisionWithContext sets the ProximityPrecision value to ByWord or ByAttribute using the provided context for cancellation.
// https://www.meilisearch.com/docs/reference/api/settings#update-proximity-precision-settings
UpdateProximityPrecisionWithContext(ctx context.Context, proximityType ProximityPrecisionType) (*TaskInfo, error)
// ResetProximityPrecision resets ProximityPrecision to the default ByWord.
// https://www.meilisearch.com/docs/reference/api/settings#reset-proximity-precision-settings
ResetProximityPrecision() (*TaskInfo, error)
// ResetProximityPrecisionWithContext resets ProximityPrecision to the default ByWord using the provided context for cancellation.
// https://www.meilisearch.com/docs/reference/api/settings#reset-proximity-precision-settings
ResetProximityPrecisionWithContext(ctx context.Context) (*TaskInfo, error)
// UpdateLocalizedAttributes updates the localized attributes settings of the index.
// https://www.meilisearch.com/docs/reference/api/settings#update-localized-attribute-settings
UpdateLocalizedAttributes(request []*LocalizedAttributes) (*TaskInfo, error)
// UpdateLocalizedAttributesWithContext updates the localized attributes settings of the index using the provided context for cancellation.
// https://www.meilisearch.com/docs/reference/api/settings#update-localized-attribute-settings
UpdateLocalizedAttributesWithContext(ctx context.Context, request []*LocalizedAttributes) (*TaskInfo, error)
// ResetLocalizedAttributes resets the localized attributes settings of the index.
ResetLocalizedAttributes() (*TaskInfo, error)
// ResetLocalizedAttributesWithContext resets the localized attributes settings of the index using the provided context for cancellation.
ResetLocalizedAttributesWithContext(ctx context.Context) (*TaskInfo, error)
// UpdatePrefixSearch updates the prefix search setting of the index.
UpdatePrefixSearch(request string) (*TaskInfo, error)
// UpdatePrefixSearchWithContext updates the prefix search setting of the index using the provided context for cancellation.
UpdatePrefixSearchWithContext(ctx context.Context, request string) (*TaskInfo, error)
// ResetPrefixSearch resets the prefix search setting of the index to default value.
ResetPrefixSearch() (*TaskInfo, error)
// ResetPrefixSearchWithContext resets the prefix search setting of the index to default value using the provided context for cancellation.
ResetPrefixSearchWithContext(ctx context.Context) (*TaskInfo, error)
// UpdateFacetSearch updates the facet search setting of the index.
UpdateFacetSearch(request bool) (*TaskInfo, error)
// UpdateFacetSearchWithContext updates the facet search setting of the index using the provided context for cancellation.
UpdateFacetSearchWithContext(ctx context.Context, request bool) (*TaskInfo, error)
// ResetFacetSearch resets the facet search setting of the index to default value.
ResetFacetSearch() (*TaskInfo, error)
// ResetFacetSearchWithContext resets the facet search setting of the index to default value using the provided context for cancellation.
ResetFacetSearchWithContext(ctx context.Context) (*TaskInfo, error)
}
type SettingsReader interface {
// GetSettings retrieves the settings of the index.
GetSettings() (*Settings, error)
// GetSettingsWithContext retrieves the settings of the index using the provided context for cancellation.
GetSettingsWithContext(ctx context.Context) (*Settings, error)
// GetRankingRules retrieves the ranking rules of the index.
GetRankingRules() (*[]string, error)
// GetRankingRulesWithContext retrieves the ranking rules of the index using the provided context for cancellation.
GetRankingRulesWithContext(ctx context.Context) (*[]string, error)
// GetDistinctAttribute retrieves the distinct attribute of the index.
GetDistinctAttribute() (*string, error)
// GetDistinctAttributeWithContext retrieves the distinct attribute of the index using the provided context for cancellation.
GetDistinctAttributeWithContext(ctx context.Context) (*string, error)
// GetSearchableAttributes retrieves the searchable attributes of the index.
GetSearchableAttributes() (*[]string, error)
// GetSearchableAttributesWithContext retrieves the searchable attributes of the index using the provided context for cancellation.
GetSearchableAttributesWithContext(ctx context.Context) (*[]string, error)
// GetDisplayedAttributes retrieves the displayed attributes of the index.
GetDisplayedAttributes() (*[]string, error)
// GetDisplayedAttributesWithContext retrieves the displayed attributes of the index using the provided context for cancellation.
GetDisplayedAttributesWithContext(ctx context.Context) (*[]string, error)
// GetStopWords retrieves the stop words of the index.
GetStopWords() (*[]string, error)
// GetStopWordsWithContext retrieves the stop words of the index using the provided context for cancellation.
GetStopWordsWithContext(ctx context.Context) (*[]string, error)
// GetSynonyms retrieves the synonyms of the index.
GetSynonyms() (*map[string][]string, error)
// GetSynonymsWithContext retrieves the synonyms of the index using the provided context for cancellation.
GetSynonymsWithContext(ctx context.Context) (*map[string][]string, error)
// GetFilterableAttributes retrieves the filterable attributes of the index.
GetFilterableAttributes() (*[]string, error)
// GetFilterableAttributesWithContext retrieves the filterable attributes of the index using the provided context for cancellation.
GetFilterableAttributesWithContext(ctx context.Context) (*[]string, error)
// GetSortableAttributes retrieves the sortable attributes of the index.
GetSortableAttributes() (*[]string, error)
// GetSortableAttributesWithContext retrieves the sortable attributes of the index using the provided context for cancellation.
GetSortableAttributesWithContext(ctx context.Context) (*[]string, error)
// GetTypoTolerance retrieves the typo tolerance settings of the index.
GetTypoTolerance() (*TypoTolerance, error)
// GetTypoToleranceWithContext retrieves the typo tolerance settings of the index using the provided context for cancellation.
GetTypoToleranceWithContext(ctx context.Context) (*TypoTolerance, error)
// GetPagination retrieves the pagination settings of the index.
GetPagination() (*Pagination, error)
// GetPaginationWithContext retrieves the pagination settings of the index using the provided context for cancellation.
GetPaginationWithContext(ctx context.Context) (*Pagination, error)
// GetFaceting retrieves the faceting settings of the index.
GetFaceting() (*Faceting, error)
// GetFacetingWithContext retrieves the faceting settings of the index using the provided context for cancellation.
GetFacetingWithContext(ctx context.Context) (*Faceting, error)
// GetEmbedders retrieves the embedders of the index.
GetEmbedders() (map[string]Embedder, error)
// GetEmbeddersWithContext retrieves the embedders of the index using the provided context for cancellation.
GetEmbeddersWithContext(ctx context.Context) (map[string]Embedder, error)
// GetSearchCutoffMs retrieves the search cutoff time in milliseconds.
GetSearchCutoffMs() (int64, error)
// GetSearchCutoffMsWithContext retrieves the search cutoff time in milliseconds using the provided context for cancellation.
GetSearchCutoffMsWithContext(ctx context.Context) (int64, error)
// GetSeparatorTokens returns the separator tokens
// https://www.meilisearch.com/docs/reference/api/settings#get-separator-tokens
GetSeparatorTokens() ([]string, error)
// GetSeparatorTokensWithContext returns the separator tokens and supports a parent context
// https://www.meilisearch.com/docs/reference/api/settings#get-separator-tokens
GetSeparatorTokensWithContext(ctx context.Context) ([]string, error)
// GetNonSeparatorTokens returns the non-separator tokens
// https://www.meilisearch.com/docs/reference/api/settings#get-non-separator-tokens
GetNonSeparatorTokens() ([]string, error)
// GetNonSeparatorTokensWithContext returns the non-separator tokens and supports a parent context
// https://www.meilisearch.com/docs/reference/api/settings#get-non-separator-tokens
GetNonSeparatorTokensWithContext(ctx context.Context) ([]string, error)
// GetDictionary returns the user dictionary
//
// Allows users to instruct Meilisearch to consider groups of strings as a
// single term by adding a supplementary dictionary of user-defined terms.
// This is particularly useful when working with datasets containing many domain-specific
// words, and in languages where words are not separated by whitespace, such as Japanese.
// Custom dictionaries are also useful in a few use cases for space-separated languages,
// such as datasets with names such as "J. R. R. Tolkien" and "W. E. B. Du Bois".
//
// https://www.meilisearch.com/docs/reference/api/settings#get-dictionary
GetDictionary() ([]string, error)
// GetDictionaryWithContext returns the user dictionary and supports a parent context
//
// Allows users to instruct Meilisearch to consider groups of strings as a
// single term by adding a supplementary dictionary of user-defined terms.
// This is particularly useful when working with datasets containing many domain-specific
// words, and in languages where words are not separated by whitespace, such as Japanese.
// Custom dictionaries are also useful in a few use cases for space-separated languages,
// such as datasets with names such as "J. R. R. Tolkien" and "W. E. B. Du Bois".
//
// https://www.meilisearch.com/docs/reference/api/settings#get-dictionary
GetDictionaryWithContext(ctx context.Context) ([]string, error)
// GetProximityPrecision returns the ProximityPrecision configuration value
// https://www.meilisearch.com/docs/reference/api/settings#get-proximity-precision-settings
GetProximityPrecision() (ProximityPrecisionType, error)
// GetProximityPrecisionWithContext returns the ProximityPrecision configuration value and supports a parent context
// https://www.meilisearch.com/docs/reference/api/settings#get-proximity-precision-settings
GetProximityPrecisionWithContext(ctx context.Context) (ProximityPrecisionType, error)
// GetLocalizedAttributes gets the localized attributes settings of an index
// https://www.meilisearch.com/docs/reference/api/settings#get-localized-attributes-settings
GetLocalizedAttributes() ([]*LocalizedAttributes, error)
// GetLocalizedAttributesWithContext gets the localized attributes settings of an index using the provided context for cancellation
// https://www.meilisearch.com/docs/reference/api/settings#get-localized-attributes-settings
GetLocalizedAttributesWithContext(ctx context.Context) ([]*LocalizedAttributes, error)
// GetPrefixSearch retrieves the prefix search setting of the index.
GetPrefixSearch() (*string, error)
// GetPrefixSearchWithContext retrieves the prefix search setting of the index using the provided context for cancellation.
GetPrefixSearchWithContext(ctx context.Context) (*string, error)
// GetFacetSearch retrieves the facet search setting of the index.
GetFacetSearch() (bool, error)
// GetFacetSearchWithContext retrieves the facet search setting of the index using the provided context for cancellation.
GetFacetSearchWithContext(ctx context.Context) (bool, error)
}
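
For orientation, here is a minimal sketch of reading settings back through the reader interface above. It is not part of the package: the host, API key, and "movies" index uid are assumptions, and the field access follows the Settings struct defined in types.go.

package main

import (
	"fmt"
	"log"

	"github.com/meilisearch/meilisearch-go"
)

func main() {
	// Assumes a Meilisearch instance is reachable at this host with this key.
	client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey"))
	defer client.Close()

	idx := client.Index("movies") // hypothetical index uid

	// GetSettings returns the full settings object in one round trip.
	settings, err := idx.GetSettings()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("ranking rules:", settings.RankingRules)

	// Individual getters are also available, e.g. for filterable attributes.
	filterable, err := idx.GetFilterableAttributes()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("filterable:", *filterable)
}

GetSettings fetches everything at once; the individual getters are the lighter choice when only one setting is needed.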

index_search.go Normal file

@@ -0,0 +1,132 @@
package meilisearch
import (
"context"
"encoding/json"
"net/http"
)
func (i *index) Search(query string, request *SearchRequest) (*SearchResponse, error) {
return i.SearchWithContext(context.Background(), query, request)
}
func (i *index) SearchWithContext(ctx context.Context, query string, request *SearchRequest) (*SearchResponse, error) {
if request == nil {
return nil, ErrNoSearchRequest
}
if query != "" {
request.Query = query
}
// The target index is already encoded in the endpoint path, so clear any
// stray IndexUID (for example one left over from a multi-search query).
request.IndexUID = ""
request.validate()
resp := new(SearchResponse)
req := &internalRequest{
endpoint: "/indexes/" + i.uid + "/search",
method: http.MethodPost,
contentType: contentTypeJSON,
withRequest: request,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusOK},
functionName: "Search",
}
if err := i.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (i *index) SearchRaw(query string, request *SearchRequest) (*json.RawMessage, error) {
return i.SearchRawWithContext(context.Background(), query, request)
}
func (i *index) SearchRawWithContext(ctx context.Context, query string, request *SearchRequest) (*json.RawMessage, error) {
if request == nil {
return nil, ErrNoSearchRequest
}
if query != "" {
request.Query = query
}
// As in Search, the index is taken from the endpoint path, so clear IndexUID.
request.IndexUID = ""
request.validate()
resp := new(json.RawMessage)
req := &internalRequest{
endpoint: "/indexes/" + i.uid + "/search",
method: http.MethodPost,
contentType: contentTypeJSON,
withRequest: request,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusOK},
functionName: "SearchRaw",
}
if err := i.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (i *index) FacetSearch(request *FacetSearchRequest) (*json.RawMessage, error) {
return i.FacetSearchWithContext(context.Background(), request)
}
func (i *index) FacetSearchWithContext(ctx context.Context, request *FacetSearchRequest) (*json.RawMessage, error) {
if request == nil {
return nil, ErrNoFacetSearchRequest
}
resp := new(json.RawMessage)
req := &internalRequest{
endpoint: "/indexes/" + i.uid + "/facet-search",
method: http.MethodPost,
contentType: contentTypeJSON,
withRequest: request,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusOK},
functionName: "FacetSearch",
}
if err := i.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (i *index) SearchSimilarDocuments(param *SimilarDocumentQuery, resp *SimilarDocumentResult) error {
return i.SearchSimilarDocumentsWithContext(context.Background(), param, resp)
}
func (i *index) SearchSimilarDocumentsWithContext(ctx context.Context, param *SimilarDocumentQuery, resp *SimilarDocumentResult) error {
req := &internalRequest{
endpoint: "/indexes/" + i.uid + "/similar",
method: http.MethodPost,
withRequest: param,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusOK},
functionName: "SearchSimilarDocuments",
contentType: contentTypeJSON,
}
if err := i.client.executeRequest(ctx, req); err != nil {
return err
}
return nil
}
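
SearchRaw above returns the unparsed response body, which is useful when the typed SearchResponse does not model a field you need. A minimal sketch, assuming a local instance and a populated "movies" index:

package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/meilisearch/meilisearch-go"
)

func main() {
	client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey"))
	defer client.Close()

	// SearchRaw returns *json.RawMessage instead of a typed SearchResponse.
	raw, err := client.Index("movies").SearchRaw("ninja", &meilisearch.SearchRequest{
		Limit: 5,
	})
	if err != nil {
		log.Fatal(err)
	}

	// Decode only the part of the payload we care about.
	var partial struct {
		Hits []json.RawMessage `json:"hits"`
	}
	if err := json.Unmarshal(*raw, &partial); err != nil {
		log.Fatal(err)
	}
	fmt.Println("hits returned:", len(partial.Hits))
}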

index_search_test.go Normal file

File diff suppressed because it is too large

index_settings.go Normal file

File diff suppressed because it is too large

index_settings_test.go Normal file

File diff suppressed because it is too large

index_task.go Normal file

@@ -0,0 +1,79 @@
package meilisearch
import (
"context"
"net/http"
"strconv"
"strings"
"time"
)
func (i *index) GetTask(taskUID int64) (*Task, error) {
return i.GetTaskWithContext(context.Background(), taskUID)
}
func (i *index) GetTaskWithContext(ctx context.Context, taskUID int64) (*Task, error) {
return getTask(ctx, i.client, taskUID)
}
func (i *index) GetTasks(param *TasksQuery) (*TaskResult, error) {
return i.GetTasksWithContext(context.Background(), param)
}
func (i *index) GetTasksWithContext(ctx context.Context, param *TasksQuery) (*TaskResult, error) {
resp := new(TaskResult)
req := &internalRequest{
endpoint: "/tasks",
method: http.MethodGet,
withRequest: nil,
withResponse: resp,
withQueryParams: map[string]string{},
acceptedStatusCodes: []int{http.StatusOK},
functionName: "GetTasks",
}
if param != nil {
if param.Limit != 0 {
req.withQueryParams["limit"] = strconv.FormatInt(param.Limit, 10)
}
if param.From != 0 {
req.withQueryParams["from"] = strconv.FormatInt(param.From, 10)
}
if len(param.Statuses) != 0 {
statuses := make([]string, len(param.Statuses))
for i, status := range param.Statuses {
statuses[i] = string(status)
}
req.withQueryParams["statuses"] = strings.Join(statuses, ",")
}
if len(param.Types) != 0 {
types := make([]string, len(param.Types))
for i, t := range param.Types {
types[i] = string(t)
}
req.withQueryParams["types"] = strings.Join(types, ",")
}
// Always scope the query to this index by including its uid.
if len(param.IndexUIDS) != 0 {
param.IndexUIDS = append(param.IndexUIDS, i.uid)
req.withQueryParams["indexUids"] = strings.Join(param.IndexUIDS, ",")
} else {
req.withQueryParams["indexUids"] = i.uid
}
if param.Reverse {
req.withQueryParams["reverse"] = "true"
}
}
if err := i.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (i *index) WaitForTask(taskUID int64, interval time.Duration) (*Task, error) {
return waitForTask(context.Background(), i.client, taskUID, interval)
}
func (i *index) WaitForTaskWithContext(ctx context.Context, taskUID int64, interval time.Duration) (*Task, error) {
return waitForTask(ctx, i.client, taskUID, interval)
}
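
The wait helpers above poll until the task leaves the queue; a deadline is imposed through the context variant. A minimal sketch, assuming a local instance ("movies" is a hypothetical index uid; an interval of 0 falls back to the library's default polling interval, see waitForTask in meilisearch.go):

package main

import (
	"context"
	"fmt"
	"log"
	"time"

	"github.com/meilisearch/meilisearch-go"
)

func main() {
	client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey"))
	defer client.Close()

	idx := client.Index("movies")
	info, err := idx.AddDocuments([]map[string]interface{}{
		{"id": 1, "title": "Alice In Wonderland"},
	})
	if err != nil {
		log.Fatal(err)
	}

	// Give up after five seconds instead of polling forever.
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	task, err := idx.WaitForTaskWithContext(ctx, info.TaskUID, 0)
	if err != nil {
		log.Fatal(err) // context deadline exceeded, or an HTTP error
	}
	fmt.Println("task finished with status:", task.Status)
}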

index_task_test.go Normal file

@@ -0,0 +1,281 @@
package meilisearch
import (
"context"
"crypto/tls"
"github.com/stretchr/testify/require"
"testing"
"time"
)
func TestIndex_GetTask(t *testing.T) {
sv := setup(t, "")
customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{
InsecureSkipVerify: true,
}))
type args struct {
UID string
client ServiceManager
taskUID int64
document []docTest
}
tests := []struct {
name string
args args
}{
{
name: "TestIndexBasicGetTask",
args: args{
UID: "TestIndexBasicGetTask",
client: sv,
taskUID: 0,
document: []docTest{
{ID: "123", Name: "Pride and Prejudice"},
},
},
},
{
name: "TestIndexGetTaskWithCustomClient",
args: args{
UID: "TestIndexGetTaskWithCustomClient",
client: customSv,
taskUID: 0,
document: []docTest{
{ID: "123", Name: "Pride and Prejudice"},
},
},
},
{
name: "TestIndexGetTask",
args: args{
UID: "TestIndexGetTask",
client: sv,
taskUID: 0,
document: []docTest{
{ID: "456", Name: "Le Petit Prince"},
{ID: "1", Name: "Alice In Wonderland"},
},
},
},
}
t.Cleanup(cleanup(sv, customSv))
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
c := tt.args.client
i := c.Index(tt.args.UID)
t.Cleanup(cleanup(c))
task, err := i.AddDocuments(tt.args.document)
require.NoError(t, err)
_, err = c.WaitForTask(task.TaskUID, 0)
require.NoError(t, err)
gotResp, err := i.GetTask(task.TaskUID)
require.NoError(t, err)
require.NotNil(t, gotResp)
require.GreaterOrEqual(t, gotResp.UID, tt.args.taskUID)
require.Equal(t, gotResp.IndexUID, tt.args.UID)
require.Equal(t, gotResp.Status, TaskStatusSucceeded)
// Make sure that timestamps are also retrieved
require.NotZero(t, gotResp.EnqueuedAt)
require.NotZero(t, gotResp.StartedAt)
require.NotZero(t, gotResp.FinishedAt)
})
}
}
func TestIndex_GetTasks(t *testing.T) {
sv := setup(t, "")
customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{
InsecureSkipVerify: true,
}))
type args struct {
UID string
client ServiceManager
document []docTest
query *TasksQuery
}
tests := []struct {
name string
args args
}{
{
name: "TestIndexBasicGetTasks",
args: args{
UID: "indexUID",
client: sv,
document: []docTest{
{ID: "123", Name: "Pride and Prejudice"},
},
},
},
{
name: "TestIndexGetTasksWithCustomClient",
args: args{
UID: "indexUID",
client: customSv,
document: []docTest{
{ID: "123", Name: "Pride and Prejudice"},
},
},
},
{
name: "TestIndexBasicGetTasksWithFilters",
args: args{
UID: "indexUID",
client: sv,
document: []docTest{
{ID: "123", Name: "Pride and Prejudice"},
},
query: &TasksQuery{
Statuses: []TaskStatus{TaskStatusSucceeded},
Types: []TaskType{TaskTypeDocumentAdditionOrUpdate},
},
},
},
{
name: "TestTasksWithParams",
args: args{
UID: "indexUID",
client: sv,
document: []docTest{
{ID: "123", Name: "Pride and Prejudice"},
},
query: &TasksQuery{
IndexUIDS: []string{"indexUID"},
Limit: 10,
From: 0,
Statuses: []TaskStatus{TaskStatusSucceeded},
Types: []TaskType{TaskTypeDocumentAdditionOrUpdate},
Reverse: true,
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
c := tt.args.client
i := c.Index(tt.args.UID)
t.Cleanup(cleanup(c))
task, err := i.AddDocuments(tt.args.document)
require.NoError(t, err)
_, err = c.WaitForTask(task.TaskUID, 0)
require.NoError(t, err)
gotResp, err := i.GetTasks(tt.args.query)
require.NoError(t, err)
require.NotNil(t, (*gotResp).Results[0].Status)
require.NotNil(t, (*gotResp).Results[0].Type)
})
}
}
func TestIndex_WaitForTask(t *testing.T) {
sv := setup(t, "")
customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{
InsecureSkipVerify: true,
}))
type args struct {
UID string
client ServiceManager
interval time.Duration
timeout time.Duration
document []docTest
}
tests := []struct {
name string
args args
want TaskStatus
}{
{
name: "TestWaitForTask50",
args: args{
UID: "TestWaitForTask50",
client: sv,
interval: time.Millisecond * 50,
timeout: time.Second * 5,
document: []docTest{
{ID: "123", Name: "Pride and Prejudice"},
{ID: "456", Name: "Le Petit Prince"},
{ID: "1", Name: "Alice In Wonderland"},
},
},
want: "succeeded",
},
{
name: "TestWaitForTask50WithCustomClient",
args: args{
UID: "TestWaitForTask50WithCustomClient",
client: customSv,
interval: time.Millisecond * 50,
timeout: time.Second * 5,
document: []docTest{
{ID: "123", Name: "Pride and Prejudice"},
{ID: "456", Name: "Le Petit Prince"},
{ID: "1", Name: "Alice In Wonderland"},
},
},
want: "succeeded",
},
{
name: "TestWaitForTask10",
args: args{
UID: "TestWaitForTask10",
client: sv,
interval: time.Millisecond * 10,
timeout: time.Second * 5,
document: []docTest{
{ID: "123", Name: "Pride and Prejudice"},
{ID: "456", Name: "Le Petit Prince"},
{ID: "1", Name: "Alice In Wonderland"},
},
},
want: "succeeded",
},
{
name: "TestWaitForTaskWithTimeout",
args: args{
UID: "TestWaitForTaskWithTimeout",
client: sv,
interval: time.Millisecond * 50,
timeout: time.Millisecond * 10,
document: []docTest{
{ID: "123", Name: "Pride and Prejudice"},
{ID: "456", Name: "Le Petit Prince"},
{ID: "1", Name: "Alice In Wonderland"},
},
},
want: "succeeded",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
c := tt.args.client
i := c.Index(tt.args.UID)
t.Cleanup(cleanup(c))
task, err := i.AddDocuments(tt.args.document)
require.NoError(t, err)
ctx, cancelFunc := context.WithTimeout(context.Background(), tt.args.timeout)
defer cancelFunc()
gotTask, err := i.WaitForTaskWithContext(ctx, task.TaskUID, 0)
if tt.args.timeout < tt.args.interval {
require.Error(t, err)
} else {
require.NoError(t, err)
require.Equal(t, tt.want, gotTask.Status)
}
})
}
}

index_test.go Normal file

@@ -0,0 +1,422 @@
package meilisearch
import (
"crypto/tls"
"github.com/stretchr/testify/require"
"testing"
)
func TestIndex_Delete(t *testing.T) {
sv := setup(t, "")
customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{
InsecureSkipVerify: true,
}))
type args struct {
createUid []string
deleteUid []string
}
tests := []struct {
name string
client ServiceManager
args args
}{
{
name: "TestIndexDeleteOneIndex",
client: sv,
args: args{
createUid: []string{"TestIndexDeleteOneIndex"},
deleteUid: []string{"TestIndexDeleteOneIndex"},
},
},
{
name: "TestIndexDeleteOneIndexWithCustomClient",
client: customSv,
args: args{
createUid: []string{"TestIndexDeleteOneIndexWithCustomClient"},
deleteUid: []string{"TestIndexDeleteOneIndexWithCustomClient"},
},
},
{
name: "TestIndexDeleteMultipleIndex",
client: sv,
args: args{
createUid: []string{
"TestIndexDeleteMultipleIndex_1",
"TestIndexDeleteMultipleIndex_2",
"TestIndexDeleteMultipleIndex_3",
"TestIndexDeleteMultipleIndex_4",
"TestIndexDeleteMultipleIndex_5",
},
deleteUid: []string{
"TestIndexDeleteMultipleIndex_1",
"TestIndexDeleteMultipleIndex_2",
"TestIndexDeleteMultipleIndex_3",
"TestIndexDeleteMultipleIndex_4",
"TestIndexDeleteMultipleIndex_5",
},
},
},
{
name: "TestIndexDeleteNotExistingIndex",
client: sv,
args: args{
createUid: []string{},
deleteUid: []string{"TestIndexDeleteNotExistingIndex"},
},
},
{
name: "TestIndexDeleteMultipleNotExistingIndex",
client: sv,
args: args{
createUid: []string{},
deleteUid: []string{
"TestIndexDeleteMultipleNotExistingIndex_1",
"TestIndexDeleteMultipleNotExistingIndex_2",
"TestIndexDeleteMultipleNotExistingIndex_3",
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
c := tt.client
t.Cleanup(cleanup(c))
for _, uid := range tt.args.createUid {
_, err := setUpEmptyIndex(sv, &IndexConfig{Uid: uid})
require.NoError(t, err, "CreateIndex() in DeleteTest error should be nil")
}
for k := range tt.args.deleteUid {
i := c.Index(tt.args.deleteUid[k])
gotResp, err := i.Delete(tt.args.deleteUid[k])
require.True(t, gotResp)
require.NoError(t, err)
}
})
}
}
func TestIndex_GetStats(t *testing.T) {
sv := setup(t, "")
customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{
InsecureSkipVerify: true,
}))
type args struct {
UID string
client ServiceManager
}
tests := []struct {
name string
args args
wantResp *StatsIndex
}{
{
name: "TestIndexBasicGetStats",
args: args{
UID: "TestIndexBasicGetStats",
client: sv,
},
wantResp: &StatsIndex{
NumberOfDocuments: 6,
IsIndexing: false,
FieldDistribution: map[string]int64{"book_id": 6, "title": 6},
RawDocumentDbSize: 4096,
AvgDocumentSize: 674,
},
},
{
name: "TestIndexGetStatsWithCustomClient",
args: args{
UID: "TestIndexGetStatsWithCustomClient",
client: customSv,
},
wantResp: &StatsIndex{
NumberOfDocuments: 6,
IsIndexing: false,
FieldDistribution: map[string]int64{"book_id": 6, "title": 6},
RawDocumentDbSize: 4096,
AvgDocumentSize: 674,
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
setUpBasicIndex(sv, tt.args.UID)
c := tt.args.client
i := c.Index(tt.args.UID)
t.Cleanup(cleanup(c))
gotResp, err := i.GetStats()
require.NoError(t, err)
require.Equal(t, tt.wantResp, gotResp)
})
}
}
func Test_newIndex(t *testing.T) {
sv := setup(t, "")
customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{
InsecureSkipVerify: true,
}))
type args struct {
client ServiceManager
uid string
}
tests := []struct {
name string
args args
want IndexManager
}{
{
name: "TestBasicNewIndex",
args: args{
client: sv,
uid: "TestBasicNewIndex",
},
want: sv.Index("TestBasicNewIndex"),
},
{
name: "TestNewIndexCustomClient",
args: args{
client: sv,
uid: "TestNewIndexCustomClient",
},
want: customSv.Index("TestNewIndexCustomClient"),
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
c := tt.args.client
t.Cleanup(cleanup(c))
gotIdx := c.Index(tt.args.uid)
task, err := c.CreateIndex(&IndexConfig{Uid: tt.args.uid})
require.NoError(t, err)
testWaitForTask(t, gotIdx, task)
gotIdxResult, err := gotIdx.FetchInfo()
require.NoError(t, err)
wantIdxResult, err := tt.want.FetchInfo()
require.NoError(t, err)
require.Equal(t, gotIdxResult.UID, wantIdxResult.UID)
// Timestamps should be populated once the index info has been fetched
require.NotZero(t, gotIdxResult.CreatedAt)
require.NotZero(t, gotIdxResult.UpdatedAt)
})
}
}
func TestIndex_FetchInfo(t *testing.T) {
sv := setup(t, "")
customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{
InsecureSkipVerify: true,
}))
broken := setup(t, "", WithAPIKey("wrong"))
type args struct {
UID string
client ServiceManager
}
tests := []struct {
name string
args args
wantResp *IndexResult
}{
{
name: "TestIndexBasicFetchInfo",
args: args{
UID: "TestIndexBasicFetchInfo",
client: sv,
},
wantResp: &IndexResult{
UID: "TestIndexBasicFetchInfo",
PrimaryKey: "book_id",
},
},
{
name: "TestIndexFetchInfoWithCustomClient",
args: args{
UID: "TestIndexFetchInfoWithCustomClient",
client: customSv,
},
wantResp: &IndexResult{
UID: "TestIndexFetchInfoWithCustomClient",
PrimaryKey: "book_id",
},
},
{
name: "TestIndexFetchInfoWithBrokenClient",
args: args{
UID: "TestIndexFetchInfoWithCustomClient",
client: broken,
},
wantResp: nil,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
setUpBasicIndex(sv, tt.args.UID)
c := tt.args.client
t.Cleanup(cleanup(c))
i := c.Index(tt.args.UID)
gotResp, err := i.FetchInfo()
if tt.wantResp == nil {
require.Error(t, err)
require.Nil(t, gotResp)
} else {
require.NoError(t, err)
require.Equal(t, tt.wantResp.UID, gotResp.UID)
require.Equal(t, tt.wantResp.PrimaryKey, gotResp.PrimaryKey)
// Make sure that timestamps are also fetched and are updated
require.NotZero(t, gotResp.CreatedAt)
require.NotZero(t, gotResp.UpdatedAt)
}
})
}
}
func TestIndex_FetchPrimaryKey(t *testing.T) {
sv := setup(t, "")
customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{
InsecureSkipVerify: true,
}))
type args struct {
UID string
client ServiceManager
}
tests := []struct {
name string
args args
wantPrimaryKey string
}{
{
name: "TestIndexBasicFetchPrimaryKey",
args: args{
UID: "TestIndexBasicFetchPrimaryKey",
client: sv,
},
wantPrimaryKey: "book_id",
},
{
name: "TestIndexFetchPrimaryKeyWithCustomClient",
args: args{
UID: "TestIndexFetchPrimaryKeyWithCustomClient",
client: customSv,
},
wantPrimaryKey: "book_id",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
setUpBasicIndex(tt.args.client, tt.args.UID)
c := tt.args.client
i := c.Index(tt.args.UID)
t.Cleanup(cleanup(c))
gotPrimaryKey, err := i.FetchPrimaryKey()
require.NoError(t, err)
require.Equal(t, &tt.wantPrimaryKey, gotPrimaryKey)
})
}
}
func TestIndex_UpdateIndex(t *testing.T) {
sv := setup(t, "")
customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{
InsecureSkipVerify: true,
}))
type args struct {
primaryKey string
config IndexConfig
client ServiceManager
}
tests := []struct {
name string
args args
wantResp *IndexResult
}{
{
name: "TestIndexBasicUpdateIndex",
args: args{
client: sv,
config: IndexConfig{
Uid: "indexUID",
},
primaryKey: "book_id",
},
wantResp: &IndexResult{
UID: "indexUID",
PrimaryKey: "book_id",
},
},
{
name: "TestIndexUpdateIndexWithCustomClient",
args: args{
client: customSv,
config: IndexConfig{
Uid: "indexUID",
},
primaryKey: "book_id",
},
wantResp: &IndexResult{
UID: "indexUID",
PrimaryKey: "book_id",
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
c := tt.args.client
t.Cleanup(cleanup(c))
i, err := setUpEmptyIndex(tt.args.client, &tt.args.config)
require.NoError(t, err)
require.Equal(t, tt.args.config.Uid, i.UID)
// Store original timestamps
createdAt := i.CreatedAt
updatedAt := i.UpdatedAt
gotResp, err := i.UpdateIndex(tt.args.primaryKey)
require.NoError(t, err)
_, err = c.WaitForTask(gotResp.TaskUID, 0)
require.NoError(t, err)
require.Equal(t, tt.wantResp.UID, gotResp.IndexUID)
gotIndex, err := c.GetIndex(tt.args.config.Uid)
require.NoError(t, err)
require.Equal(t, tt.wantResp.PrimaryKey, gotIndex.PrimaryKey)
// Make sure that timestamps were correctly updated as well
require.Equal(t, createdAt, gotIndex.CreatedAt)
require.NotEqual(t, updatedAt, gotIndex.UpdatedAt)
})
}
}
func TestIndexManagerAndReaders(t *testing.T) {
c := setup(t, "")
idx := c.Index("indexUID")
require.NotNil(t, idx)
require.NotNil(t, idx.GetIndexReader())
require.NotNil(t, idx.GetTaskReader())
require.NotNil(t, idx.GetSettingsManager())
require.NotNil(t, idx.GetSettingsReader())
require.NotNil(t, idx.GetSearch())
require.NotNil(t, idx.GetDocumentManager())
require.NotNil(t, idx.GetDocumentReader())
}

main_test.go Normal file

@@ -0,0 +1,424 @@
package meilisearch
import (
"bufio"
"context"
"encoding/csv"
"encoding/json"
"fmt"
"github.com/stretchr/testify/require"
"io"
"net/http"
"os"
"strings"
"testing"
)
var (
masterKey = "masterKey"
defaultRankingRules = []string{
"words", "typo", "proximity", "attribute", "sort", "exactness",
}
defaultTypoTolerance = TypoTolerance{
Enabled: true,
MinWordSizeForTypos: MinWordSizeForTypos{
OneTypo: 5,
TwoTypos: 9,
},
DisableOnWords: []string{},
DisableOnAttributes: []string{},
}
defaultPagination = Pagination{
MaxTotalHits: 1000,
}
defaultFaceting = Faceting{
MaxValuesPerFacet: 100,
SortFacetValuesBy: map[string]SortFacetType{
"*": SortFacetTypeAlpha,
},
}
)
var testNdjsonDocuments = []byte(`{"id": 1, "name": "Alice In Wonderland"}
{"id": 2, "name": "Pride and Prejudice"}
{"id": 3, "name": "Le Petit Prince"}
{"id": 4, "name": "The Great Gatsby"}
{"id": 5, "name": "Don Quixote"}
`)
var testCsvDocuments = []byte(`id,name
1,Alice In Wonderland
2,Pride and Prejudice
3,Le Petit Prince
4,The Great Gatsby
5,Don Quixote
`)
type docTest struct {
ID string `json:"id"`
Name string `json:"name"`
}
type docTestBooks struct {
BookID int `json:"book_id"`
Title string `json:"title"`
Tag string `json:"tag"`
Year int `json:"year"`
}
func setup(t *testing.T, host string, options ...Option) ServiceManager {
t.Helper()
opts := make([]Option, 0)
opts = append(opts, WithAPIKey(masterKey))
opts = append(opts, options...)
if host == "" {
host = getenv("MEILISEARCH_URL", "http://localhost:7700")
}
sv := New(host, opts...)
return sv
}
func cleanup(services ...ServiceManager) func() {
return func() {
for _, s := range services {
_, _ = deleteAllIndexes(s)
_, _ = deleteAllKeys(s)
}
}
}
func getPrivateKey(sv ServiceManager) (key string) {
list, err := sv.GetKeys(nil)
if err != nil {
return ""
}
for _, key := range list.Results {
if strings.Contains(key.Name, "Default Admin API Key") || (key.Description == "") {
return key.Key
}
}
return ""
}
func getPrivateUIDKey(sv ServiceManager) (key string) {
list, err := sv.GetKeys(nil)
if err != nil {
return ""
}
for _, key := range list.Results {
if strings.Contains(key.Name, "Default Admin API Key") || (key.Description == "") {
return key.UID
}
}
return ""
}
func deleteAllIndexes(sv ServiceManager) (ok bool, err error) {
list, err := sv.ListIndexes(nil)
if err != nil {
return false, err
}
for _, index := range list.Results {
task, _ := sv.DeleteIndex(index.UID)
_, err := sv.WaitForTask(task.TaskUID, 0)
if err != nil {
return false, err
}
}
return true, nil
}
func deleteAllKeys(sv ServiceManager) (ok bool, err error) {
list, err := sv.GetKeys(nil)
if err != nil {
return false, err
}
for _, key := range list.Results {
if strings.Contains(key.Description, "Test") || (key.Description == "") {
_, err = sv.DeleteKey(key.Key)
if err != nil {
return false, err
}
}
}
return true, nil
}
func getenv(key, fallback string) string {
value := os.Getenv(key)
if len(value) == 0 {
return fallback
}
return value
}
func testWaitForTask(t *testing.T, i IndexManager, u *TaskInfo) {
t.Helper()
r, err := i.WaitForTask(u.TaskUID, 0)
require.NoError(t, err)
require.Equal(t, TaskStatusSucceeded, r.Status, fmt.Sprintf("Task failed: %#+v", r))
}
func testWaitForBatchTask(t *testing.T, i IndexManager, u []TaskInfo) {
for _, id := range u {
_, err := i.WaitForTask(id.TaskUID, 0)
require.NoError(t, err)
}
}
func setUpEmptyIndex(sv ServiceManager, index *IndexConfig) (resp *IndexResult, err error) {
task, err := sv.CreateIndex(index)
if err != nil {
fmt.Println(err)
return nil, err
}
finalTask, _ := sv.WaitForTask(task.TaskUID, 0)
if finalTask.Status != "succeeded" {
cleanup(sv)() // cleanup returns a closure; invoke it before retrying
return setUpEmptyIndex(sv, index)
}
return sv.GetIndex(index.Uid)
}
func setUpBasicIndex(sv ServiceManager, indexUID string) {
index := sv.Index(indexUID)
documents := []map[string]interface{}{
{"book_id": 123, "title": "Pride and Prejudice"},
{"book_id": 456, "title": "Le Petit Prince"},
{"book_id": 1, "title": "Alice In Wonderland"},
{"book_id": 1344, "title": "The Hobbit"},
{"book_id": 4, "title": "Harry Potter and the Half-Blood Prince"},
{"book_id": 42, "title": "The Hitchhiker's Guide to the Galaxy"},
}
task, err := index.AddDocuments(documents)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
finalTask, _ := index.WaitForTask(task.TaskUID, 0)
if finalTask.Status != "succeeded" {
os.Exit(1)
}
}
func setupMovieIndex(t *testing.T, client ServiceManager) IndexManager {
t.Helper()
idx := client.Index("indexUID")
testdata, err := os.Open("./testdata/movies.json")
require.NoError(t, err)
defer testdata.Close()
tests := make([]map[string]interface{}, 0)
require.NoError(t, json.NewDecoder(testdata).Decode(&tests))
task, err := idx.AddDocuments(tests)
require.NoError(t, err)
testWaitForTask(t, idx, task)
task, err = idx.UpdateFilterableAttributes(&[]string{"id"})
require.NoError(t, err)
testWaitForTask(t, idx, task)
return idx
}
func setUpIndexForFaceting(client ServiceManager) {
idx := client.Index("indexUID")
booksTest := []docTestBooks{
{BookID: 123, Title: "Pride and Prejudice", Tag: "Romance", Year: 1813},
{BookID: 456, Title: "Le Petit Prince", Tag: "Tale", Year: 1943},
{BookID: 1, Title: "Alice In Wonderland", Tag: "Tale", Year: 1865},
{BookID: 1344, Title: "The Hobbit", Tag: "Epic fantasy", Year: 1937},
{BookID: 4, Title: "Harry Potter and the Half-Blood Prince", Tag: "Epic fantasy", Year: 2005},
{BookID: 42, Title: "The Hitchhiker's Guide to the Galaxy", Tag: "Epic fantasy", Year: 1978},
{BookID: 742, Title: "The Great Gatsby", Tag: "Tragedy", Year: 1925},
{BookID: 834, Title: "One Hundred Years of Solitude", Tag: "Tragedy", Year: 1967},
{BookID: 17, Title: "In Search of Lost Time", Tag: "Modernist literature", Year: 1913},
{BookID: 204, Title: "Ulysses", Tag: "Novel", Year: 1922},
{BookID: 7, Title: "Don Quixote", Tag: "Satiric", Year: 1605},
{BookID: 10, Title: "Moby Dick", Tag: "Novel", Year: 1851},
{BookID: 730, Title: "War and Peace", Tag: "Historical fiction", Year: 1865},
{BookID: 69, Title: "Hamlet", Tag: "Tragedy", Year: 1598},
{BookID: 32, Title: "The Odyssey", Tag: "Epic", Year: 1571},
{BookID: 71, Title: "Madame Bovary", Tag: "Novel", Year: 1857},
{BookID: 56, Title: "The Divine Comedy", Tag: "Epic", Year: 1303},
{BookID: 254, Title: "Lolita", Tag: "Novel", Year: 1955},
{BookID: 921, Title: "The Brothers Karamazov", Tag: "Novel", Year: 1879},
{BookID: 1032, Title: "Crime and Punishment", Tag: "Crime fiction", Year: 1866},
{BookID: 1039, Title: "The Girl in the white shirt", Tag: "white shirt", Year: 1999},
{BookID: 1050, Title: "星の王子さま", Tag: "物語", Year: 1943},
}
task, err := idx.AddDocuments(booksTest)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
finalTask, _ := idx.WaitForTask(task.TaskUID, 0)
if finalTask.Status != "succeeded" {
os.Exit(1)
}
}
func setUpIndexWithNestedFields(client ServiceManager, indexUID string) {
index := client.Index(indexUID)
documents := []map[string]interface{}{
{"id": 1, "title": "Pride and Prejudice", "info": map[string]interface{}{"comment": "A great book", "reviewNb": 50}},
{"id": 2, "title": "Le Petit Prince", "info": map[string]interface{}{"comment": "A french book", "reviewNb": 600}},
{"id": 3, "title": "Le Rouge et le Noir", "info": map[string]interface{}{"comment": "Another french book", "reviewNb": 700}},
{"id": 4, "title": "Alice In Wonderland", "comment": "A weird book", "info": map[string]interface{}{"comment": "A weird book", "reviewNb": 800}},
{"id": 5, "title": "The Hobbit", "info": map[string]interface{}{"comment": "An awesome book", "reviewNb": 900}},
{"id": 6, "title": "Harry Potter and the Half-Blood Prince", "info": map[string]interface{}{"comment": "The best book", "reviewNb": 1000}},
{"id": 7, "title": "The Hitchhiker's Guide to the Galaxy"},
}
task, err := index.AddDocuments(documents)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
finalTask, _ := index.WaitForTask(task.TaskUID, 0)
if finalTask.Status != "succeeded" {
os.Exit(1)
}
}
func setUpIndexWithVector(client *meilisearch, indexUID string) (resp *IndexResult, err error) {
req := &internalRequest{
endpoint: "/experimental-features",
method: http.MethodPatch,
contentType: "application/json",
withRequest: map[string]interface{}{
"vectorStore": true,
},
}
if err := client.client.executeRequest(context.Background(), req); err != nil {
return nil, err
}
idx := client.Index(indexUID)
taskInfo, err := idx.UpdateSettings(&Settings{
Embedders: map[string]Embedder{
"default": {
Source: "userProvided",
Dimensions: 3,
},
},
})
if err != nil {
return nil, err
}
settingsTask, err := idx.WaitForTask(taskInfo.TaskUID, 0)
if err != nil {
return nil, err
}
if settingsTask.Status != TaskStatusSucceeded {
return nil, fmt.Errorf("Update settings task failed: %#+v", settingsTask)
}
documents := []map[string]interface{}{
{"book_id": 123, "title": "Pride and Prejudice", "_vectors": map[string]interface{}{"default": []float64{0.1, 0.2, 0.3}}},
{"book_id": 456, "title": "Le Petit Prince", "_vectors": map[string]interface{}{"default": []float64{2.4, 8.5, 1.6}}},
}
taskInfo, err = idx.AddDocuments(documents)
if err != nil {
return nil, err
}
finalTask, _ := idx.WaitForTask(taskInfo.TaskUID, 0)
if finalTask.Status != TaskStatusSucceeded {
return nil, fmt.Errorf("Add documents task failed: %#+v", finalTask)
}
return client.GetIndex(indexUID)
}
func setUpDistinctIndex(client ServiceManager, indexUID string) {
idx := client.Index(indexUID)
atters := []string{"product_id", "title", "sku", "url"}
task, err := idx.UpdateFilterableAttributes(&atters)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
finalTask, _ := idx.WaitForTask(task.TaskUID, 0)
if finalTask.Status != "succeeded" {
os.Exit(1)
}
documents := []map[string]interface{}{
{"product_id": 123, "title": "white shirt", "sku": "sku1234", "url": "https://example.com/products/p123"},
{"product_id": 456, "title": "red shirt", "sku": "sku213", "url": "https://example.com/products/p456"},
{"product_id": 1, "title": "green shirt", "sku": "sku876", "url": "https://example.com/products/p1"},
{"product_id": 1344, "title": "blue shirt", "sku": "sku963", "url": "https://example.com/products/p1344"},
{"product_id": 4, "title": "yellow shirt", "sku": "sku9064", "url": "https://example.com/products/p4"},
{"product_id": 42, "title": "gray shirt", "sku": "sku964", "url": "https://example.com/products/p42"},
}
task, err = idx.AddDocuments(documents)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
finalTask, _ = idx.WaitForTask(task.TaskUID, 0)
if finalTask.Status != "succeeded" {
os.Exit(1)
}
}
func testParseCsvDocuments(t *testing.T, documents io.Reader) []map[string]interface{} {
var (
docs []map[string]interface{}
header []string
)
r := csv.NewReader(documents)
for {
record, err := r.Read()
if err == io.EOF {
break
}
require.NoError(t, err)
if header == nil {
header = record
continue
}
doc := make(map[string]interface{})
for i, key := range header {
doc[key] = record[i]
}
docs = append(docs, doc)
}
return docs
}
func testParseNdjsonDocuments(t *testing.T, documents io.Reader) []map[string]interface{} {
var docs []map[string]interface{}
scanner := bufio.NewScanner(documents)
for scanner.Scan() {
line := strings.TrimSpace(scanner.Text())
if line == "" {
continue
}
doc := make(map[string]interface{})
err := json.Unmarshal([]byte(line), &doc)
require.NoError(t, err)
docs = append(docs, doc)
}
require.NoError(t, scanner.Err())
return docs
}

makefile Normal file

@@ -0,0 +1,12 @@
.PHONY: test easyjson requirements

easyjson:
	easyjson -all types.go

test:
	docker compose run --rm package bash -c "go get && golangci-lint run -v && go test -v"

requirements:
	go get github.com/mailru/easyjson && go install github.com/mailru/easyjson/...@latest
	curl -fsSL https://get.docker.com -o get-docker.sh && sh get-docker.sh
	go get -v -t ./...

meilisearch.go Normal file

@@ -0,0 +1,717 @@
package meilisearch
import (
"context"
"fmt"
"net/http"
"strconv"
"time"
"github.com/golang-jwt/jwt/v4"
)
type meilisearch struct {
client *client
}
// New creates a new service manager for operating on Meilisearch
func New(host string, options ...Option) ServiceManager {
defOpt := defaultMeiliOpt
for _, opt := range options {
opt(defOpt)
}
return &meilisearch{
client: newClient(
defOpt.client,
host,
defOpt.apiKey,
clientConfig{
contentEncoding: defOpt.contentEncoding.encodingType,
encodingCompressionLevel: defOpt.contentEncoding.level,
disableRetry: defOpt.disableRetry,
retryOnStatus: defOpt.retryOnStatus,
maxRetries: defOpt.maxRetries,
},
),
}
}
// Connect creates a service manager and checks the connection to Meilisearch
func Connect(host string, options ...Option) (ServiceManager, error) {
meili := New(host, options...)
if !meili.IsHealthy() {
return nil, ErrConnectingFailed
}
return meili, nil
}
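
A minimal sketch contrasting the two constructors above: New never touches the network, while Connect pings the health endpoint and fails fast with ErrConnectingFailed when the instance is unreachable. The host and key are assumptions:

package main

import (
	"log"

	"github.com/meilisearch/meilisearch-go"
)

func main() {
	// Connect returns ErrConnectingFailed when the health check does not
	// report an available instance.
	client, err := meilisearch.Connect("http://localhost:7700",
		meilisearch.WithAPIKey("masterKey"))
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()
	log.Println("connected, healthy:", client.IsHealthy())
}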
func (m *meilisearch) ServiceReader() ServiceReader {
return m
}
func (m *meilisearch) TaskManager() TaskManager {
return m
}
func (m *meilisearch) TaskReader() TaskReader {
return m
}
func (m *meilisearch) KeyManager() KeyManager {
return m
}
func (m *meilisearch) KeyReader() KeyReader {
return m
}
func (m *meilisearch) Index(uid string) IndexManager {
return newIndex(m.client, uid)
}
func (m *meilisearch) GetIndex(indexID string) (*IndexResult, error) {
return m.GetIndexWithContext(context.Background(), indexID)
}
func (m *meilisearch) GetIndexWithContext(ctx context.Context, indexID string) (*IndexResult, error) {
return newIndex(m.client, indexID).FetchInfoWithContext(ctx)
}
func (m *meilisearch) GetRawIndex(uid string) (map[string]interface{}, error) {
return m.GetRawIndexWithContext(context.Background(), uid)
}
func (m *meilisearch) GetRawIndexWithContext(ctx context.Context, uid string) (map[string]interface{}, error) {
resp := map[string]interface{}{}
req := &internalRequest{
endpoint: "/indexes/" + uid,
method: http.MethodGet,
withRequest: nil,
withResponse: &resp,
acceptedStatusCodes: []int{http.StatusOK},
functionName: "GetRawIndex",
}
if err := m.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (m *meilisearch) ListIndexes(param *IndexesQuery) (*IndexesResults, error) {
return m.ListIndexesWithContext(context.Background(), param)
}
func (m *meilisearch) ListIndexesWithContext(ctx context.Context, param *IndexesQuery) (*IndexesResults, error) {
resp := new(IndexesResults)
req := &internalRequest{
endpoint: "/indexes",
method: http.MethodGet,
withRequest: nil,
withResponse: &resp,
withQueryParams: map[string]string{},
acceptedStatusCodes: []int{http.StatusOK},
functionName: "GetIndexes",
}
if param != nil && param.Limit != 0 {
req.withQueryParams["limit"] = strconv.FormatInt(param.Limit, 10)
}
if param != nil && param.Offset != 0 {
req.withQueryParams["offset"] = strconv.FormatInt(param.Offset, 10)
}
if err := m.client.executeRequest(ctx, req); err != nil {
return nil, err
}
for i := range resp.Results {
resp.Results[i].IndexManager = newIndex(m.client, resp.Results[i].UID)
}
return resp, nil
}
func (m *meilisearch) GetRawIndexes(param *IndexesQuery) (map[string]interface{}, error) {
return m.GetRawIndexesWithContext(context.Background(), param)
}
func (m *meilisearch) GetRawIndexesWithContext(ctx context.Context, param *IndexesQuery) (map[string]interface{}, error) {
resp := map[string]interface{}{}
req := &internalRequest{
endpoint: "/indexes",
method: http.MethodGet,
withRequest: nil,
withResponse: &resp,
withQueryParams: map[string]string{},
acceptedStatusCodes: []int{http.StatusOK},
functionName: "GetRawIndexes",
}
if param != nil && param.Limit != 0 {
req.withQueryParams["limit"] = strconv.FormatInt(param.Limit, 10)
}
if param != nil && param.Offset != 0 {
req.withQueryParams["offset"] = strconv.FormatInt(param.Offset, 10)
}
if err := m.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (m *meilisearch) CreateIndex(config *IndexConfig) (*TaskInfo, error) {
return m.CreateIndexWithContext(context.Background(), config)
}
func (m *meilisearch) CreateIndexWithContext(ctx context.Context, config *IndexConfig) (*TaskInfo, error) {
request := &CreateIndexRequest{
UID: config.Uid,
PrimaryKey: config.PrimaryKey,
}
resp := new(TaskInfo)
req := &internalRequest{
endpoint: "/indexes",
method: http.MethodPost,
contentType: contentTypeJSON,
withRequest: request,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusAccepted},
functionName: "CreateIndex",
}
if err := m.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (m *meilisearch) DeleteIndex(uid string) (*TaskInfo, error) {
return m.DeleteIndexWithContext(context.Background(), uid)
}
func (m *meilisearch) DeleteIndexWithContext(ctx context.Context, uid string) (*TaskInfo, error) {
resp := new(TaskInfo)
req := &internalRequest{
endpoint: "/indexes/" + uid,
method: http.MethodDelete,
withRequest: nil,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusAccepted},
functionName: "DeleteIndex",
}
if err := m.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (m *meilisearch) MultiSearch(queries *MultiSearchRequest) (*MultiSearchResponse, error) {
return m.MultiSearchWithContext(context.Background(), queries)
}
func (m *meilisearch) MultiSearchWithContext(ctx context.Context, queries *MultiSearchRequest) (*MultiSearchResponse, error) {
resp := new(MultiSearchResponse)
for i := 0; i < len(queries.Queries); i++ {
queries.Queries[i].validate()
}
req := &internalRequest{
endpoint: "/multi-search",
method: http.MethodPost,
contentType: contentTypeJSON,
withRequest: queries,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusOK},
functionName: "MultiSearch",
}
if err := m.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (m *meilisearch) CreateKey(request *Key) (*Key, error) {
return m.CreateKeyWithContext(context.Background(), request)
}
func (m *meilisearch) CreateKeyWithContext(ctx context.Context, request *Key) (*Key, error) {
parsedRequest := convertKeyToParsedKey(*request)
resp := new(Key)
req := &internalRequest{
endpoint: "/keys",
method: http.MethodPost,
contentType: contentTypeJSON,
withRequest: &parsedRequest,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusCreated},
functionName: "CreateKey",
}
if err := m.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (m *meilisearch) GetKey(identifier string) (*Key, error) {
return m.GetKeyWithContext(context.Background(), identifier)
}
func (m *meilisearch) GetKeyWithContext(ctx context.Context, identifier string) (*Key, error) {
resp := new(Key)
req := &internalRequest{
endpoint: "/keys/" + identifier,
method: http.MethodGet,
withRequest: nil,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusOK},
functionName: "GetKey",
}
if err := m.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (m *meilisearch) GetKeys(param *KeysQuery) (*KeysResults, error) {
return m.GetKeysWithContext(context.Background(), param)
}
func (m *meilisearch) GetKeysWithContext(ctx context.Context, param *KeysQuery) (*KeysResults, error) {
resp := new(KeysResults)
req := &internalRequest{
endpoint: "/keys",
method: http.MethodGet,
withRequest: nil,
withResponse: resp,
withQueryParams: map[string]string{},
acceptedStatusCodes: []int{http.StatusOK},
functionName: "GetKeys",
}
if param != nil && param.Limit != 0 {
req.withQueryParams["limit"] = strconv.FormatInt(param.Limit, 10)
}
if param != nil && param.Offset != 0 {
req.withQueryParams["offset"] = strconv.FormatInt(param.Offset, 10)
}
if err := m.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (m *meilisearch) UpdateKey(keyOrUID string, request *Key) (*Key, error) {
return m.UpdateKeyWithContext(context.Background(), keyOrUID, request)
}
func (m *meilisearch) UpdateKeyWithContext(ctx context.Context, keyOrUID string, request *Key) (*Key, error) {
parsedRequest := KeyUpdate{Name: request.Name, Description: request.Description}
resp := new(Key)
req := &internalRequest{
endpoint: "/keys/" + keyOrUID,
method: http.MethodPatch,
contentType: contentTypeJSON,
withRequest: &parsedRequest,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusOK},
functionName: "UpdateKey",
}
if err := m.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (m *meilisearch) DeleteKey(keyOrUID string) (bool, error) {
return m.DeleteKeyWithContext(context.Background(), keyOrUID)
}
func (m *meilisearch) DeleteKeyWithContext(ctx context.Context, keyOrUID string) (bool, error) {
req := &internalRequest{
endpoint: "/keys/" + keyOrUID,
method: http.MethodDelete,
withRequest: nil,
withResponse: nil,
acceptedStatusCodes: []int{http.StatusNoContent},
functionName: "DeleteKey",
}
if err := m.client.executeRequest(ctx, req); err != nil {
return false, err
}
return true, nil
}
func (m *meilisearch) GetTask(taskUID int64) (*Task, error) {
return m.GetTaskWithContext(context.Background(), taskUID)
}
func (m *meilisearch) GetTaskWithContext(ctx context.Context, taskUID int64) (*Task, error) {
return getTask(ctx, m.client, taskUID)
}
func (m *meilisearch) GetTasks(param *TasksQuery) (*TaskResult, error) {
return m.GetTasksWithContext(context.Background(), param)
}
func (m *meilisearch) GetTasksWithContext(ctx context.Context, param *TasksQuery) (*TaskResult, error) {
resp := new(TaskResult)
req := &internalRequest{
endpoint: "/tasks",
method: http.MethodGet,
withRequest: nil,
withResponse: &resp,
withQueryParams: map[string]string{},
acceptedStatusCodes: []int{http.StatusOK},
functionName: "GetTasks",
}
if param != nil {
encodeTasksQuery(param, req)
}
if err := m.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (m *meilisearch) CancelTasks(param *CancelTasksQuery) (*TaskInfo, error) {
return m.CancelTasksWithContext(context.Background(), param)
}
func (m *meilisearch) CancelTasksWithContext(ctx context.Context, param *CancelTasksQuery) (*TaskInfo, error) {
resp := new(TaskInfo)
req := &internalRequest{
endpoint: "/tasks/cancel",
method: http.MethodPost,
withRequest: nil,
withResponse: &resp,
withQueryParams: map[string]string{},
acceptedStatusCodes: []int{http.StatusOK},
functionName: "CancelTasks",
}
if param != nil {
paramToSend := &TasksQuery{
UIDS: param.UIDS,
IndexUIDS: param.IndexUIDS,
Statuses: param.Statuses,
Types: param.Types,
BeforeEnqueuedAt: param.BeforeEnqueuedAt,
AfterEnqueuedAt: param.AfterEnqueuedAt,
BeforeStartedAt: param.BeforeStartedAt,
AfterStartedAt: param.AfterStartedAt,
}
encodeTasksQuery(paramToSend, req)
}
if err := m.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (m *meilisearch) DeleteTasks(param *DeleteTasksQuery) (*TaskInfo, error) {
return m.DeleteTasksWithContext(context.Background(), param)
}
func (m *meilisearch) DeleteTasksWithContext(ctx context.Context, param *DeleteTasksQuery) (*TaskInfo, error) {
resp := new(TaskInfo)
req := &internalRequest{
endpoint: "/tasks",
method: http.MethodDelete,
withRequest: nil,
withResponse: &resp,
withQueryParams: map[string]string{},
acceptedStatusCodes: []int{http.StatusOK},
functionName: "DeleteTasks",
}
if param != nil {
paramToSend := &TasksQuery{
UIDS: param.UIDS,
IndexUIDS: param.IndexUIDS,
Statuses: param.Statuses,
Types: param.Types,
CanceledBy: param.CanceledBy,
BeforeEnqueuedAt: param.BeforeEnqueuedAt,
AfterEnqueuedAt: param.AfterEnqueuedAt,
BeforeStartedAt: param.BeforeStartedAt,
AfterStartedAt: param.AfterStartedAt,
BeforeFinishedAt: param.BeforeFinishedAt,
AfterFinishedAt: param.AfterFinishedAt,
}
encodeTasksQuery(paramToSend, req)
}
if err := m.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (m *meilisearch) SwapIndexes(param []*SwapIndexesParams) (*TaskInfo, error) {
return m.SwapIndexesWithContext(context.Background(), param)
}
func (m *meilisearch) SwapIndexesWithContext(ctx context.Context, param []*SwapIndexesParams) (*TaskInfo, error) {
resp := new(TaskInfo)
req := &internalRequest{
endpoint: "/swap-indexes",
method: http.MethodPost,
contentType: contentTypeJSON,
withRequest: param,
withResponse: &resp,
acceptedStatusCodes: []int{http.StatusAccepted},
functionName: "SwapIndexes",
}
if err := m.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (m *meilisearch) WaitForTask(taskUID int64, interval time.Duration) (*Task, error) {
return waitForTask(context.Background(), m.client, taskUID, interval)
}
func (m *meilisearch) WaitForTaskWithContext(ctx context.Context, taskUID int64, interval time.Duration) (*Task, error) {
return waitForTask(ctx, m.client, taskUID, interval)
}
func (m *meilisearch) GenerateTenantToken(
apiKeyUID string,
searchRules map[string]interface{},
options *TenantTokenOptions,
) (string, error) {
// validate the arguments
if searchRules == nil {
return "", fmt.Errorf("GenerateTenantToken: The search rules added in the token generation " +
"must be of type array or object")
}
if (options == nil || options.APIKey == "") && m.client.apiKey == "" {
return "", fmt.Errorf("GenerateTenantToken: The API key used for the token " +
"generation must exist and be a valid meilisearch key")
}
if apiKeyUID == "" || !IsValidUUID(apiKeyUID) {
return "", fmt.Errorf("GenerateTenantToken: The uid used for the token " +
"generation must exist and comply to uuid4 format")
}
if options != nil && !options.ExpiresAt.IsZero() && options.ExpiresAt.Before(time.Now()) {
return "", fmt.Errorf("GenerateTenantToken: When the expiresAt field in " +
"the token generation has a value, it must be a date set in the future")
}
var secret string
if options == nil || options.APIKey == "" {
secret = m.client.apiKey
} else {
secret = options.APIKey
}
// For HMAC signing method, the key should be any []byte
hmacSampleSecret := []byte(secret)
// Create the claims
claims := TenantTokenClaims{}
if options != nil && !options.ExpiresAt.IsZero() {
claims.RegisteredClaims = jwt.RegisteredClaims{
ExpiresAt: jwt.NewNumericDate(options.ExpiresAt),
}
}
claims.APIKeyUID = apiKeyUID
claims.SearchRules = searchRules
// Create a new token object, specifying signing method and the claims
token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
// Sign and get the complete encoded token as a string using the secret
tokenString, err := token.SignedString(hmacSampleSecret)
return tokenString, err
}
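
A sketch of issuing a tenant token with the method above and using it as the credential of a search-side client, assuming the method is reachable from the ServiceManager returned by New (as implemented here). The key uid is a made-up placeholder (any uuid4 of an existing key with search rights), and the rules restrict the token to a hypothetical "movies" index:

package main

import (
	"log"
	"time"

	"github.com/meilisearch/meilisearch-go"
)

func main() {
	admin := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey"))
	defer admin.Close()

	// Restrict the token to the "movies" index; "*" would allow all indexes.
	searchRules := map[string]interface{}{
		"movies": map[string]interface{}{},
	}

	// apiKeyUID must be the uuid4 uid of an existing key; this one is hypothetical.
	token, err := admin.GenerateTenantToken(
		"85c3c2f9-bdd6-41f1-abd8-11fcf80e0f76",
		searchRules,
		&meilisearch.TenantTokenOptions{
			ExpiresAt: time.Now().Add(24 * time.Hour),
		},
	)
	if err != nil {
		log.Fatal(err)
	}

	// The signed JWT takes the place of an API key on the search side.
	searchClient := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey(token))
	defer searchClient.Close()
}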
func (m *meilisearch) GetStats() (*Stats, error) {
return m.GetStatsWithContext(context.Background())
}
func (m *meilisearch) GetStatsWithContext(ctx context.Context) (*Stats, error) {
resp := new(Stats)
req := &internalRequest{
endpoint: "/stats",
method: http.MethodGet,
withRequest: nil,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusOK},
functionName: "GetStats",
}
if err := m.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (m *meilisearch) CreateDump() (*TaskInfo, error) {
return m.CreateDumpWithContext(context.Background())
}
func (m *meilisearch) CreateDumpWithContext(ctx context.Context) (*TaskInfo, error) {
resp := new(TaskInfo)
req := &internalRequest{
endpoint: "/dumps",
method: http.MethodPost,
contentType: contentTypeJSON,
withRequest: nil,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusAccepted},
functionName: "CreateDump",
}
if err := m.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (m *meilisearch) Version() (*Version, error) {
return m.VersionWithContext(context.Background())
}
func (m *meilisearch) VersionWithContext(ctx context.Context) (*Version, error) {
resp := new(Version)
req := &internalRequest{
endpoint: "/version",
method: http.MethodGet,
withRequest: nil,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusOK},
functionName: "Version",
}
if err := m.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (m *meilisearch) Health() (*Health, error) {
return m.HealthWithContext(context.Background())
}
func (m *meilisearch) HealthWithContext(ctx context.Context) (*Health, error) {
resp := new(Health)
req := &internalRequest{
endpoint: "/health",
method: http.MethodGet,
withRequest: nil,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusOK},
functionName: "Health",
}
if err := m.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (m *meilisearch) CreateSnapshot() (*TaskInfo, error) {
return m.CreateSnapshotWithContext(context.Background())
}
func (m *meilisearch) CreateSnapshotWithContext(ctx context.Context) (*TaskInfo, error) {
resp := new(TaskInfo)
req := &internalRequest{
endpoint: "/snapshots",
method: http.MethodPost,
withRequest: nil,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusAccepted},
contentType: contentTypeJSON,
functionName: "CreateSnapshot",
}
if err := m.client.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func (m *meilisearch) IsHealthy() bool {
res, err := m.HealthWithContext(context.Background())
return err == nil && res.Status == "available"
}
func (m *meilisearch) Close() {
m.client.client.CloseIdleConnections()
}
func getTask(ctx context.Context, cli *client, taskUID int64) (*Task, error) {
resp := new(Task)
req := &internalRequest{
endpoint: "/tasks/" + strconv.FormatInt(taskUID, 10),
method: http.MethodGet,
withRequest: nil,
withResponse: resp,
acceptedStatusCodes: []int{http.StatusOK},
functionName: "GetTask",
}
if err := cli.executeRequest(ctx, req); err != nil {
return nil, err
}
return resp, nil
}
func waitForTask(ctx context.Context, cli *client, taskUID int64, interval time.Duration) (*Task, error) {
if interval == 0 {
interval = 50 * time.Millisecond
}
	// fn fetches the task once and returns it only when it is no longer enqueued or processing.
fn := func() (*Task, error) {
getTask, err := getTask(ctx, cli, taskUID)
if err != nil {
return nil, err
}
if getTask.Status != TaskStatusEnqueued && getTask.Status != TaskStatusProcessing {
return getTask, nil
}
return nil, nil
}
	// Check once before starting the ticker so we do not wait a full interval when the task has already settled.
task, err := fn()
if err != nil {
// Return error if it exists
return nil, err
}
// Return task if it exists
if task != nil {
return task, nil
}
	// Create a ticker to poll the task status, since the initial check did not settle.
	ticker := time.NewTicker(interval)
	// Stop the ticker on return so the GC can reclaim it.
	defer func() {
		// We might want to revisit this: go.mod currently targets 1.16, but the
		// issue is still reproducible on go 1.22.2. Two upstream issues track
		// ticker cleanup:
		// https://go-review.googlesource.com/c/go/+/512355
		// https://github.com/golang/go/issues/61542
		ticker.Stop()
		ticker = nil
	}()
for {
select {
case <-ctx.Done():
return nil, ctx.Err()
case <-ticker.C:
task, err := fn()
if err != nil {
return nil, err
}
if task != nil {
return task, nil
}
}
}
}
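// Usage sketch (editor's addition): wait for an asynchronous operation to settle,
// polling every 100ms (an interval of 0 falls back to the 50ms default above).
//
//	info, err := client.CreateDump()
//	if err != nil {
//		return err
//	}
//	task, err := client.WaitForTask(info.TaskUID, 100*time.Millisecond)
//	if err == nil && task.Status == meilisearch.TaskStatusSucceeded {
//		// dump created
//	}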

187
meilisearch_interface.go Normal file
View file

@ -0,0 +1,187 @@
package meilisearch
import (
"context"
"time"
)
type ServiceManager interface {
ServiceReader
KeyManager
TaskManager
ServiceReader() ServiceReader
TaskManager() TaskManager
TaskReader() TaskReader
KeyManager() KeyManager
KeyReader() KeyReader
// CreateIndex creates a new index.
CreateIndex(config *IndexConfig) (*TaskInfo, error)
// CreateIndexWithContext creates a new index with a context for cancellation.
CreateIndexWithContext(ctx context.Context, config *IndexConfig) (*TaskInfo, error)
// DeleteIndex deletes a specific index.
DeleteIndex(uid string) (*TaskInfo, error)
// DeleteIndexWithContext deletes a specific index with a context for cancellation.
DeleteIndexWithContext(ctx context.Context, uid string) (*TaskInfo, error)
	// SwapIndexes swaps the documents, settings, and task history of each listed pair of indexes.
	SwapIndexes(param []*SwapIndexesParams) (*TaskInfo, error)
	// SwapIndexesWithContext swaps each listed pair of indexes with a context for cancellation.
	SwapIndexesWithContext(ctx context.Context, param []*SwapIndexesParams) (*TaskInfo, error)
// GenerateTenantToken generates a tenant token for multi-tenancy.
GenerateTenantToken(apiKeyUID string, searchRules map[string]interface{}, options *TenantTokenOptions) (string, error)
// CreateDump creates a database dump.
CreateDump() (*TaskInfo, error)
// CreateDumpWithContext creates a database dump with a context for cancellation.
CreateDumpWithContext(ctx context.Context) (*TaskInfo, error)
	// CreateSnapshot creates a database snapshot from Meilisearch.
	CreateSnapshot() (*TaskInfo, error)
	// CreateSnapshotWithContext creates a database snapshot with a context for cancellation.
	CreateSnapshotWithContext(ctx context.Context) (*TaskInfo, error)
// ExperimentalFeatures returns the experimental features manager.
ExperimentalFeatures() *ExperimentalFeatures
// Close closes the connection to the Meilisearch server.
Close()
}
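// Usage sketch (editor's addition, assuming this package's New constructor
// returns a ServiceManager):
//
//	client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey"))
//	defer client.Close()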
type ServiceReader interface {
// Index retrieves an IndexManager for a specific index.
Index(uid string) IndexManager
// GetIndex fetches the details of a specific index.
GetIndex(indexID string) (*IndexResult, error)
// GetIndexWithContext fetches the details of a specific index with a context for cancellation.
GetIndexWithContext(ctx context.Context, indexID string) (*IndexResult, error)
// GetRawIndex fetches the raw JSON representation of a specific index.
GetRawIndex(uid string) (map[string]interface{}, error)
// GetRawIndexWithContext fetches the raw JSON representation of a specific index with a context for cancellation.
GetRawIndexWithContext(ctx context.Context, uid string) (map[string]interface{}, error)
// ListIndexes lists all indexes.
ListIndexes(param *IndexesQuery) (*IndexesResults, error)
// ListIndexesWithContext lists all indexes with a context for cancellation.
ListIndexesWithContext(ctx context.Context, param *IndexesQuery) (*IndexesResults, error)
// GetRawIndexes fetches the raw JSON representation of all indexes.
GetRawIndexes(param *IndexesQuery) (map[string]interface{}, error)
// GetRawIndexesWithContext fetches the raw JSON representation of all indexes with a context for cancellation.
GetRawIndexesWithContext(ctx context.Context, param *IndexesQuery) (map[string]interface{}, error)
// MultiSearch performs a multi-index search.
MultiSearch(queries *MultiSearchRequest) (*MultiSearchResponse, error)
// MultiSearchWithContext performs a multi-index search with a context for cancellation.
MultiSearchWithContext(ctx context.Context, queries *MultiSearchRequest) (*MultiSearchResponse, error)
// GetStats fetches global stats.
GetStats() (*Stats, error)
// GetStatsWithContext fetches global stats with a context for cancellation.
GetStatsWithContext(ctx context.Context) (*Stats, error)
// Version fetches the version of the Meilisearch server.
Version() (*Version, error)
// VersionWithContext fetches the version of the Meilisearch server with a context for cancellation.
VersionWithContext(ctx context.Context) (*Version, error)
// Health checks the health of the Meilisearch server.
Health() (*Health, error)
// HealthWithContext checks the health of the Meilisearch server with a context for cancellation.
HealthWithContext(ctx context.Context) (*Health, error)
// IsHealthy checks if the Meilisearch server is healthy.
IsHealthy() bool
}
type KeyManager interface {
KeyReader
// CreateKey creates a new API key.
CreateKey(request *Key) (*Key, error)
// CreateKeyWithContext creates a new API key with a context for cancellation.
CreateKeyWithContext(ctx context.Context, request *Key) (*Key, error)
// UpdateKey updates a specific API key.
UpdateKey(keyOrUID string, request *Key) (*Key, error)
// UpdateKeyWithContext updates a specific API key with a context for cancellation.
UpdateKeyWithContext(ctx context.Context, keyOrUID string, request *Key) (*Key, error)
// DeleteKey deletes a specific API key.
DeleteKey(keyOrUID string) (bool, error)
// DeleteKeyWithContext deletes a specific API key with a context for cancellation.
DeleteKeyWithContext(ctx context.Context, keyOrUID string) (bool, error)
}
type KeyReader interface {
// GetKey fetches the details of a specific API key.
GetKey(identifier string) (*Key, error)
// GetKeyWithContext fetches the details of a specific API key with a context for cancellation.
GetKeyWithContext(ctx context.Context, identifier string) (*Key, error)
// GetKeys lists all API keys.
GetKeys(param *KeysQuery) (*KeysResults, error)
// GetKeysWithContext lists all API keys with a context for cancellation.
GetKeysWithContext(ctx context.Context, param *KeysQuery) (*KeysResults, error)
}
type TaskManager interface {
TaskReader
// CancelTasks cancels specific tasks.
CancelTasks(param *CancelTasksQuery) (*TaskInfo, error)
// CancelTasksWithContext cancels specific tasks with a context for cancellation.
CancelTasksWithContext(ctx context.Context, param *CancelTasksQuery) (*TaskInfo, error)
// DeleteTasks deletes specific tasks.
DeleteTasks(param *DeleteTasksQuery) (*TaskInfo, error)
// DeleteTasksWithContext deletes specific tasks with a context for cancellation.
DeleteTasksWithContext(ctx context.Context, param *DeleteTasksQuery) (*TaskInfo, error)
}
type TaskReader interface {
// GetTask retrieves a task by its UID.
GetTask(taskUID int64) (*Task, error)
// GetTaskWithContext retrieves a task by its UID using the provided context for cancellation.
GetTaskWithContext(ctx context.Context, taskUID int64) (*Task, error)
// GetTasks retrieves multiple tasks based on query parameters.
GetTasks(param *TasksQuery) (*TaskResult, error)
// GetTasksWithContext retrieves multiple tasks based on query parameters using the provided context for cancellation.
GetTasksWithContext(ctx context.Context, param *TasksQuery) (*TaskResult, error)
// WaitForTask waits for a task to complete by its UID with the given interval.
WaitForTask(taskUID int64, interval time.Duration) (*Task, error)
// WaitForTaskWithContext waits for a task to complete by its UID with the given interval using the provided context for cancellation.
WaitForTaskWithContext(ctx context.Context, taskUID int64, interval time.Duration) (*Task, error)
}
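// Usage sketch (editor's addition): list the ten most recent failed tasks of a
// hypothetical "movies" index through the TaskReader methods above.
//
//	res, err := client.GetTasks(&meilisearch.TasksQuery{
//		IndexUIDS: []string{"movies"},
//		Statuses:  []meilisearch.TaskStatus{meilisearch.TaskStatusFailed},
//		Limit:     10,
//	})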

2344
meilisearch_test.go Normal file

File diff suppressed because it is too large

119
options.go Normal file
View file

@ -0,0 +1,119 @@
package meilisearch
import (
"crypto/tls"
"net"
"net/http"
"time"
)
var (
defaultMeiliOpt = &meiliOpt{
client: &http.Client{
Transport: baseTransport(),
},
contentEncoding: &encodingOpt{
level: DefaultCompression,
},
retryOnStatus: map[int]bool{
502: true,
503: true,
504: true,
},
disableRetry: false,
maxRetries: 3,
}
)
type meiliOpt struct {
client *http.Client
apiKey string
contentEncoding *encodingOpt
retryOnStatus map[int]bool
disableRetry bool
maxRetries uint8
}
type encodingOpt struct {
encodingType ContentEncoding
level EncodingCompressionLevel
}
type Option func(*meiliOpt)
// WithCustomClient sets a custom http.Client.
func WithCustomClient(client *http.Client) Option {
return func(opt *meiliOpt) {
opt.client = client
}
}
// WithCustomClientWithTLS sets a custom http.Client configured with the given TLS settings.
func WithCustomClientWithTLS(tlsConfig *tls.Config) Option {
return func(opt *meiliOpt) {
trans := baseTransport()
trans.TLSClientConfig = tlsConfig
opt.client = &http.Client{Transport: trans}
}
}
// WithAPIKey sets the API key or master key used to authenticate requests.
//
// more: https://www.meilisearch.com/docs/reference/api/keys
func WithAPIKey(key string) Option {
return func(opt *meiliOpt) {
opt.apiKey = key
}
}
// WithContentEncoding sets the Content-Encoding header, indicating that request payloads
// are compressed with the given algorithm. Compression improves transfer speed and reduces
// bandwidth consumption by sending and receiving smaller payloads. The Accept-Encoding
// header, in contrast, indicates the compression algorithms the client understands.
//
// more: https://www.meilisearch.com/docs/reference/api/overview#content-encoding
func WithContentEncoding(encodingType ContentEncoding, level EncodingCompressionLevel) Option {
return func(opt *meiliOpt) {
opt.contentEncoding = &encodingOpt{
encodingType: encodingType,
level: level,
}
}
}
// WithCustomRetries sets the HTTP status codes that trigger a retry and the maximum number of retries (min: 1, max: 255).
func WithCustomRetries(retryOnStatus []int, maxRetries uint8) Option {
return func(opt *meiliOpt) {
opt.retryOnStatus = make(map[int]bool)
for _, status := range retryOnStatus {
opt.retryOnStatus[status] = true
}
if maxRetries == 0 {
maxRetries = 1
}
opt.maxRetries = maxRetries
}
}
// DisableRetries disables the retry logic in the client.
func DisableRetries() Option {
return func(opt *meiliOpt) {
opt.disableRetry = true
}
}
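// Usage sketch (editor's addition, assuming this package's New constructor accepts
// Option values): options compose left to right, so a hardened client might be built as
//
//	client := meilisearch.New(
//		"https://search.example.com",
//		meilisearch.WithAPIKey("searchKey"),
//		meilisearch.WithContentEncoding(meilisearch.GzipEncoding, meilisearch.BestSpeed),
//		meilisearch.WithCustomRetries([]int{502, 503, 504}, 5),
//	)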
func baseTransport() *http.Transport {
return &http.Transport{
Proxy: http.ProxyFromEnvironment,
DialContext: (&net.Dialer{
Timeout: 30 * time.Second,
KeepAlive: 30 * time.Second,
}).DialContext,
MaxIdleConns: 100,
MaxIdleConnsPerHost: 100,
IdleConnTimeout: 90 * time.Second,
TLSHandshakeTimeout: 10 * time.Second,
ExpectContinueTimeout: 1 * time.Second,
}
}

80
options_test.go Normal file
View file

@ -0,0 +1,80 @@
package meilisearch
import (
"crypto/tls"
"github.com/stretchr/testify/require"
"net/http"
"testing"
)
func TestOptions_WithCustomClient(t *testing.T) {
meili := setup(t, "", WithCustomClient(http.DefaultClient))
require.NotNil(t, meili)
m, ok := meili.(*meilisearch)
require.True(t, ok)
require.Equal(t, m.client.client, http.DefaultClient)
}
func TestOptions_WithCustomClientWithTLS(t *testing.T) {
tl := new(tls.Config)
meili := setup(t, "", WithCustomClientWithTLS(tl))
require.NotNil(t, meili)
m, ok := meili.(*meilisearch)
require.True(t, ok)
tr, ok := m.client.client.Transport.(*http.Transport)
require.True(t, ok)
require.Equal(t, tr.TLSClientConfig, tl)
}
func TestOptions_WithAPIKey(t *testing.T) {
meili := setup(t, "", WithAPIKey("foobar"))
require.NotNil(t, meili)
m, ok := meili.(*meilisearch)
require.True(t, ok)
require.Equal(t, m.client.apiKey, "foobar")
}
func TestOptions_WithContentEncoding(t *testing.T) {
meili := setup(t, "", WithContentEncoding(GzipEncoding, DefaultCompression))
require.NotNil(t, meili)
m, ok := meili.(*meilisearch)
require.True(t, ok)
require.Equal(t, m.client.contentEncoding, GzipEncoding)
require.NotNil(t, m.client.encoder)
}
func TestOptions_WithCustomRetries(t *testing.T) {
meili := setup(t, "", WithCustomRetries([]int{http.StatusInternalServerError}, 10))
require.NotNil(t, meili)
m, ok := meili.(*meilisearch)
require.True(t, ok)
require.True(t, m.client.retryOnStatus[http.StatusInternalServerError])
require.Equal(t, m.client.maxRetries, uint8(10))
meili = setup(t, "", WithCustomRetries([]int{http.StatusInternalServerError}, 0))
require.NotNil(t, meili)
m, ok = meili.(*meilisearch)
require.True(t, ok)
require.True(t, m.client.retryOnStatus[http.StatusInternalServerError])
require.Equal(t, m.client.maxRetries, uint8(1))
}
func TestOptions_DisableRetries(t *testing.T) {
meili := setup(t, "", DisableRetries())
require.NotNil(t, meili)
m, ok := meili.(*meilisearch)
require.True(t, ok)
require.Equal(t, m.client.disableRetry, true)
}

1619
testdata/movies.json vendored Normal file

File diff suppressed because it is too large

628
types.go Normal file
View file

@ -0,0 +1,628 @@
package meilisearch
import (
"time"
"github.com/golang-jwt/jwt/v4"
)
type (
ContentEncoding string
EncodingCompressionLevel int
)
const (
DefaultLimit int64 = 20
contentTypeJSON string = "application/json"
contentTypeNDJSON string = "application/x-ndjson"
contentTypeCSV string = "text/csv"
GzipEncoding ContentEncoding = "gzip"
DeflateEncoding ContentEncoding = "deflate"
BrotliEncoding ContentEncoding = "br"
NoCompression EncodingCompressionLevel = 0
BestSpeed EncodingCompressionLevel = 1
BestCompression EncodingCompressionLevel = 9
DefaultCompression EncodingCompressionLevel = -1
HuffmanOnlyCompression EncodingCompressionLevel = -2
ConstantCompression EncodingCompressionLevel = -2
StatelessCompression EncodingCompressionLevel = -3
nullBody = "null"
)
func (c ContentEncoding) String() string { return string(c) }
func (c ContentEncoding) IsZero() bool { return c == "" }
func (c EncodingCompressionLevel) Int() int { return int(c) }
type IndexConfig struct {
// Uid is the unique identifier of a given index.
Uid string
// PrimaryKey is optional
PrimaryKey string
}
type IndexResult struct {
UID string `json:"uid"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
PrimaryKey string `json:"primaryKey,omitempty"`
IndexManager
}
// IndexesResults wraps the results returned when listing multiple indexes.
type IndexesResults struct {
Results []*IndexResult `json:"results"`
Offset int64 `json:"offset"`
Limit int64 `json:"limit"`
Total int64 `json:"total"`
}
type IndexesQuery struct {
Limit int64
Offset int64
}
// Settings is the type that represents the settings in meilisearch
type Settings struct {
RankingRules []string `json:"rankingRules,omitempty"`
DistinctAttribute *string `json:"distinctAttribute,omitempty"`
SearchableAttributes []string `json:"searchableAttributes,omitempty"`
Dictionary []string `json:"dictionary,omitempty"`
SearchCutoffMs int64 `json:"searchCutoffMs,omitempty"`
ProximityPrecision ProximityPrecisionType `json:"proximityPrecision,omitempty"`
SeparatorTokens []string `json:"separatorTokens,omitempty"`
NonSeparatorTokens []string `json:"nonSeparatorTokens,omitempty"`
DisplayedAttributes []string `json:"displayedAttributes,omitempty"`
StopWords []string `json:"stopWords,omitempty"`
Synonyms map[string][]string `json:"synonyms,omitempty"`
FilterableAttributes []string `json:"filterableAttributes,omitempty"`
SortableAttributes []string `json:"sortableAttributes,omitempty"`
LocalizedAttributes []*LocalizedAttributes `json:"localizedAttributes,omitempty"`
TypoTolerance *TypoTolerance `json:"typoTolerance,omitempty"`
Pagination *Pagination `json:"pagination,omitempty"`
Faceting *Faceting `json:"faceting,omitempty"`
Embedders map[string]Embedder `json:"embedders,omitempty"`
PrefixSearch *string `json:"prefixSearch,omitempty"`
FacetSearch bool `json:"facetSearch,omitempty"`
}
type LocalizedAttributes struct {
Locales []string `json:"locales,omitempty"`
AttributePatterns []string `json:"attributePatterns,omitempty"`
}
// TypoTolerance is the type that represents the typo tolerance setting in meilisearch
type TypoTolerance struct {
Enabled bool `json:"enabled"`
MinWordSizeForTypos MinWordSizeForTypos `json:"minWordSizeForTypos,omitempty"`
DisableOnWords []string `json:"disableOnWords,omitempty"`
DisableOnAttributes []string `json:"disableOnAttributes,omitempty"`
}
// MinWordSizeForTypos is the type that represents the minWordSizeForTypos setting in the typo tolerance setting in meilisearch
type MinWordSizeForTypos struct {
OneTypo int64 `json:"oneTypo,omitempty"`
TwoTypos int64 `json:"twoTypos,omitempty"`
}
// Pagination is the type that represents the pagination setting in meilisearch
type Pagination struct {
MaxTotalHits int64 `json:"maxTotalHits"`
}
// Faceting is the type that represents the faceting setting in meilisearch
type Faceting struct {
MaxValuesPerFacet int64 `json:"maxValuesPerFacet"`
	// SortFacetValuesBy maps a facet name (or "*" for all facets) to a sort order: alpha or count.
SortFacetValuesBy map[string]SortFacetType `json:"sortFacetValuesBy"`
}
// Embedder represents a unified configuration for various embedder types.
type Embedder struct {
Source string `json:"source"` // The type of embedder: "openAi", "huggingFace", "userProvided", "rest", "ollama"
Model string `json:"model,omitempty"` // Optional for "openAi", "huggingFace", "ollama"
APIKey string `json:"apiKey,omitempty"` // Optional for "openAi", "rest", "ollama"
DocumentTemplate string `json:"documentTemplate,omitempty"` // Optional for most embedders
Dimensions int `json:"dimensions,omitempty"` // Optional for "openAi", "rest", "userProvided", "ollama"
Distribution *Distribution `json:"distribution,omitempty"` // Optional for all embedders
URL string `json:"url,omitempty"` // Optional for "openAi", "rest", "ollama"
Revision string `json:"revision,omitempty"` // Optional for "huggingFace"
Request map[string]interface{} `json:"request,omitempty"` // Optional for "rest"
Response map[string]interface{} `json:"response,omitempty"` // Optional for "rest"
Headers map[string]string `json:"headers,omitempty"` // Optional for "rest"
}
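// Configuration sketch (editor's addition): an entry for Settings.Embedders using the
// "openAi" source. The model, key, and dimensions are placeholders, not defaults.
//
//	embedders := map[string]meilisearch.Embedder{
//		"default": {
//			Source:     "openAi",
//			Model:      "text-embedding-3-small",
//			APIKey:     "<OPENAI_API_KEY>",
//			Dimensions: 1536,
//		},
//	}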
// Distribution represents a statistical distribution with mean and standard deviation (sigma).
type Distribution struct {
Mean float64 `json:"mean"` // Mean of the distribution
Sigma float64 `json:"sigma"` // Sigma (standard deviation) of the distribution
}
// Version is the type that represents the version information of meilisearch
type Version struct {
CommitSha string `json:"commitSha"`
CommitDate string `json:"commitDate"`
PkgVersion string `json:"pkgVersion"`
}
// StatsIndex is the type that represents the stats of an index in meilisearch
type StatsIndex struct {
NumberOfDocuments int64 `json:"numberOfDocuments"`
IsIndexing bool `json:"isIndexing"`
FieldDistribution map[string]int64 `json:"fieldDistribution"`
RawDocumentDbSize int64 `json:"rawDocumentDbSize"`
AvgDocumentSize int64 `json:"avgDocumentSize"`
NumberOfEmbeddedDocuments int64 `json:"numberOfEmbeddedDocuments"`
NumberOfEmbeddings int64 `json:"numberOfEmbeddings"`
}
// Stats is the type that represents the global stats of a Meilisearch instance
type Stats struct {
DatabaseSize int64 `json:"databaseSize"`
UsedDatabaseSize int64 `json:"usedDatabaseSize"`
LastUpdate time.Time `json:"lastUpdate"`
Indexes map[string]StatsIndex `json:"indexes"`
}
type (
TaskType string // TaskType is the type of a task
SortFacetType string // SortFacetType is type of facet sorting, alpha or count
TaskStatus string // TaskStatus is the status of a task.
ProximityPrecisionType string // ProximityPrecisionType accepts one of the ByWord or ByAttribute
MatchingStrategy string // MatchingStrategy one of the Last, All, Frequency
)
const (
// Last returns documents containing all the query terms first. If there are not enough results containing all
// query terms to meet the requested limit, Meilisearch will remove one query term at a time,
// starting from the end of the query.
Last MatchingStrategy = "last"
// All only returns documents that contain all query terms. Meilisearch will not match any more documents even
// if there aren't enough to meet the requested limit.
All MatchingStrategy = "all"
	// Frequency returns documents containing all the query terms first. If there are not enough results containing
	// all query terms to meet the requested limit, Meilisearch will remove one query term at a time, starting
	// with the word that is the most frequent in the dataset. Frequency effectively gives more weight to terms
	// that appear less frequently in a set of results.
Frequency MatchingStrategy = "frequency"
)
const (
	// ByWord calculates the precise distance between query terms. Higher precision, but may lead to longer
	// indexing time. This is the default setting.
	ByWord ProximityPrecisionType = "byWord"
	// ByAttribute determines whether multiple query terms are present in the same attribute.
	// Lower precision, but shorter indexing time.
	ByAttribute ProximityPrecisionType = "byAttribute"
)
const (
	// TaskStatusUnknown is the zero-value TaskStatus; it should never be returned by the API
TaskStatusUnknown TaskStatus = "unknown"
// TaskStatusEnqueued the task request has been received and will be processed soon
TaskStatusEnqueued TaskStatus = "enqueued"
// TaskStatusProcessing the task is being processed
TaskStatusProcessing TaskStatus = "processing"
// TaskStatusSucceeded the task has been successfully processed
TaskStatusSucceeded TaskStatus = "succeeded"
// TaskStatusFailed a failure occurred when processing the task, no changes were made to the database
TaskStatusFailed TaskStatus = "failed"
// TaskStatusCanceled the task was canceled
TaskStatusCanceled TaskStatus = "canceled"
)
const (
SortFacetTypeAlpha SortFacetType = "alpha"
SortFacetTypeCount SortFacetType = "count"
)
const (
// TaskTypeIndexCreation represents an index creation
TaskTypeIndexCreation TaskType = "indexCreation"
// TaskTypeIndexUpdate represents an index update
TaskTypeIndexUpdate TaskType = "indexUpdate"
// TaskTypeIndexDeletion represents an index deletion
TaskTypeIndexDeletion TaskType = "indexDeletion"
// TaskTypeIndexSwap represents an index swap
TaskTypeIndexSwap TaskType = "indexSwap"
// TaskTypeDocumentAdditionOrUpdate represents a document addition or update in an index
TaskTypeDocumentAdditionOrUpdate TaskType = "documentAdditionOrUpdate"
// TaskTypeDocumentDeletion represents a document deletion from an index
TaskTypeDocumentDeletion TaskType = "documentDeletion"
// TaskTypeSettingsUpdate represents a settings update
TaskTypeSettingsUpdate TaskType = "settingsUpdate"
// TaskTypeDumpCreation represents a dump creation
TaskTypeDumpCreation TaskType = "dumpCreation"
// TaskTypeTaskCancelation represents a task cancelation
TaskTypeTaskCancelation TaskType = "taskCancelation"
// TaskTypeTaskDeletion represents a task deletion
TaskTypeTaskDeletion TaskType = "taskDeletion"
// TaskTypeSnapshotCreation represents a snapshot creation
TaskTypeSnapshotCreation TaskType = "snapshotCreation"
)
// Task indicates information about a task resource
//
// Documentation: https://www.meilisearch.com/docs/learn/advanced/asynchronous_operations
type Task struct {
Status TaskStatus `json:"status"`
UID int64 `json:"uid,omitempty"`
TaskUID int64 `json:"taskUid,omitempty"`
IndexUID string `json:"indexUid"`
Type TaskType `json:"type"`
Error meilisearchApiError `json:"error,omitempty"`
Duration string `json:"duration,omitempty"`
EnqueuedAt time.Time `json:"enqueuedAt"`
StartedAt time.Time `json:"startedAt,omitempty"`
FinishedAt time.Time `json:"finishedAt,omitempty"`
Details Details `json:"details,omitempty"`
CanceledBy int64 `json:"canceledBy,omitempty"`
}
// TaskInfo indicates information regarding a task returned by an asynchronous method
//
// Documentation: https://www.meilisearch.com/docs/reference/api/tasks#tasks
type TaskInfo struct {
Status TaskStatus `json:"status"`
TaskUID int64 `json:"taskUid"`
IndexUID string `json:"indexUid"`
Type TaskType `json:"type"`
EnqueuedAt time.Time `json:"enqueuedAt"`
}
// TasksQuery is the set of filters available to send as query parameters
type TasksQuery struct {
UIDS []int64
Limit int64
From int64
IndexUIDS []string
Statuses []TaskStatus
Types []TaskType
CanceledBy []int64
BeforeEnqueuedAt time.Time
AfterEnqueuedAt time.Time
BeforeStartedAt time.Time
AfterStartedAt time.Time
BeforeFinishedAt time.Time
AfterFinishedAt time.Time
Reverse bool
}
// CancelTasksQuery is the set of filters available to send as query parameters
type CancelTasksQuery struct {
UIDS []int64
IndexUIDS []string
Statuses []TaskStatus
Types []TaskType
BeforeEnqueuedAt time.Time
AfterEnqueuedAt time.Time
BeforeStartedAt time.Time
AfterStartedAt time.Time
}
// DeleteTasksQuery is the set of filters available to send as query parameters
type DeleteTasksQuery struct {
UIDS []int64
IndexUIDS []string
Statuses []TaskStatus
Types []TaskType
CanceledBy []int64
BeforeEnqueuedAt time.Time
AfterEnqueuedAt time.Time
BeforeStartedAt time.Time
AfterStartedAt time.Time
BeforeFinishedAt time.Time
AfterFinishedAt time.Time
}
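// Usage sketch (editor's addition): prune succeeded tasks that finished more than a
// month ago, using the filters defined above.
//
//	task, err := client.DeleteTasks(&meilisearch.DeleteTasksQuery{
//		Statuses:         []meilisearch.TaskStatus{meilisearch.TaskStatusSucceeded},
//		BeforeFinishedAt: time.Now().AddDate(0, -1, 0),
//	})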
type Details struct {
ReceivedDocuments int64 `json:"receivedDocuments,omitempty"`
IndexedDocuments int64 `json:"indexedDocuments,omitempty"`
DeletedDocuments int64 `json:"deletedDocuments,omitempty"`
PrimaryKey string `json:"primaryKey,omitempty"`
ProvidedIds int64 `json:"providedIds,omitempty"`
RankingRules []string `json:"rankingRules,omitempty"`
DistinctAttribute *string `json:"distinctAttribute,omitempty"`
SearchableAttributes []string `json:"searchableAttributes,omitempty"`
DisplayedAttributes []string `json:"displayedAttributes,omitempty"`
StopWords []string `json:"stopWords,omitempty"`
Synonyms map[string][]string `json:"synonyms,omitempty"`
FilterableAttributes []string `json:"filterableAttributes,omitempty"`
SortableAttributes []string `json:"sortableAttributes,omitempty"`
TypoTolerance *TypoTolerance `json:"typoTolerance,omitempty"`
Pagination *Pagination `json:"pagination,omitempty"`
Faceting *Faceting `json:"faceting,omitempty"`
MatchedTasks int64 `json:"matchedTasks,omitempty"`
CanceledTasks int64 `json:"canceledTasks,omitempty"`
DeletedTasks int64 `json:"deletedTasks,omitempty"`
OriginalFilter string `json:"originalFilter,omitempty"`
Swaps []SwapIndexesParams `json:"swaps,omitempty"`
DumpUid string `json:"dumpUid,omitempty"`
}
// TaskResult wraps the results returned when listing multiple tasks
type TaskResult struct {
Results []Task `json:"results"`
Limit int64 `json:"limit"`
From int64 `json:"from"`
Next int64 `json:"next"`
Total int64 `json:"total"`
}
// Key allows the user to connect to the meilisearch instance
//
// Documentation: https://www.meilisearch.com/docs/learn/security/master_api_keys#protecting-a-meilisearch-instance
type Key struct {
Name string `json:"name"`
Description string `json:"description"`
Key string `json:"key,omitempty"`
UID string `json:"uid,omitempty"`
Actions []string `json:"actions,omitempty"`
Indexes []string `json:"indexes,omitempty"`
CreatedAt time.Time `json:"createdAt,omitempty"`
UpdatedAt time.Time `json:"updatedAt,omitempty"`
ExpiresAt time.Time `json:"expiresAt"`
}
// KeyParsed is an intermediate structure used to send ExpiresAt in the exact ISO-8601 format expected by meilisearch
type KeyParsed struct {
Name string `json:"name"`
Description string `json:"description"`
UID string `json:"uid,omitempty"`
Actions []string `json:"actions,omitempty"`
Indexes []string `json:"indexes,omitempty"`
ExpiresAt *string `json:"expiresAt"`
}
// KeyUpdate is the structure used to update a Key
type KeyUpdate struct {
Name string `json:"name,omitempty"`
Description string `json:"description,omitempty"`
}
// KeysResults wraps the results returned when listing multiple keys
type KeysResults struct {
Results []Key `json:"results"`
Offset int64 `json:"offset"`
Limit int64 `json:"limit"`
Total int64 `json:"total"`
}
type KeysQuery struct {
Limit int64
Offset int64
}
// TenantTokenOptions carries the information needed to create a tenant token.
//
// ExpiresAt is the time.Time when the token will expire.
// Note that if an ExpiresAt value is included, it should be in UTC time.
// APIKey is the parent API key of the token.
type TenantTokenOptions struct {
APIKey string
ExpiresAt time.Time
}
// TenantTokenClaims is the custom Claims structure used to create a Tenant Token
type TenantTokenClaims struct {
APIKeyUID string `json:"apiKeyUid"`
SearchRules interface{} `json:"searchRules"`
jwt.RegisteredClaims
}
//
// Request/Response
//
// CreateIndexRequest is the request body for the create index method
type CreateIndexRequest struct {
UID string `json:"uid,omitempty"`
PrimaryKey string `json:"primaryKey,omitempty"`
}
// SearchRequest holds the parameters for a search query.
// This struct is serialized before being sent with the request.
//
// Documentation: https://www.meilisearch.com/docs/reference/api/search#search-parameters
type SearchRequest struct {
Offset int64 `json:"offset,omitempty"`
Limit int64 `json:"limit,omitempty"`
AttributesToRetrieve []string `json:"attributesToRetrieve,omitempty"`
AttributesToSearchOn []string `json:"attributesToSearchOn,omitempty"`
AttributesToCrop []string `json:"attributesToCrop,omitempty"`
CropLength int64 `json:"cropLength,omitempty"`
CropMarker string `json:"cropMarker,omitempty"`
AttributesToHighlight []string `json:"attributesToHighlight,omitempty"`
HighlightPreTag string `json:"highlightPreTag,omitempty"`
HighlightPostTag string `json:"highlightPostTag,omitempty"`
MatchingStrategy MatchingStrategy `json:"matchingStrategy,omitempty"`
Filter interface{} `json:"filter,omitempty"`
ShowMatchesPosition bool `json:"showMatchesPosition,omitempty"`
ShowRankingScore bool `json:"showRankingScore,omitempty"`
ShowRankingScoreDetails bool `json:"showRankingScoreDetails,omitempty"`
Facets []string `json:"facets,omitempty"`
Sort []string `json:"sort,omitempty"`
Vector []float32 `json:"vector,omitempty"`
HitsPerPage int64 `json:"hitsPerPage,omitempty"`
Page int64 `json:"page,omitempty"`
IndexUID string `json:"indexUid,omitempty"`
Query string `json:"q"`
Distinct string `json:"distinct,omitempty"`
Hybrid *SearchRequestHybrid `json:"hybrid"`
RetrieveVectors bool `json:"retrieveVectors,omitempty"`
RankingScoreThreshold float64 `json:"rankingScoreThreshold,omitempty"`
FederationOptions *SearchFederationOptions `json:"federationOptions,omitempty"`
Locates []string `json:"locales,omitempty"`
}
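// Usage sketch (editor's addition): a filtered, sorted search. The attribute names are
// illustrative and must be declared filterable/sortable in the index settings first.
//
//	res, err := client.Index("movies").Search("wonder", &meilisearch.SearchRequest{
//		Filter: "genres = family AND release_date > 1577836800",
//		Sort:   []string{"release_date:desc"},
//		Limit:  20,
//	})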
type SearchFederationOptions struct {
Weight float64 `json:"weight"`
}
type SearchRequestHybrid struct {
SemanticRatio float64 `json:"semanticRatio,omitempty"`
Embedder string `json:"embedder"`
}
type MultiSearchRequest struct {
Federation *MultiSearchFederation `json:"federation,omitempty"`
Queries []*SearchRequest `json:"queries"`
}
type MultiSearchFederation struct {
Offset int64 `json:"offset,omitempty"`
Limit int64 `json:"limit,omitempty"`
}
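// Usage sketch (editor's addition): a federated multi-search merging two indexes into a
// single ranked list; with Federation set, the response carries combined Hits instead of
// per-query Results (see MultiSearchResponse below).
//
//	res, err := client.MultiSearch(&meilisearch.MultiSearchRequest{
//		Federation: &meilisearch.MultiSearchFederation{Limit: 10},
//		Queries: []*meilisearch.SearchRequest{
//			{IndexUID: "movies", Query: "batman"},
//			{IndexUID: "comics", Query: "batman"},
//		},
//	})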
// SearchResponse is the response body for the search method
type SearchResponse struct {
Hits []interface{} `json:"hits"`
EstimatedTotalHits int64 `json:"estimatedTotalHits,omitempty"`
Offset int64 `json:"offset,omitempty"`
Limit int64 `json:"limit,omitempty"`
ProcessingTimeMs int64 `json:"processingTimeMs"`
Query string `json:"query"`
FacetDistribution interface{} `json:"facetDistribution,omitempty"`
TotalHits int64 `json:"totalHits,omitempty"`
HitsPerPage int64 `json:"hitsPerPage,omitempty"`
Page int64 `json:"page,omitempty"`
TotalPages int64 `json:"totalPages,omitempty"`
FacetStats interface{} `json:"facetStats,omitempty"`
IndexUID string `json:"indexUid,omitempty"`
}
type MultiSearchResponse struct {
Results []SearchResponse `json:"results,omitempty"`
Hits []interface{} `json:"hits,omitempty"`
ProcessingTimeMs int64 `json:"processingTimeMs,omitempty"`
Offset int64 `json:"offset,omitempty"`
Limit int64 `json:"limit,omitempty"`
EstimatedTotalHits int64 `json:"estimatedTotalHits,omitempty"`
SemanticHitCount int64 `json:"semanticHitCount,omitempty"`
}
type FacetSearchRequest struct {
FacetName string `json:"facetName,omitempty"`
FacetQuery string `json:"facetQuery,omitempty"`
Q string `json:"q,omitempty"`
Filter string `json:"filter,omitempty"`
MatchingStrategy string `json:"matchingStrategy,omitempty"`
AttributesToSearchOn []string `json:"attributesToSearchOn,omitempty"`
}
type FacetSearchResponse struct {
FacetHits []interface{} `json:"facetHits"`
FacetQuery string `json:"facetQuery"`
ProcessingTimeMs int64 `json:"processingTimeMs"`
}
// DocumentQuery is the request body for the get one document method
type DocumentQuery struct {
Fields []string `json:"fields,omitempty"`
}
// DocumentsQuery is the request body for the list documents method
type DocumentsQuery struct {
Offset int64 `json:"offset,omitempty"`
Limit int64 `json:"limit,omitempty"`
Fields []string `json:"fields,omitempty"`
Filter interface{} `json:"filter,omitempty"`
}
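// Usage sketch (editor's addition): page through documents while projecting a subset of
// fields, as DocumentsQuery above allows.
//
//	var docs meilisearch.DocumentsResult
//	err := client.Index("movies").GetDocuments(&meilisearch.DocumentsQuery{
//		Limit:  20,
//		Offset: 40,
//		Fields: []string{"id", "title"},
//	}, &docs)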
// SimilarDocumentQuery holds the query parameters for the similar documents endpoint
type SimilarDocumentQuery struct {
Id interface{} `json:"id,omitempty"`
Embedder string `json:"embedder"`
AttributesToRetrieve []string `json:"attributesToRetrieve,omitempty"`
Offset int64 `json:"offset,omitempty"`
Limit int64 `json:"limit,omitempty"`
Filter string `json:"filter,omitempty"`
ShowRankingScore bool `json:"showRankingScore,omitempty"`
ShowRankingScoreDetails bool `json:"showRankingScoreDetails,omitempty"`
RankingScoreThreshold float64 `json:"rankingScoreThreshold,omitempty"`
RetrieveVectors bool `json:"retrieveVectors,omitempty"`
}
type SimilarDocumentResult struct {
Hits []interface{} `json:"hits,omitempty"`
ID string `json:"id,omitempty"`
ProcessingTimeMS int64 `json:"processingTimeMs,omitempty"`
Limit int64 `json:"limit,omitempty"`
Offset int64 `json:"offset,omitempty"`
EstimatedTotalHits int64 `json:"estimatedTotalHits,omitempty"`
}
type CsvDocumentsQuery struct {
PrimaryKey string `json:"primaryKey,omitempty"`
CsvDelimiter string `json:"csvDelimiter,omitempty"`
}
type DocumentsResult struct {
Results []map[string]interface{} `json:"results"`
Limit int64 `json:"limit"`
Offset int64 `json:"offset"`
Total int64 `json:"total"`
}
type UpdateDocumentByFunctionRequest struct {
Filter string `json:"filter,omitempty"`
Function string `json:"function"`
Context map[string]interface{} `json:"context,omitempty"`
}
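// Usage sketch (editor's addition): the edit-documents-by-function endpoint (behind the
// editDocumentsByFunction experimental feature, see below) takes a Rhai expression, e.g.
//
//	req := &meilisearch.UpdateDocumentByFunctionRequest{
//		Filter:   "genres = comedy",
//		Function: "doc.title = doc.title.to_upper()",
//	}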
// ExperimentalFeaturesBase is the request body for updating experimental features; nil fields are left unchanged.
type ExperimentalFeaturesBase struct {
LogsRoute *bool `json:"logsRoute,omitempty"`
Metrics *bool `json:"metrics,omitempty"`
EditDocumentsByFunction *bool `json:"editDocumentsByFunction,omitempty"`
ContainsFilter *bool `json:"containsFilter,omitempty"`
}
// ExperimentalFeaturesResult represents the experimental features returned by the API.
type ExperimentalFeaturesResult struct {
LogsRoute bool `json:"logsRoute"`
Metrics bool `json:"metrics"`
EditDocumentsByFunction bool `json:"editDocumentsByFunction"`
ContainsFilter bool `json:"containsFilter"`
}
type SwapIndexesParams struct {
Indexes []string `json:"indexes"`
}
// RawType is an alias for a raw []byte
type RawType []byte
// Health is the response body of the meilisearch health endpoint
type Health struct {
Status string `json:"status"`
}
// UpdateIndexRequest is the request body for updating an index's primary key
type UpdateIndexRequest struct {
PrimaryKey string `json:"primaryKey"`
}
// Unknown is an arbitrary JSON object of unknown shape
type Unknown map[string]interface{}
// UnmarshalJSON supports json.Unmarshaler interface
func (b *RawType) UnmarshalJSON(data []byte) error {
*b = data
return nil
}
// MarshalJSON supports json.Marshaler interface
func (b RawType) MarshalJSON() ([]byte, error) {
return b, nil
}
func (s *SearchRequest) validate() {
if s.Hybrid != nil && s.Hybrid.Embedder == "" {
s.Hybrid.Embedder = "default"
}
}

8346
types_easyjson.go Normal file

File diff suppressed because it is too large

42
types_test.go Normal file
View file

@ -0,0 +1,42 @@
package meilisearch
import (
"github.com/stretchr/testify/assert"
"testing"
)
func TestRawType_UnmarshalJSON(t *testing.T) {
var r RawType
data := []byte(`"example"`)
err := r.UnmarshalJSON(data)
assert.NoError(t, err)
assert.Equal(t, RawType(`"example"`), r)
data = []byte(`""`)
err = r.UnmarshalJSON(data)
assert.NoError(t, err)
assert.Equal(t, RawType(`""`), r)
data = []byte(`{invalid}`)
err = r.UnmarshalJSON(data)
assert.NoError(t, err)
assert.Equal(t, RawType(`{invalid}`), r)
}
func TestRawType_MarshalJSON(t *testing.T) {
r := RawType(`"example"`)
data, err := r.MarshalJSON()
assert.NoError(t, err)
assert.Equal(t, []byte(`"example"`), data)
r = RawType(`""`)
data, err = r.MarshalJSON()
assert.NoError(t, err)
assert.Equal(t, []byte(`""`), data)
r = RawType(`{random}`)
data, err = r.MarshalJSON()
assert.NoError(t, err)
assert.Equal(t, []byte(`{random}`), data)
}

13
version.go Normal file
View file

@ -0,0 +1,13 @@
package meilisearch
import "fmt"
const VERSION = "0.32.0"
func GetQualifiedVersion() (qualifiedVersion string) {
return getQualifiedVersion(VERSION)
}
func getQualifiedVersion(version string) (qualifiedVersion string) {
return fmt.Sprintf("Meilisearch Go (v%s)", version)
}

30
version_test.go Normal file
View file

@ -0,0 +1,30 @@
package meilisearch
import (
"fmt"
"regexp"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestVersion_GetQualifiedVersion(t *testing.T) {
version := GetQualifiedVersion()
require.NotNil(t, version)
require.Equal(t, version, fmt.Sprintf("Meilisearch Go (v%s)", VERSION))
}
func TestVersion_qualifiedVersionFormat(t *testing.T) {
version := getQualifiedVersion("2.2.5")
require.NotNil(t, version)
require.Equal(t, version, "Meilisearch Go (v2.2.5)")
}
func TestVersion_constVERSIONFormat(t *testing.T) {
match, _ := regexp.MatchString("[0-9]+.[0-9]+.[0-9]+", VERSION)
assert.True(t, match)
}