Adding upstream version 2.5.1.
Signed-off-by: Daniel Baumann <daniel@debian.org>
parent c71cb8b61d
commit 982828099e
783 changed files with 150650 additions and 0 deletions
analysis/token/elision/elision.go (new file, 77 lines)

@@ -0,0 +1,77 @@
// Copyright (c) 2014 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package elision

import (
	"fmt"
	"unicode/utf8"

	"github.com/blevesearch/bleve/v2/analysis"
	"github.com/blevesearch/bleve/v2/registry"
)

const Name = "elision"

const RightSingleQuotationMark = '’'
const Apostrophe = '\''

type ElisionFilter struct {
	articles analysis.TokenMap
}

func NewElisionFilter(articles analysis.TokenMap) *ElisionFilter {
	return &ElisionFilter{
		articles: articles,
	}
}

func (s *ElisionFilter) Filter(input analysis.TokenStream) analysis.TokenStream {
	for _, token := range input {
		term := token.Term
		for i := 0; i < len(term); {
			r, size := utf8.DecodeRune(term[i:])
			if r == Apostrophe || r == RightSingleQuotationMark {
				// see if the prefix matches one of the articles
				prefix := term[0:i]
				_, articleMatch := s.articles[string(prefix)]
				if articleMatch {
					token.Term = term[i+size:]
					break
				}
			}
			i += size
		}
	}
	return input
}

func ElisionFilterConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.TokenFilter, error) {
	articlesTokenMapName, ok := config["articles_token_map"].(string)
	if !ok {
		return nil, fmt.Errorf("must specify articles_token_map")
	}
	articlesTokenMap, err := cache.TokenMapNamed(articlesTokenMapName)
	if err != nil {
		return nil, fmt.Errorf("error building elision filter: %v", err)
	}
	return NewElisionFilter(articlesTokenMap), nil
}

func init() {
	err := registry.RegisterTokenFilter(Name, ElisionFilterConstructor)
	if err != nil {
		panic(err)
	}
}
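For reference, a minimal sketch of using the new filter directly, without going through the registry; it assumes the NewTokenMap/AddToken helpers from the bleve analysis package, and the article list ("l", "d") is only illustrative:

package main

import (
	"fmt"

	"github.com/blevesearch/bleve/v2/analysis"
	"github.com/blevesearch/bleve/v2/analysis/token/elision"
)

func main() {
	// Token map of article prefixes to strip before an apostrophe.
	articles := analysis.NewTokenMap()
	articles.AddToken("l")
	articles.AddToken("d")

	filter := elision.NewElisionFilter(articles)

	input := analysis.TokenStream{
		&analysis.Token{Term: []byte("l'avion")},   // plain apostrophe
		&analysis.Token{Term: []byte("d’accord")},  // right single quotation mark
	}
	for _, tok := range filter.Filter(input) {
		fmt.Println(string(tok.Term)) // avion, accord
	}
}

Both the plain apostrophe and the right single quotation mark are stripped together with the matching article prefix, corresponding to the two rune constants defined in the file above.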
analysis/token/elision/elision_test.go (new file, 85 lines)

@@ -0,0 +1,85 @@
// Copyright (c) 2014 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package elision

import (
	"reflect"
	"testing"

	"github.com/blevesearch/bleve/v2/analysis"
	"github.com/blevesearch/bleve/v2/analysis/tokenmap"
	"github.com/blevesearch/bleve/v2/registry"
)

func TestElisionFilter(t *testing.T) {

	tests := []struct {
		input  analysis.TokenStream
		output analysis.TokenStream
	}{
		{
			input: analysis.TokenStream{
				&analysis.Token{
					Term: []byte("ar" + string(Apostrophe) + "word"),
				},
			},
			output: analysis.TokenStream{
				&analysis.Token{
					Term: []byte("word"),
				},
			},
		},
		{
			input: analysis.TokenStream{
				&analysis.Token{
					Term: []byte("ar" + string(RightSingleQuotationMark) + "word"),
				},
			},
			output: analysis.TokenStream{
				&analysis.Token{
					Term: []byte("word"),
				},
			},
		},
	}

	cache := registry.NewCache()

	articleListConfig := map[string]interface{}{
		"type":   tokenmap.Name,
		"tokens": []interface{}{"ar"},
	}
	_, err := cache.DefineTokenMap("articles_test", articleListConfig)
	if err != nil {
		t.Fatal(err)
	}

	elisionConfig := map[string]interface{}{
		"type":               "elision",
		"articles_token_map": "articles_test",
	}
	elisionFilter, err := cache.DefineTokenFilter("elision_test", elisionConfig)
	if err != nil {
		t.Fatal(err)
	}

	for _, test := range tests {

		actual := elisionFilter.Filter(test.input)
		if !reflect.DeepEqual(actual, test.output) {
			t.Errorf("expected %s, got %s", test.output[0].Term, actual[0].Term)
		}
	}
}
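In normal use the filter is reached through the registry entry added in init(), typically from an index mapping rather than a bare registry cache as in the test. The following is a rough sketch of that wiring under the assumption that bleve v2's AddCustomTokenMap/AddCustomTokenFilter/AddCustomAnalyzer helpers and the stock custom analyzer and unicode tokenizer are used; the names articles_fr, elision_fr, and fr_no_elision are hypothetical:

package main

import (
	"log"

	"github.com/blevesearch/bleve/v2"
	"github.com/blevesearch/bleve/v2/analysis/analyzer/custom"
	"github.com/blevesearch/bleve/v2/analysis/token/elision"
	"github.com/blevesearch/bleve/v2/analysis/tokenizer/unicode"
	"github.com/blevesearch/bleve/v2/analysis/tokenmap"
)

func main() {
	indexMapping := bleve.NewIndexMapping()

	// Token map of elided articles; the "tokens" key mirrors the test config,
	// the article list itself is an illustrative French example.
	err := indexMapping.AddCustomTokenMap("articles_fr", map[string]interface{}{
		"type":   tokenmap.Name,
		"tokens": []interface{}{"l", "d", "j", "qu"},
	})
	if err != nil {
		log.Fatal(err)
	}

	// Elision filter wired to the token map via the constructor's
	// "articles_token_map" key.
	err = indexMapping.AddCustomTokenFilter("elision_fr", map[string]interface{}{
		"type":               elision.Name,
		"articles_token_map": "articles_fr",
	})
	if err != nil {
		log.Fatal(err)
	}

	// Custom analyzer that runs the elision filter after tokenization.
	err = indexMapping.AddCustomAnalyzer("fr_no_elision", map[string]interface{}{
		"type":          custom.Name,
		"tokenizer":     unicode.Name,
		"token_filters": []string{"elision_fr"},
	})
	if err != nil {
		log.Fatal(err)
	}

	index, err := bleve.NewMemOnly(indexMapping)
	if err != nil {
		log.Fatal(err)
	}
	defer index.Close()
	// Fields analyzed with "fr_no_elision" now index "l'avion" and "avion"
	// to the same term.
}

The "articles_token_map" config key is the one ElisionFilterConstructor requires, and the token map uses the same "tokens" shape as the test above.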