1
0
Fork 0

Adding upstream version 2.5.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-05-19 00:20:02 +02:00
parent c71cb8b61d
commit 982828099e
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
783 changed files with 150650 additions and 0 deletions

View file

@ -0,0 +1,82 @@
// Copyright (c) 2014 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package unicodenorm
import (
"fmt"
"github.com/blevesearch/bleve/v2/analysis"
"github.com/blevesearch/bleve/v2/registry"
"golang.org/x/text/unicode/norm"
)
// Name is the registry identifier for this token filter.
const Name = "normalize_unicode"

// Names of the supported Unicode normalization forms.
const (
	NFC  = "nfc"
	NFD  = "nfd"
	NFKC = "nfkc"
	NFKD = "nfkd"
)

// forms maps each supported form name to its x/text normalization form.
var forms = map[string]norm.Form{
	NFC:  norm.NFC,
	NFD:  norm.NFD,
	NFKC: norm.NFKC,
	NFKD: norm.NFKD,
}
// UnicodeNormalizeFilter is a token filter that rewrites each token's
// term bytes into the configured Unicode normalization form
// (NFC, NFD, NFKC, or NFKD).
type UnicodeNormalizeFilter struct {
	form norm.Form
}
// NewUnicodeNormalizeFilter returns a filter applying the named
// normalization form. It returns an error when formName is not one of
// the supported names (nfc, nfd, nfkc, nfkd).
func NewUnicodeNormalizeFilter(formName string) (*UnicodeNormalizeFilter, error) {
	if form, ok := forms[formName]; ok {
		return &UnicodeNormalizeFilter{form: form}, nil
	}
	return nil, fmt.Errorf("no form named %s", formName)
}
// MustNewUnicodeNormalizeFilter is like NewUnicodeNormalizeFilter but
// panics when formName is unknown; by convention it is intended for
// package-init or test-time construction only.
func MustNewUnicodeNormalizeFilter(formName string) *UnicodeNormalizeFilter {
	f, err := NewUnicodeNormalizeFilter(formName)
	if err != nil {
		panic(err)
	}
	return f
}
// Filter normalizes the term of every token in place and returns the
// same (mutated) stream. Elements of the stream are pointers, so the
// index-based loop updates the callers' tokens directly.
func (s *UnicodeNormalizeFilter) Filter(input analysis.TokenStream) analysis.TokenStream {
	for i := range input {
		input[i].Term = s.form.Bytes(input[i].Term)
	}
	return input
}
// UnicodeNormalizeFilterConstructor builds the filter from its registry
// configuration. The config must contain a string "form" key naming one
// of the supported normalization forms (nfc, nfd, nfkc, nfkd).
func UnicodeNormalizeFilterConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.TokenFilter, error) {
	// Type-assert directly into form; the original routed this through a
	// redundant intermediate variable (formVal -> form).
	form, ok := config["form"].(string)
	if !ok {
		return nil, fmt.Errorf("must specify form")
	}
	return NewUnicodeNormalizeFilter(form)
}
// init registers the filter under Name so analyzers can reference it
// by name through the registry.
func init() {
	if err := registry.RegisterTokenFilter(Name, UnicodeNormalizeFilterConstructor); err != nil {
		panic(err)
	}
}

View file

@ -0,0 +1,162 @@
// Copyright (c) 2014 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package unicodenorm
import (
"reflect"
"testing"
"github.com/blevesearch/bleve/v2/analysis"
)
// The following tests come from the Lucene test cases for its CJK width
// filter, which is our basis for using Unicode normalization as a
// substitute for that filter.
//
// NOTE(review): the first two inputs appeared as empty strings ("") in
// this copy, which can never NFKD-normalize to "Test"/"1234"; they and
// the katakana inputs are restored here as explicit \uXXXX escapes
// (fullwidth Latin/digits, halfwidth katakana) so the width folding is
// actually exercised and the literals cannot be garbled again.
func TestUnicodeNormalization(t *testing.T) {
	tests := []struct {
		formName string
		input    analysis.TokenStream
		output   analysis.TokenStream
	}{
		// NFKD folds fullwidth Latin letters to ASCII: Ｔｅｓｔ -> Test.
		{
			formName: NFKD,
			input: analysis.TokenStream{
				&analysis.Token{
					Term: []byte("\uFF34\uFF45\uFF53\uFF54"),
				},
			},
			output: analysis.TokenStream{
				&analysis.Token{
					Term: []byte("Test"),
				},
			},
		},
		// NFKD folds fullwidth digits to ASCII: １２３４ -> 1234.
		{
			formName: NFKD,
			input: analysis.TokenStream{
				&analysis.Token{
					Term: []byte("\uFF11\uFF12\uFF13\uFF14"),
				},
			},
			output: analysis.TokenStream{
				&analysis.Token{
					Term: []byte("1234"),
				},
			},
		},
		// NFKD widens halfwidth katakana: ｶﾀｶﾅ -> カタカナ.
		{
			formName: NFKD,
			input: analysis.TokenStream{
				&analysis.Token{
					Term: []byte("\uFF76\uFF80\uFF76\uFF85"),
				},
			},
			output: analysis.TokenStream{
				&analysis.Token{
					Term: []byte("カタカナ"),
				},
			},
		},
		// NFKC widens and composes the voiced sound mark: ｳﾞｨｯﾂ -> ヴィッツ.
		{
			formName: NFKC,
			input: analysis.TokenStream{
				&analysis.Token{
					Term: []byte("\uFF73\uFF9E\uFF68\uFF6F\uFF82"),
				},
			},
			output: analysis.TokenStream{
				&analysis.Token{
					Term: []byte("ヴィッツ"),
				},
			},
		},
		// NFKC widens and composes the semi-voiced mark: ﾊﾟﾅｿﾆｯｸ -> パナソニック.
		{
			formName: NFKC,
			input: analysis.TokenStream{
				&analysis.Token{
					Term: []byte("\uFF8A\uFF9F\uFF85\uFF7F\uFF86\uFF6F\uFF78"),
				},
			},
			output: analysis.TokenStream{
				&analysis.Token{
					Term: []byte("パナソニック"),
				},
			},
		},
		// NFD decomposes ANGSTROM SIGN into A + combining ring above.
		{
			formName: NFD,
			input: analysis.TokenStream{
				&analysis.Token{
					Term: []byte("\u212B"),
				},
			},
			output: analysis.TokenStream{
				&analysis.Token{
					Term: []byte("\u0041\u030A"),
				},
			},
		},
		// NFC composes ANGSTROM SIGN into LATIN CAPITAL LETTER A WITH RING ABOVE.
		{
			formName: NFC,
			input: analysis.TokenStream{
				&analysis.Token{
					Term: []byte("\u212B"),
				},
			},
			output: analysis.TokenStream{
				&analysis.Token{
					Term: []byte("\u00C5"),
				},
			},
		},
		// NFKD expands the "fi" ligature to its component letters.
		{
			formName: NFKD,
			input: analysis.TokenStream{
				&analysis.Token{
					Term: []byte("\uFB01"),
				},
			},
			output: analysis.TokenStream{
				&analysis.Token{
					Term: []byte("\u0066\u0069"),
				},
			},
		},
		// NFKC also folds the ligature (no composed "fi" codepoint exists).
		{
			formName: NFKC,
			input: analysis.TokenStream{
				&analysis.Token{
					Term: []byte("\uFB01"),
				},
			},
			output: analysis.TokenStream{
				&analysis.Token{
					Term: []byte("\u0066\u0069"),
				},
			},
		},
	}

	for _, test := range tests {
		filter := MustNewUnicodeNormalizeFilter(test.formName)
		actual := filter.Filter(test.input)
		if !reflect.DeepEqual(actual, test.output) {
			t.Errorf("expected %s, got %s", test.output[0].Term, actual[0].Term)
			t.Errorf("expected %#v, got %#v", test.output[0].Term, actual[0].Term)
		}
	}
}