1
0
Fork 0

Adding upstream version 2.5.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-05-19 00:20:02 +02:00
parent c71cb8b61d
commit 982828099e
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
783 changed files with 150650 additions and 0 deletions

View file

@@ -0,0 +1,50 @@
// Copyright (c) 2014 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package web
import (
"regexp"
"strings"
"github.com/blevesearch/bleve/v2/analysis"
"github.com/blevesearch/bleve/v2/analysis/tokenizer/exception"
"github.com/blevesearch/bleve/v2/analysis/tokenizer/unicode"
"github.com/blevesearch/bleve/v2/registry"
)
// Name is the registry key under which this tokenizer is registered.
const Name = "web"

// email matches RFC 5322-style addresses (dot-atom or quoted local part,
// domain name or bracketed IP literal). NOTE(review): the `\x5a\x53` range
// overlap near the end appears in the widely-copied original pattern; it is
// redundant but harmless — confirm before "fixing", since it would change
// the compiled regex bytes.
var email = `(?:[a-z0-9!#$%&'*+/=?^_` + "`" + `{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_` + "`" + `{|}~-]+)*|"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*")@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?|\[(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?|[a-z0-9-]*[a-z0-9]:(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])`

// url matches web URLs (scheme://, www., or bare domain/path forms) with
// balanced-parenthesis handling; case-insensitive via the (?i) flag.
// Presumably derived from John Gruber's URL-matching pattern — the exact
// bytes must be preserved.
var url = `(?i)\b((?:[a-z][\w-]+:(?:/{1,3}|[a-z0-9%])|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s` + "`" + `!()\[\]{};:'".,<>?«»“”‘’]))`

// twitterHandle matches @mentions: an @ followed by 1-15 word characters.
var twitterHandle = `@([a-zA-Z0-9_]){1,15}`

// twitterHashtag matches #hashtags of one or more word characters.
var twitterHashtag = `#([a-zA-Z0-9_])+`

// exceptions lists the patterns that must be emitted as single tokens
// instead of being split by the underlying unicode tokenizer.
var exceptions = []string{email, url, twitterHandle, twitterHashtag}

// exceptionsRegexp is the alternation of all exception patterns, compiled
// once at package init (MustCompile panics on a malformed pattern).
var exceptionsRegexp = regexp.MustCompile(strings.Join(exceptions, "|"))
// TokenizerConstructor builds the "web" tokenizer: the standard unicode
// tokenizer wrapped with exception rules so that emails, URLs, Twitter
// handles, and hashtags survive as single tokens. The config map is
// currently unused; err is non-nil only if the unicode tokenizer cannot
// be resolved from the cache.
func TokenizerConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.Tokenizer, error) {
	base, err := cache.TokenizerNamed(unicode.Name)
	if err != nil {
		return nil, err
	}
	return exception.NewExceptionsTokenizer(exceptionsRegexp, base), nil
}
// init registers the web tokenizer under Name. A registration failure
// (e.g. a duplicate name) is a programmer error, so it panics.
func init() {
	if err := registry.RegisterTokenizer(Name, TokenizerConstructor); err != nil {
		panic(err)
	}
}

View file

@@ -0,0 +1,148 @@
// Copyright (c) 2014 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package web
import (
"reflect"
"testing"
"github.com/blevesearch/bleve/v2/analysis"
"github.com/blevesearch/bleve/v2/registry"
)
// TestWeb verifies that the registered "web" tokenizer keeps emails, URLs,
// Twitter handles, and hashtags intact as single tokens while ordinary words
// are tokenized normally. Each case pins exact byte offsets (Start/End),
// positions, and token types via reflect.DeepEqual, so the expected tables
// below are intentionally literal.
func TestWeb(t *testing.T) {
	tests := []struct {
		input  []byte
		output analysis.TokenStream
	}{
		// Email address is preserved as one token.
		{
			[]byte("Hello info@blevesearch.com"),
			analysis.TokenStream{
				{
					Start:    0,
					End:      5,
					Term:     []byte("Hello"),
					Position: 1,
					Type:     analysis.AlphaNumeric,
				},
				{
					Start:    6,
					End:      26,
					Term:     []byte("info@blevesearch.com"),
					Position: 2,
					Type:     analysis.AlphaNumeric,
				},
			},
		},
		// URL (scheme + host) is preserved as one token.
		{
			[]byte("That http://blevesearch.com"),
			analysis.TokenStream{
				{
					Start:    0,
					End:      4,
					Term:     []byte("That"),
					Position: 1,
					Type:     analysis.AlphaNumeric,
				},
				{
					Start:    5,
					End:      27,
					Term:     []byte("http://blevesearch.com"),
					Position: 2,
					Type:     analysis.AlphaNumeric,
				},
			},
		},
		// Twitter @handle is preserved including the leading '@'.
		{
			[]byte("Hey @blevesearch"),
			analysis.TokenStream{
				{
					Start:    0,
					End:      3,
					Term:     []byte("Hey"),
					Position: 1,
					Type:     analysis.AlphaNumeric,
				},
				{
					Start:    4,
					End:      16,
					Term:     []byte("@blevesearch"),
					Position: 2,
					Type:     analysis.AlphaNumeric,
				},
			},
		},
		// Hashtag is preserved including the leading '#'.
		{
			[]byte("This #bleve"),
			analysis.TokenStream{
				{
					Start:    0,
					End:      4,
					Term:     []byte("This"),
					Position: 1,
					Type:     analysis.AlphaNumeric,
				},
				{
					Start:    5,
					End:      11,
					Term:     []byte("#bleve"),
					Position: 2,
					Type:     analysis.AlphaNumeric,
				},
			},
		},
		// Trailing punctuation ('?') is not part of the handle token and
		// produces no token of its own.
		{
			[]byte("What about @blevesearch?"),
			analysis.TokenStream{
				{
					Start:    0,
					End:      4,
					Term:     []byte("What"),
					Position: 1,
					Type:     analysis.AlphaNumeric,
				},
				{
					Start:    5,
					End:      10,
					Term:     []byte("about"),
					Position: 2,
					Type:     analysis.AlphaNumeric,
				},
				{
					Start:    11,
					End:      23,
					Term:     []byte("@blevesearch"),
					Position: 3,
					Type:     analysis.AlphaNumeric,
				},
			},
		},
	}
	// Resolve the tokenizer through the registry, exercising the same
	// lookup path production code uses (including the init() registration).
	cache := registry.NewCache()
	tokenizer, err := cache.TokenizerNamed(Name)
	if err != nil {
		t.Fatal(err)
	}
	for _, test := range tests {
		actual := tokenizer.Tokenize(test.input)
		if !reflect.DeepEqual(actual, test.output) {
			t.Errorf("Expected %v, got %v for %s", test.output, actual, string(test.input))
		}
	}
}