Adding upstream version 2.5.1.
Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
parent
c71cb8b61d
commit
982828099e
783 changed files with 150650 additions and 0 deletions
76
analysis/tokenizer/character/character.go
Normal file
76
analysis/tokenizer/character/character.go
Normal file
|
@ -0,0 +1,76 @@
|
|||
// Copyright (c) 2016 Couchbase, Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package character
|
||||
|
||||
import (
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/blevesearch/bleve/v2/analysis"
|
||||
)
|
||||
|
||||
type IsTokenRune func(r rune) bool
|
||||
|
||||
type CharacterTokenizer struct {
|
||||
isTokenRun IsTokenRune
|
||||
}
|
||||
|
||||
func NewCharacterTokenizer(f IsTokenRune) *CharacterTokenizer {
|
||||
return &CharacterTokenizer{
|
||||
isTokenRun: f,
|
||||
}
|
||||
}
|
||||
|
||||
func (c *CharacterTokenizer) Tokenize(input []byte) analysis.TokenStream {
|
||||
|
||||
rv := make(analysis.TokenStream, 0, 1024)
|
||||
|
||||
offset := 0
|
||||
start := 0
|
||||
end := 0
|
||||
count := 0
|
||||
for currRune, size := utf8.DecodeRune(input[offset:]); currRune != utf8.RuneError; currRune, size = utf8.DecodeRune(input[offset:]) {
|
||||
isToken := c.isTokenRun(currRune)
|
||||
if isToken {
|
||||
end = offset + size
|
||||
} else {
|
||||
if end-start > 0 {
|
||||
// build token
|
||||
rv = append(rv, &analysis.Token{
|
||||
Term: input[start:end],
|
||||
Start: start,
|
||||
End: end,
|
||||
Position: count + 1,
|
||||
Type: analysis.AlphaNumeric,
|
||||
})
|
||||
count++
|
||||
}
|
||||
start = offset + size
|
||||
end = start
|
||||
}
|
||||
offset += size
|
||||
}
|
||||
// if we ended in the middle of a token, finish it
|
||||
if end-start > 0 {
|
||||
// build token
|
||||
rv = append(rv, &analysis.Token{
|
||||
Term: input[start:end],
|
||||
Start: start,
|
||||
End: end,
|
||||
Position: count + 1,
|
||||
Type: analysis.AlphaNumeric,
|
||||
})
|
||||
}
|
||||
return rv
|
||||
}
|
84
analysis/tokenizer/character/character_test.go
Normal file
84
analysis/tokenizer/character/character_test.go
Normal file
|
@ -0,0 +1,84 @@
|
|||
// Copyright (c) 2016 Couchbase, Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package character
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
"unicode"
|
||||
|
||||
"github.com/blevesearch/bleve/v2/analysis"
|
||||
)
|
||||
|
||||
// TestCharacterTokenizer checks that a CharacterTokenizer built on
// unicode.IsLetter splits on every non-letter rune and reports correct
// byte offsets, 1-based positions, and the AlphaNumeric token type.
func TestCharacterTokenizer(t *testing.T) {
	tests := []struct {
		input  []byte
		output analysis.TokenStream
	}{
		{
			// trailing '.' is a separator and produces no token
			[]byte("Hello World."),
			analysis.TokenStream{
				{
					Start:    0,
					End:      5,
					Term:     []byte("Hello"),
					Position: 1,
					Type:     analysis.AlphaNumeric,
				},
				{
					Start:    6,
					End:      11,
					Term:     []byte("World"),
					Position: 2,
					Type:     analysis.AlphaNumeric,
				},
			},
		},
		{
			// '@' and '.' are non-letters, so the address splits in three
			[]byte("dominique@mcdiabetes.com"),
			analysis.TokenStream{
				{
					Start:    0,
					End:      9,
					Term:     []byte("dominique"),
					Position: 1,
					Type:     analysis.AlphaNumeric,
				},
				{
					Start:    10,
					End:      20,
					Term:     []byte("mcdiabetes"),
					Position: 2,
					Type:     analysis.AlphaNumeric,
				},
				{
					Start:    21,
					End:      24,
					Term:     []byte("com"),
					Position: 3,
					Type:     analysis.AlphaNumeric,
				},
			},
		},
	}

	tokenizer := NewCharacterTokenizer(unicode.IsLetter)
	for _, test := range tests {
		actual := tokenizer.Tokenize(test.input)
		if !reflect.DeepEqual(actual, test.output) {
			t.Errorf("Expected %v, got %v for %s", test.output, actual, string(test.input))
		}
	}
}
|
144
analysis/tokenizer/exception/exception.go
Normal file
144
analysis/tokenizer/exception/exception.go
Normal file
|
@ -0,0 +1,144 @@
|
|||
// Copyright (c) 2015 Couchbase, Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// package exception implements a Tokenizer which extracts pieces matched by a
|
||||
// regular expression from the input data, delegates the rest to another
|
||||
// tokenizer, then insert back extracted parts in the token stream. Use it to
|
||||
// preserve sequences which a regular tokenizer would alter or remove.
|
||||
//
|
||||
// Its constructor takes the following arguments:
|
||||
//
|
||||
// "exceptions" ([]string): one or more Go regular expressions matching the
|
||||
// sequence to preserve. Multiple expressions are combined with "|".
|
||||
//
|
||||
// "tokenizer" (string): the name of the tokenizer processing the data not
|
||||
// matched by "exceptions".
|
||||
package exception
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/blevesearch/bleve/v2/analysis"
|
||||
"github.com/blevesearch/bleve/v2/registry"
|
||||
)
|
||||
|
||||
const Name = "exception"
|
||||
|
||||
// ExceptionsTokenizer preserves spans matched by a regular expression as
// single verbatim tokens and delegates everything between matches to a
// wrapped tokenizer.
type ExceptionsTokenizer struct {
	exception *regexp.Regexp     // spans matching this become single tokens
	remaining analysis.Tokenizer // handles the unmatched stretches
}

// NewExceptionsTokenizer returns an ExceptionsTokenizer that keeps
// matches of exception intact and tokenizes the rest with remaining.
func NewExceptionsTokenizer(exception *regexp.Regexp, remaining analysis.Tokenizer) *ExceptionsTokenizer {
	return &ExceptionsTokenizer{
		exception: exception,
		remaining: remaining,
	}
}

// Tokenize walks the exception matches in order. For each gap before a
// match it runs the wrapped tokenizer on the gap and shifts the resulting
// tokens' Start/End by the gap's offset and their Position by the number
// of tokens already emitted; each match itself becomes one token (note:
// with zero Type value, and positions stay globally sequential via lastPos).
func (t *ExceptionsTokenizer) Tokenize(input []byte) analysis.TokenStream {
	rv := make(analysis.TokenStream, 0)
	matches := t.exception.FindAllIndex(input, -1)
	currInput := 0 // byte offset of the first not-yet-consumed input byte
	lastPos := 0   // number of tokens emitted so far (position counter)
	for _, match := range matches {
		start := match[0]
		end := match[1]
		if start > currInput {
			// need to defer to remaining for unprocessed section
			intermediate := t.remaining.Tokenize(input[currInput:start])
			// add intermediate tokens to our result stream
			for _, token := range intermediate {
				// adjust token offsets: sub-tokenizer saw a slice starting
				// at currInput and numbered positions from 1
				token.Position += lastPos
				token.Start += currInput
				token.End += currInput
				rv = append(rv, token)
			}
			lastPos += len(intermediate)
			currInput = start
		}

		// create single token with this regexp match
		token := &analysis.Token{
			Term:     input[start:end],
			Start:    start,
			End:      end,
			Position: lastPos + 1,
		}
		rv = append(rv, token)
		lastPos++
		currInput = end

	}

	if currInput < len(input) {
		// need to defer to remaining for unprocessed section
		intermediate := t.remaining.Tokenize(input[currInput:])
		// add intermediate tokens to our result stream
		for _, token := range intermediate {
			// adjust token offsets
			token.Position += lastPos
			token.Start += currInput
			token.End += currInput
			rv = append(rv, token)
		}
	}

	return rv
}
|
||||
|
||||
func ExceptionsTokenizerConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.Tokenizer, error) {
|
||||
exceptions := []string{}
|
||||
iexceptions, ok := config["exceptions"].([]interface{})
|
||||
if ok {
|
||||
for _, exception := range iexceptions {
|
||||
exception, ok := exception.(string)
|
||||
if ok {
|
||||
exceptions = append(exceptions, exception)
|
||||
}
|
||||
}
|
||||
}
|
||||
aexceptions, ok := config["exceptions"].([]string)
|
||||
if ok {
|
||||
exceptions = append(exceptions, aexceptions...)
|
||||
}
|
||||
if len(exceptions) == 0 {
|
||||
return nil, fmt.Errorf("no pattern found in 'exception' property")
|
||||
}
|
||||
exceptionPattern := strings.Join(exceptions, "|")
|
||||
r, err := regexp.Compile(exceptionPattern)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to build regexp tokenizer: %v", err)
|
||||
}
|
||||
|
||||
remainingName, ok := config["tokenizer"].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("must specify tokenizer for remaining input")
|
||||
}
|
||||
remaining, err := cache.TokenizerNamed(remainingName)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return NewExceptionsTokenizer(r, remaining), nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
err := registry.RegisterTokenizer(Name, ExceptionsTokenizerConstructor)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
171
analysis/tokenizer/exception/exception_test.go
Normal file
171
analysis/tokenizer/exception/exception_test.go
Normal file
|
@ -0,0 +1,171 @@
|
|||
// Copyright (c) 2015 Couchbase, Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package exception
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/blevesearch/bleve/v2/analysis"
|
||||
_ "github.com/blevesearch/bleve/v2/analysis/tokenizer/unicode"
|
||||
"github.com/blevesearch/bleve/v2/registry"
|
||||
)
|
||||
|
||||
// TestExceptionsTokenizer builds exception tokenizers through the
// registry from config maps and verifies that URL/email patterns are
// preserved as single untyped tokens while the surrounding text is
// tokenized by the wrapped "unicode" tokenizer, with globally
// sequential positions and absolute byte offsets.
func TestExceptionsTokenizer(t *testing.T) {
	tests := []struct {
		config   map[string]interface{}
		input    []byte
		patterns []string // NOTE(review): unused by the test body; config carries the patterns
		result   analysis.TokenStream
	}{
		{
			input: []byte("test http://blevesearch.com/ words"),
			config: map[string]interface{}{
				"type":      "exception",
				"tokenizer": "unicode",
				"exceptions": []interface{}{
					`[hH][tT][tT][pP][sS]?://(\S)*`,
					`[fF][iI][lL][eE]://(\S)*`,
					`[fF][tT][pP]://(\S)*`,
				},
			},
			result: analysis.TokenStream{
				&analysis.Token{
					Term:     []byte("test"),
					Position: 1,
					Start:    0,
					End:      4,
				},
				&analysis.Token{
					Term:     []byte("http://blevesearch.com/"),
					Position: 2,
					Start:    5,
					End:      28,
				},
				&analysis.Token{
					Term:     []byte("words"),
					Position: 3,
					Start:    29,
					End:      34,
				},
			},
		},
		{
			input: []byte("what ftp://blevesearch.com/ songs"),
			config: map[string]interface{}{
				"type":      "exception",
				"tokenizer": "unicode",
				"exceptions": []interface{}{
					`[hH][tT][tT][pP][sS]?://(\S)*`,
					`[fF][iI][lL][eE]://(\S)*`,
					`[fF][tT][pP]://(\S)*`,
				},
			},
			result: analysis.TokenStream{
				&analysis.Token{
					Term:     []byte("what"),
					Position: 1,
					Start:    0,
					End:      4,
				},
				&analysis.Token{
					Term:     []byte("ftp://blevesearch.com/"),
					Position: 2,
					Start:    5,
					End:      27,
				},
				&analysis.Token{
					Term:     []byte("songs"),
					Position: 3,
					Start:    28,
					End:      33,
				},
			},
		},
		{
			// multiple exception kinds in one input: an email and a URL
			input: []byte("please email marty@couchbase.com the URL https://blevesearch.com/"),
			config: map[string]interface{}{
				"type":      "exception",
				"tokenizer": "unicode",
				"exceptions": []interface{}{
					`[hH][tT][tT][pP][sS]?://(\S)*`,
					`[fF][iI][lL][eE]://(\S)*`,
					`[fF][tT][pP]://(\S)*`,
					`\S+@\S+`,
				},
			},
			result: analysis.TokenStream{
				&analysis.Token{
					Term:     []byte("please"),
					Position: 1,
					Start:    0,
					End:      6,
				},
				&analysis.Token{
					Term:     []byte("email"),
					Position: 2,
					Start:    7,
					End:      12,
				},
				&analysis.Token{
					Term:     []byte("marty@couchbase.com"),
					Position: 3,
					Start:    13,
					End:      32,
				},
				&analysis.Token{
					Term:     []byte("the"),
					Position: 4,
					Start:    33,
					End:      36,
				},
				&analysis.Token{
					Term:     []byte("URL"),
					Position: 5,
					Start:    37,
					End:      40,
				},
				&analysis.Token{
					Term:     []byte("https://blevesearch.com/"),
					Position: 6,
					Start:    41,
					End:      65,
				},
			},
		},
	}

	// remaining := unicode.NewUnicodeTokenizer()
	for _, test := range tests {

		// build the requested exception tokenizer
		cache := registry.NewCache()
		tokenizer, err := cache.DefineTokenizer("custom", test.config)
		if err != nil {
			t.Fatal(err)
		}

		// pattern := strings.Join(test.patterns, "|")
		// r, err := regexp.Compile(pattern)
		// if err != nil {
		// 	t.Fatal(err)
		// }
		// tokenizer := NewExceptionsTokenizer(r, remaining)
		actual := tokenizer.Tokenize(test.input)
		if !reflect.DeepEqual(actual, test.result) {
			t.Errorf("expected %v, got %v", test.result, actual)
		}
	}
}
|
36
analysis/tokenizer/letter/letter.go
Normal file
36
analysis/tokenizer/letter/letter.go
Normal file
|
@ -0,0 +1,36 @@
|
|||
// Copyright (c) 2016 Couchbase, Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package letter
|
||||
|
||||
import (
|
||||
"unicode"
|
||||
|
||||
"github.com/blevesearch/bleve/v2/analysis"
|
||||
"github.com/blevesearch/bleve/v2/analysis/tokenizer/character"
|
||||
"github.com/blevesearch/bleve/v2/registry"
|
||||
)
|
||||
|
||||
const Name = "letter"
|
||||
|
||||
func TokenizerConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.Tokenizer, error) {
|
||||
return character.NewCharacterTokenizer(unicode.IsLetter), nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
err := registry.RegisterTokenizer(Name, TokenizerConstructor)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
87
analysis/tokenizer/regexp/regexp.go
Normal file
87
analysis/tokenizer/regexp/regexp.go
Normal file
|
@ -0,0 +1,87 @@
|
|||
// Copyright (c) 2014 Couchbase, Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package regexp
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strconv"
|
||||
|
||||
"github.com/blevesearch/bleve/v2/analysis"
|
||||
"github.com/blevesearch/bleve/v2/registry"
|
||||
)
|
||||
|
||||
const Name = "regexp"
|
||||
|
||||
// IdeographRegexp matches any single Han, Hangul, Hiragana, or Katakana
// rune; used to classify terms as ideographic.
var IdeographRegexp = regexp.MustCompile(`\p{Han}|\p{Hangul}|\p{Hiragana}|\p{Katakana}`)

// RegexpTokenizer emits one token per non-empty match of its regular
// expression against the input.
type RegexpTokenizer struct {
	r *regexp.Regexp
}

// NewRegexpTokenizer returns a RegexpTokenizer driven by r.
func NewRegexpTokenizer(r *regexp.Regexp) *RegexpTokenizer {
	return &RegexpTokenizer{r: r}
}
|
||||
|
||||
func (rt *RegexpTokenizer) Tokenize(input []byte) analysis.TokenStream {
|
||||
matches := rt.r.FindAllIndex(input, -1)
|
||||
rv := make(analysis.TokenStream, 0, len(matches))
|
||||
for i, match := range matches {
|
||||
matchBytes := input[match[0]:match[1]]
|
||||
if match[1]-match[0] > 0 {
|
||||
token := analysis.Token{
|
||||
Term: matchBytes,
|
||||
Start: match[0],
|
||||
End: match[1],
|
||||
Position: i + 1,
|
||||
Type: detectTokenType(matchBytes),
|
||||
}
|
||||
rv = append(rv, &token)
|
||||
}
|
||||
}
|
||||
return rv
|
||||
}
|
||||
|
||||
func RegexpTokenizerConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.Tokenizer, error) {
|
||||
rval, ok := config["regexp"].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("must specify regexp")
|
||||
}
|
||||
r, err := regexp.Compile(rval)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to build regexp tokenizer: %v", err)
|
||||
}
|
||||
return NewRegexpTokenizer(r), nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
err := registry.RegisterTokenizer(Name, RegexpTokenizerConstructor)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
func detectTokenType(termBytes []byte) analysis.TokenType {
|
||||
if IdeographRegexp.Match(termBytes) {
|
||||
return analysis.Ideographic
|
||||
}
|
||||
_, err := strconv.ParseFloat(string(termBytes), 64)
|
||||
if err == nil {
|
||||
return analysis.Numeric
|
||||
}
|
||||
return analysis.AlphaNumeric
|
||||
}
|
166
analysis/tokenizer/regexp/regexp_test.go
Normal file
166
analysis/tokenizer/regexp/regexp_test.go
Normal file
|
@ -0,0 +1,166 @@
|
|||
// Copyright (c) 2014 Couchbase, Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package regexp
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"regexp"
|
||||
"testing"
|
||||
|
||||
"github.com/blevesearch/bleve/v2/analysis"
|
||||
)
|
||||
|
||||
// TestBoundary exercises RegexpTokenizer with a word/ideograph pattern:
// ASCII words tokenize as AlphaNumeric spans, each Japanese character
// becomes its own 3-byte Ideographic token, and empty input yields an
// empty stream.
func TestBoundary(t *testing.T) {

	wordRegex := regexp.MustCompile(`\p{Han}|\p{Hangul}|\p{Hiragana}|\p{Katakana}|\w+`)

	tests := []struct {
		input  []byte
		output analysis.TokenStream
	}{
		{
			[]byte("Hello World."),
			analysis.TokenStream{
				{
					Start:    0,
					End:      5,
					Term:     []byte("Hello"),
					Position: 1,
					Type:     analysis.AlphaNumeric,
				},
				{
					Start:    6,
					End:      11,
					Term:     []byte("World"),
					Position: 2,
					Type:     analysis.AlphaNumeric,
				},
			},
		},
		{
			// each character is a separate match; offsets step by 3 bytes
			[]byte("こんにちは世界"),
			analysis.TokenStream{
				{
					Start:    0,
					End:      3,
					Term:     []byte("こ"),
					Position: 1,
					Type:     analysis.Ideographic,
				},
				{
					Start:    3,
					End:      6,
					Term:     []byte("ん"),
					Position: 2,
					Type:     analysis.Ideographic,
				},
				{
					Start:    6,
					End:      9,
					Term:     []byte("に"),
					Position: 3,
					Type:     analysis.Ideographic,
				},
				{
					Start:    9,
					End:      12,
					Term:     []byte("ち"),
					Position: 4,
					Type:     analysis.Ideographic,
				},
				{
					Start:    12,
					End:      15,
					Term:     []byte("は"),
					Position: 5,
					Type:     analysis.Ideographic,
				},
				{
					Start:    15,
					End:      18,
					Term:     []byte("世"),
					Position: 6,
					Type:     analysis.Ideographic,
				},
				{
					Start:    18,
					End:      21,
					Term:     []byte("界"),
					Position: 7,
					Type:     analysis.Ideographic,
				},
			},
		},
		{
			// empty input produces an empty (non-nil) stream
			[]byte(""),
			analysis.TokenStream{},
		},
	}

	for _, test := range tests {
		tokenizer := NewRegexpTokenizer(wordRegex)
		actual := tokenizer.Tokenize(test.input)

		if !reflect.DeepEqual(actual, test.output) {
			t.Errorf("Expected %v, got %v for %s", test.output, actual, string(test.input))
		}
	}
}
|
||||
|
||||
// TestBugProducingEmptyTokens regression-tests that a pattern which can
// match the empty string (`*` quantifier) never produces empty tokens
// in the output stream — only the three real words are emitted.
func TestBugProducingEmptyTokens(t *testing.T) {

	wordRegex := regexp.MustCompile(`[0-9a-zA-Z_]*`)

	tests := []struct {
		input  []byte
		output analysis.TokenStream
	}{
		{
			[]byte("Chatha Edwards Sr."),
			analysis.TokenStream{
				{
					Start:    0,
					End:      6,
					Term:     []byte("Chatha"),
					Position: 1,
					Type:     analysis.AlphaNumeric,
				},
				{
					Start:    7,
					End:      14,
					Term:     []byte("Edwards"),
					Position: 2,
					Type:     analysis.AlphaNumeric,
				},
				{
					Start:    15,
					End:      17,
					Term:     []byte("Sr"),
					Position: 3,
					Type:     analysis.AlphaNumeric,
				},
			},
		},
	}

	for _, test := range tests {
		tokenizer := NewRegexpTokenizer(wordRegex)
		actual := tokenizer.Tokenize(test.input)

		if !reflect.DeepEqual(actual, test.output) {
			t.Errorf("Expected %v, got %v for %s", test.output, actual, string(test.input))
		}
	}
}
|
52
analysis/tokenizer/single/single.go
Normal file
52
analysis/tokenizer/single/single.go
Normal file
|
@ -0,0 +1,52 @@
|
|||
// Copyright (c) 2014 Couchbase, Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package single
|
||||
|
||||
import (
|
||||
"github.com/blevesearch/bleve/v2/analysis"
|
||||
"github.com/blevesearch/bleve/v2/registry"
|
||||
)
|
||||
|
||||
const Name = "single"
|
||||
|
||||
type SingleTokenTokenizer struct {
|
||||
}
|
||||
|
||||
func NewSingleTokenTokenizer() *SingleTokenTokenizer {
|
||||
return &SingleTokenTokenizer{}
|
||||
}
|
||||
|
||||
func (t *SingleTokenTokenizer) Tokenize(input []byte) analysis.TokenStream {
|
||||
return analysis.TokenStream{
|
||||
&analysis.Token{
|
||||
Term: input,
|
||||
Position: 1,
|
||||
Start: 0,
|
||||
End: len(input),
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func SingleTokenTokenizerConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.Tokenizer, error) {
|
||||
return NewSingleTokenTokenizer(), nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
err := registry.RegisterTokenizer(Name, SingleTokenTokenizerConstructor)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
76
analysis/tokenizer/single/single_test.go
Normal file
76
analysis/tokenizer/single/single_test.go
Normal file
|
@ -0,0 +1,76 @@
|
|||
// Copyright (c) 2014 Couchbase, Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package single
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/blevesearch/bleve/v2/analysis"
|
||||
)
|
||||
|
||||
// TestSingleTokenTokenizer verifies that the whole input — ASCII,
// Japanese, or Thai — comes back as exactly one AlphaNumeric token
// whose End equals the input's byte length (not its rune count).
func TestSingleTokenTokenizer(t *testing.T) {

	tests := []struct {
		input  []byte
		output analysis.TokenStream
	}{
		{
			[]byte("Hello World"),
			analysis.TokenStream{
				{
					Start:    0,
					End:      11,
					Term:     []byte("Hello World"),
					Position: 1,
					Type:     analysis.AlphaNumeric,
				},
			},
		},
		{
			// 7 runes x 3 bytes each = 21 bytes
			[]byte("こんにちは世界"),
			analysis.TokenStream{
				{
					Start:    0,
					End:      21,
					Term:     []byte("こんにちは世界"),
					Position: 1,
					Type:     analysis.AlphaNumeric,
				},
			},
		},
		{
			// Thai sample: End is the UTF-8 byte length, 72
			[]byte("แยกคำภาษาไทยก็ทำได้นะจ้ะ"),
			analysis.TokenStream{
				{
					Start:    0,
					End:      72,
					Term:     []byte("แยกคำภาษาไทยก็ทำได้นะจ้ะ"),
					Position: 1,
					Type:     analysis.AlphaNumeric,
				},
			},
		},
	}

	for _, test := range tests {
		tokenizer := NewSingleTokenTokenizer()
		actual := tokenizer.Tokenize(test.input)

		if !reflect.DeepEqual(actual, test.output) {
			t.Errorf("Expected %v, got %v for %s", test.output, actual, string(test.input))
		}
	}
}
|
134
analysis/tokenizer/unicode/unicode.go
Normal file
134
analysis/tokenizer/unicode/unicode.go
Normal file
|
@ -0,0 +1,134 @@
|
|||
// Copyright (c) 2014 Couchbase, Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package unicode
|
||||
|
||||
import (
|
||||
"github.com/blevesearch/segment"
|
||||
|
||||
"github.com/blevesearch/bleve/v2/analysis"
|
||||
"github.com/blevesearch/bleve/v2/registry"
|
||||
)
|
||||
|
||||
const Name = "unicode"
|
||||
|
||||
// UnicodeTokenizer segments input into word tokens using the
// segmentation rules implemented by github.com/blevesearch/segment.
type UnicodeTokenizer struct{}

// NewUnicodeTokenizer returns a ready-to-use UnicodeTokenizer.
func NewUnicodeTokenizer() *UnicodeTokenizer {
	return &UnicodeTokenizer{}
}
|
||||
|
||||
// Tokenize runs the word segmenter over input and emits one token per
// segment whose type is not segment.None. To reduce allocations it
// (a) carves Token values out of batch-allocated arrays (ta), sized by
// guessing how many segments remain, and (b) grows the result in
// geometrically larger chunks (rv), parking full chunks in rvx and
// concatenating everything at the end.
//
// NOTE(review): segmenter.Err() is never consulted, so a segmentation
// error silently truncates the stream — confirm against the segment
// package's contract before relying on complete output.
func (rt *UnicodeTokenizer) Tokenize(input []byte) analysis.TokenStream {
	rvx := make([]analysis.TokenStream, 0, 10) // When rv gets full, append to rvx.
	rv := make(analysis.TokenStream, 0, 1)

	// ta is the current batch of preallocated Token values; taNext is
	// the index of the next unused slot.
	ta := []analysis.Token(nil)
	taNext := 0

	segmenter := segment.NewWordSegmenterDirect(input)
	start := 0 // byte offset where the current segment begins
	pos := 1   // 1-based position of the next emitted token

	// guessRemaining estimates how many segments are left, assuming the
	// average segment length seen so far holds for the rest of input.
	guessRemaining := func(end int) int {
		avgSegmentLen := end / (len(rv) + 1)
		if avgSegmentLen < 1 {
			avgSegmentLen = 1
		}

		remainingLen := len(input) - end

		return remainingLen / avgSegmentLen
	}

	for segmenter.Segment() {
		segmentBytes := segmenter.Bytes()
		end := start + len(segmentBytes)
		if segmenter.Type() != segment.None {
			if taNext >= len(ta) {
				// current batch exhausted: allocate a new one, clamped
				// to [1, 1000] slots
				remainingSegments := guessRemaining(end)
				if remainingSegments > 1000 {
					remainingSegments = 1000
				}
				if remainingSegments < 1 {
					remainingSegments = 1
				}

				ta = make([]analysis.Token, remainingSegments)
				taNext = 0
			}

			// take the next preallocated Token and fill it in place
			token := &ta[taNext]
			taNext++

			token.Term = segmentBytes
			token.Start = start
			token.End = end
			token.Position = pos
			token.Type = convertType(segmenter.Type())

			if len(rv) >= cap(rv) { // When rv is full, save it into rvx.
				rvx = append(rvx, rv)

				rvCap := cap(rv) * 2
				if rvCap > 256 {
					rvCap = 256
				}

				rv = make(analysis.TokenStream, 0, rvCap) // Next rv cap is bigger.
			}

			rv = append(rv, token)
			pos++
		}
		start = end
	}

	// flatten the parked chunks plus the tail chunk into one stream
	if len(rvx) > 0 {
		n := len(rv)
		for _, r := range rvx {
			n += len(r)
		}
		rall := make(analysis.TokenStream, 0, n)
		for _, r := range rvx {
			rall = append(rall, r...)
		}
		return append(rall, rv...)
	}

	return rv
}
|
||||
|
||||
// UnicodeTokenizerConstructor adapts NewUnicodeTokenizer to the registry
// constructor signature; the config map is ignored.
func UnicodeTokenizerConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.Tokenizer, error) {
	return NewUnicodeTokenizer(), nil
}
|
||||
|
||||
func init() {
|
||||
err := registry.RegisterTokenizer(Name, UnicodeTokenizerConstructor)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
func convertType(segmentWordType int) analysis.TokenType {
|
||||
switch segmentWordType {
|
||||
case segment.Ideo:
|
||||
return analysis.Ideographic
|
||||
case segment.Kana:
|
||||
return analysis.Ideographic
|
||||
case segment.Number:
|
||||
return analysis.Numeric
|
||||
}
|
||||
return analysis.AlphaNumeric
|
||||
}
|
202
analysis/tokenizer/unicode/unicode_test.go
Normal file
202
analysis/tokenizer/unicode/unicode_test.go
Normal file
|
@ -0,0 +1,202 @@
|
|||
// Copyright (c) 2014 Couchbase, Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package unicode
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/blevesearch/bleve/v2/analysis"
|
||||
"github.com/blevesearch/segment"
|
||||
)
|
||||
|
||||
func TestUnicode(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
input []byte
|
||||
output analysis.TokenStream
|
||||
}{
|
||||
{
|
||||
[]byte("Hello World"),
|
||||
analysis.TokenStream{
|
||||
{
|
||||
Start: 0,
|
||||
End: 5,
|
||||
Term: []byte("Hello"),
|
||||
Position: 1,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
{
|
||||
Start: 6,
|
||||
End: 11,
|
||||
Term: []byte("World"),
|
||||
Position: 2,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
[]byte("steven's"),
|
||||
analysis.TokenStream{
|
||||
{
|
||||
Start: 0,
|
||||
End: 8,
|
||||
Term: []byte("steven's"),
|
||||
Position: 1,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
[]byte("こんにちは世界"),
|
||||
analysis.TokenStream{
|
||||
{
|
||||
Start: 0,
|
||||
End: 3,
|
||||
Term: []byte("こ"),
|
||||
Position: 1,
|
||||
Type: analysis.Ideographic,
|
||||
},
|
||||
{
|
||||
Start: 3,
|
||||
End: 6,
|
||||
Term: []byte("ん"),
|
||||
Position: 2,
|
||||
Type: analysis.Ideographic,
|
||||
},
|
||||
{
|
||||
Start: 6,
|
||||
End: 9,
|
||||
Term: []byte("に"),
|
||||
Position: 3,
|
||||
Type: analysis.Ideographic,
|
||||
},
|
||||
{
|
||||
Start: 9,
|
||||
End: 12,
|
||||
Term: []byte("ち"),
|
||||
Position: 4,
|
||||
Type: analysis.Ideographic,
|
||||
},
|
||||
{
|
||||
Start: 12,
|
||||
End: 15,
|
||||
Term: []byte("は"),
|
||||
Position: 5,
|
||||
Type: analysis.Ideographic,
|
||||
},
|
||||
{
|
||||
Start: 15,
|
||||
End: 18,
|
||||
Term: []byte("世"),
|
||||
Position: 6,
|
||||
Type: analysis.Ideographic,
|
||||
},
|
||||
{
|
||||
Start: 18,
|
||||
End: 21,
|
||||
Term: []byte("界"),
|
||||
Position: 7,
|
||||
Type: analysis.Ideographic,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
[]byte("age 25"),
|
||||
analysis.TokenStream{
|
||||
{
|
||||
Start: 0,
|
||||
End: 3,
|
||||
Term: []byte("age"),
|
||||
Position: 1,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
{
|
||||
Start: 4,
|
||||
End: 6,
|
||||
Term: []byte("25"),
|
||||
Position: 2,
|
||||
Type: analysis.Numeric,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
[]byte("カ"),
|
||||
analysis.TokenStream{
|
||||
{
|
||||
Start: 0,
|
||||
End: 3,
|
||||
Term: []byte("カ"),
|
||||
Position: 1,
|
||||
Type: analysis.Ideographic,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
tokenizer := NewUnicodeTokenizer()
|
||||
actual := tokenizer.Tokenize(test.input)
|
||||
|
||||
if !reflect.DeepEqual(actual, test.output) {
|
||||
t.Errorf("Expected %v, got %v for %s", test.output, actual, string(test.input))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// sampleLargeInput is a multi-paragraph English text used as a
// realistic workload for BenchmarkTokenizeEnglishText below.
var sampleLargeInput = []byte(`There are three characteristics of liquids which are relevant to the discussion of a BLEVE:
If a liquid in a sealed container is boiled, the pressure inside the container increases. As the liquid changes to a gas it expands - this expansion in a vented container would cause the gas and liquid to take up more space. In a sealed container the gas and liquid are not able to take up more space and so the pressure rises. Pressurized vessels containing liquids can reach an equilibrium where the liquid stops boiling and the pressure stops rising. This occurs when no more heat is being added to the system (either because it has reached ambient temperature or has had a heat source removed).
The boiling temperature of a liquid is dependent on pressure - high pressures will yield high boiling temperatures, and low pressures will yield low boiling temperatures. A common simple experiment is to place a cup of water in a vacuum chamber, and then reduce the pressure in the chamber until the water boils. By reducing the pressure the water will boil even at room temperature. This works both ways - if the pressure is increased beyond normal atmospheric pressures, the boiling of hot water could be suppressed far beyond normal temperatures. The cooling system of a modern internal combustion engine is a real-world example.
When a liquid boils it turns into a gas. The resulting gas takes up far more space than the liquid did.
Typically, a BLEVE starts with a container of liquid which is held above its normal, atmospheric-pressure boiling temperature. Many substances normally stored as liquids, such as CO2, oxygen, and other similar industrial gases have boiling temperatures, at atmospheric pressure, far below room temperature. In the case of water, a BLEVE could occur if a pressurized chamber of water is heated far beyond the standard 100 °C (212 °F). That container, because the boiling water pressurizes it, is capable of holding liquid water at very high temperatures.
If the pressurized vessel, containing liquid at high temperature (which may be room temperature, depending on the substance) ruptures, the pressure which prevents the liquid from boiling is lost. If the rupture is catastrophic, where the vessel is immediately incapable of holding any pressure at all, then there suddenly exists a large mass of liquid which is at very high temperature and very low pressure. This causes the entire volume of liquid to instantaneously boil, which in turn causes an extremely rapid expansion. Depending on temperatures, pressures and the substance involved, that expansion may be so rapid that it can be classified as an explosion, fully capable of inflicting severe damage on its surroundings.`)
|
||||
|
||||
func BenchmarkTokenizeEnglishText(b *testing.B) {
|
||||
|
||||
tokenizer := NewUnicodeTokenizer()
|
||||
b.ResetTimer()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
tokenizer.Tokenize(sampleLargeInput)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestConvertType(t *testing.T) {
|
||||
tests := []struct {
|
||||
in int
|
||||
out analysis.TokenType
|
||||
}{
|
||||
{
|
||||
segment.Ideo, analysis.Ideographic,
|
||||
},
|
||||
{
|
||||
segment.Kana, analysis.Ideographic,
|
||||
},
|
||||
{
|
||||
segment.Number, analysis.Numeric,
|
||||
},
|
||||
{
|
||||
segment.Letter, analysis.AlphaNumeric,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
actual := convertType(test.in)
|
||||
if actual != test.out {
|
||||
t.Errorf("expected %d, got %d for %d", test.out, actual, test.in)
|
||||
}
|
||||
}
|
||||
}
|
50
analysis/tokenizer/web/web.go
Normal file
50
analysis/tokenizer/web/web.go
Normal file
|
@ -0,0 +1,50 @@
|
|||
// Copyright (c) 2014 Couchbase, Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package web
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/blevesearch/bleve/v2/analysis"
|
||||
"github.com/blevesearch/bleve/v2/analysis/tokenizer/exception"
|
||||
"github.com/blevesearch/bleve/v2/analysis/tokenizer/unicode"
|
||||
"github.com/blevesearch/bleve/v2/registry"
|
||||
)
|
||||
|
||||
// Name is the registry identifier for the web tokenizer.
const Name = "web"

var (
	// email matches an RFC 5322-style address, including quoted local
	// parts and bracketed IP-literal hosts. Backticks cannot appear in
	// raw string literals, hence the concatenation.
	email = `(?:[a-z0-9!#$%&'*+/=?^_` + "`" + `{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_` + "`" + `{|}~-]+)*|"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*")@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?|\[(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?|[a-z0-9-]*[a-z0-9]:(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])`

	// url matches scheme-prefixed URLs as well as bare www./domain forms
	// (case-insensitive).
	url = `(?i)\b((?:[a-z][\w-]+:(?:/{1,3}|[a-z0-9%])|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s` + "`" + `!()\[\]{};:'".,<>?«»“”‘’]))`

	// twitterHandle matches @mentions of 1-15 word characters.
	twitterHandle = `@([a-zA-Z0-9_]){1,15}`

	// twitterHashtag matches #tags of one or more word characters.
	twitterHashtag = `#([a-zA-Z0-9_])+`

	// exceptions lists the patterns that must be kept intact as single
	// tokens instead of being split by the underlying unicode tokenizer.
	exceptions = []string{email, url, twitterHandle, twitterHashtag}

	// exceptionsRegexp ORs all exception patterns together; compiled
	// once at package initialization.
	exceptionsRegexp = regexp.MustCompile(strings.Join(exceptions, "|"))
)
|
||||
|
||||
func TokenizerConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.Tokenizer, error) {
|
||||
remainingTokenizer, err := cache.TokenizerNamed(unicode.Name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return exception.NewExceptionsTokenizer(exceptionsRegexp, remainingTokenizer), nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
err := registry.RegisterTokenizer(Name, TokenizerConstructor)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
148
analysis/tokenizer/web/web_test.go
Normal file
148
analysis/tokenizer/web/web_test.go
Normal file
|
@ -0,0 +1,148 @@
|
|||
// Copyright (c) 2014 Couchbase, Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package web
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/blevesearch/bleve/v2/analysis"
|
||||
"github.com/blevesearch/bleve/v2/registry"
|
||||
)
|
||||
|
||||
func TestWeb(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
input []byte
|
||||
output analysis.TokenStream
|
||||
}{
|
||||
{
|
||||
[]byte("Hello info@blevesearch.com"),
|
||||
analysis.TokenStream{
|
||||
{
|
||||
Start: 0,
|
||||
End: 5,
|
||||
Term: []byte("Hello"),
|
||||
Position: 1,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
{
|
||||
Start: 6,
|
||||
End: 26,
|
||||
Term: []byte("info@blevesearch.com"),
|
||||
Position: 2,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
[]byte("That http://blevesearch.com"),
|
||||
analysis.TokenStream{
|
||||
{
|
||||
Start: 0,
|
||||
End: 4,
|
||||
Term: []byte("That"),
|
||||
Position: 1,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
{
|
||||
Start: 5,
|
||||
End: 27,
|
||||
Term: []byte("http://blevesearch.com"),
|
||||
Position: 2,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
[]byte("Hey @blevesearch"),
|
||||
analysis.TokenStream{
|
||||
{
|
||||
Start: 0,
|
||||
End: 3,
|
||||
Term: []byte("Hey"),
|
||||
Position: 1,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
{
|
||||
Start: 4,
|
||||
End: 16,
|
||||
Term: []byte("@blevesearch"),
|
||||
Position: 2,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
[]byte("This #bleve"),
|
||||
analysis.TokenStream{
|
||||
{
|
||||
Start: 0,
|
||||
End: 4,
|
||||
Term: []byte("This"),
|
||||
Position: 1,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
{
|
||||
Start: 5,
|
||||
End: 11,
|
||||
Term: []byte("#bleve"),
|
||||
Position: 2,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
[]byte("What about @blevesearch?"),
|
||||
analysis.TokenStream{
|
||||
{
|
||||
Start: 0,
|
||||
End: 4,
|
||||
Term: []byte("What"),
|
||||
Position: 1,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
{
|
||||
Start: 5,
|
||||
End: 10,
|
||||
Term: []byte("about"),
|
||||
Position: 2,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
{
|
||||
Start: 11,
|
||||
End: 23,
|
||||
Term: []byte("@blevesearch"),
|
||||
Position: 3,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
cache := registry.NewCache()
|
||||
tokenizer, err := cache.TokenizerNamed(Name)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
|
||||
actual := tokenizer.Tokenize(test.input)
|
||||
if !reflect.DeepEqual(actual, test.output) {
|
||||
t.Errorf("Expected %v, got %v for %s", test.output, actual, string(test.input))
|
||||
}
|
||||
}
|
||||
}
|
40
analysis/tokenizer/whitespace/whitespace.go
Normal file
40
analysis/tokenizer/whitespace/whitespace.go
Normal file
|
@ -0,0 +1,40 @@
|
|||
// Copyright (c) 2014 Couchbase, Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package whitespace
|
||||
|
||||
import (
|
||||
"unicode"
|
||||
|
||||
"github.com/blevesearch/bleve/v2/analysis"
|
||||
"github.com/blevesearch/bleve/v2/analysis/tokenizer/character"
|
||||
"github.com/blevesearch/bleve/v2/registry"
|
||||
)
|
||||
|
||||
const Name = "whitespace"
|
||||
|
||||
func TokenizerConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.Tokenizer, error) {
|
||||
return character.NewCharacterTokenizer(notSpace), nil
|
||||
}
|
||||
|
||||
// notSpace reports whether r belongs inside a token, i.e. whether it
// is anything other than Unicode whitespace.
func notSpace(r rune) bool {
	return !unicode.IsSpace(r)
}
|
||||
|
||||
func init() {
|
||||
err := registry.RegisterTokenizer(Name, TokenizerConstructor)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
106
analysis/tokenizer/whitespace/whitespace_test.go
Normal file
106
analysis/tokenizer/whitespace/whitespace_test.go
Normal file
|
@ -0,0 +1,106 @@
|
|||
// Copyright (c) 2014 Couchbase, Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package whitespace
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/blevesearch/bleve/v2/analysis"
|
||||
"github.com/blevesearch/bleve/v2/analysis/tokenizer/character"
|
||||
)
|
||||
|
||||
func TestBoundary(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
input []byte
|
||||
output analysis.TokenStream
|
||||
}{
|
||||
{
|
||||
[]byte("Hello World."),
|
||||
analysis.TokenStream{
|
||||
{
|
||||
Start: 0,
|
||||
End: 5,
|
||||
Term: []byte("Hello"),
|
||||
Position: 1,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
{
|
||||
Start: 6,
|
||||
End: 12,
|
||||
Term: []byte("World."),
|
||||
Position: 2,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
[]byte("こんにちは世界"),
|
||||
analysis.TokenStream{
|
||||
{
|
||||
Start: 0,
|
||||
End: 21,
|
||||
Term: []byte("こんにちは世界"),
|
||||
Position: 1,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
[]byte(""),
|
||||
analysis.TokenStream{},
|
||||
},
|
||||
{
|
||||
[]byte("abc界"),
|
||||
analysis.TokenStream{
|
||||
{
|
||||
Start: 0,
|
||||
End: 6,
|
||||
Term: []byte("abc界"),
|
||||
Position: 1,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
tokenizer := character.NewCharacterTokenizer(notSpace)
|
||||
actual := tokenizer.Tokenize(test.input)
|
||||
|
||||
if !reflect.DeepEqual(actual, test.output) {
|
||||
t.Errorf("Expected %v, got %v for %s", test.output, actual, string(test.input))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// sampleLargeInput is a multi-paragraph English text used as a
// realistic workload for BenchmarkTokenizeEnglishText below.
var sampleLargeInput = []byte(`There are three characteristics of liquids which are relevant to the discussion of a BLEVE:
If a liquid in a sealed container is boiled, the pressure inside the container increases. As the liquid changes to a gas it expands - this expansion in a vented container would cause the gas and liquid to take up more space. In a sealed container the gas and liquid are not able to take up more space and so the pressure rises. Pressurized vessels containing liquids can reach an equilibrium where the liquid stops boiling and the pressure stops rising. This occurs when no more heat is being added to the system (either because it has reached ambient temperature or has had a heat source removed).
The boiling temperature of a liquid is dependent on pressure - high pressures will yield high boiling temperatures, and low pressures will yield low boiling temperatures. A common simple experiment is to place a cup of water in a vacuum chamber, and then reduce the pressure in the chamber until the water boils. By reducing the pressure the water will boil even at room temperature. This works both ways - if the pressure is increased beyond normal atmospheric pressures, the boiling of hot water could be suppressed far beyond normal temperatures. The cooling system of a modern internal combustion engine is a real-world example.
When a liquid boils it turns into a gas. The resulting gas takes up far more space than the liquid did.
Typically, a BLEVE starts with a container of liquid which is held above its normal, atmospheric-pressure boiling temperature. Many substances normally stored as liquids, such as CO2, oxygen, and other similar industrial gases have boiling temperatures, at atmospheric pressure, far below room temperature. In the case of water, a BLEVE could occur if a pressurized chamber of water is heated far beyond the standard 100 °C (212 °F). That container, because the boiling water pressurizes it, is capable of holding liquid water at very high temperatures.
If the pressurized vessel, containing liquid at high temperature (which may be room temperature, depending on the substance) ruptures, the pressure which prevents the liquid from boiling is lost. If the rupture is catastrophic, where the vessel is immediately incapable of holding any pressure at all, then there suddenly exists a large mass of liquid which is at very high temperature and very low pressure. This causes the entire volume of liquid to instantaneously boil, which in turn causes an extremely rapid expansion. Depending on temperatures, pressures and the substance involved, that expansion may be so rapid that it can be classified as an explosion, fully capable of inflicting severe damage on its surroundings.`)
|
||||
|
||||
func BenchmarkTokenizeEnglishText(b *testing.B) {
|
||||
|
||||
tokenizer := character.NewCharacterTokenizer(notSpace)
|
||||
b.ResetTimer()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
tokenizer.Tokenize(sampleLargeInput)
|
||||
}
|
||||
|
||||
}
|
Loading…
Add table
Add a link
Reference in a new issue