 # specific language governing permissions and limitations
 # under the License.
 
-import six
-
 from .connections import get_connection
 from .utils import AttrDict, DslBase, merge
 
 __all__ = ["tokenizer", "analyzer", "char_filter", "token_filter", "normalizer"]
 
 
-class AnalysisBase(object):
+class AnalysisBase:
     @classmethod
     def _type_shortcut(cls, name_or_instance, type=None, **kwargs):
         if isinstance(name_or_instance, cls):
             if type or kwargs:
-                raise ValueError("%s() cannot accept parameters." % cls.__name__)
+                raise ValueError(f"{cls.__name__}() cannot accept parameters.")
             return name_or_instance
 
         if not (type or kwargs):
@@ -39,20 +37,20 @@ def _type_shortcut(cls, name_or_instance, type=None, **kwargs):
         )
 
 
-class CustomAnalysis(object):
+class CustomAnalysis:
     name = "custom"
 
     def __init__(self, filter_name, builtin_type="custom", **kwargs):
         self._builtin_type = builtin_type
         self._name = filter_name
-        super(CustomAnalysis, self).__init__(**kwargs)
+        super().__init__(**kwargs)
 
     def to_dict(self):
         # only name to present in lists
         return self._name
 
     def get_definition(self):
-        d = super(CustomAnalysis, self).to_dict()
+        d = super().to_dict()
         d = d.pop(self.name)
         d["type"] = self._builtin_type
         return d
@@ -92,12 +90,12 @@ def get_analysis_definition(self):
         return out
 
 
-class BuiltinAnalysis(object):
+class BuiltinAnalysis:
     name = "builtin"
 
     def __init__(self, name):
         self._name = name
-        super(BuiltinAnalysis, self).__init__()
+        super().__init__()
 
     def to_dict(self):
         # only name to present in lists
@@ -148,7 +146,7 @@ def simulate(self, text, using="default", explain=False, attributes=None):
             sec_def = definition.get(section, {})
             sec_names = analyzer_def[section]
 
-            if isinstance(sec_names, six.string_types):
+            if isinstance(sec_names, str):
                 body[section] = sec_def.get(sec_names, sec_names)
             else:
                 body[section] = [
@@ -213,7 +211,7 @@ def get_definition(self):
         if "filters" in d:
             d["filters"] = [
                 # comma delimited string given by user
-                fs if isinstance(fs, six.string_types) else
+                fs if isinstance(fs, str) else
                 # list of strings or TokenFilter objects
                 ", ".join(f.to_dict() if hasattr(f, "to_dict") else f for f in fs)
                 for fs in self.filters
@@ -227,7 +225,7 @@ def get_analysis_definition(self):
         fs = {}
         d = {"filter": fs}
         for filters in self.filters:
-            if isinstance(filters, six.string_types):
+            if isinstance(filters, str):
                 continue
             fs.update(
                 {
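For context only, not part of the diff: a minimal, self-contained sketch of the Python 3-only idioms this change adopts once `six` and the Python 2 compatibility shims are removed. The class and values below are hypothetical illustrations, not code from this module; only the idioms mirror the hunks above.

```python
# Hypothetical example; only the idioms mirror the diff above.

class NamedThing:  # no explicit `object` base needed on Python 3
    def __init__(self, name, **kwargs):
        self._name = name
        # zero-argument super() replaces super(NamedThing, self)
        super().__init__(**kwargs)

    def describe(self, aliases):
        # isinstance(..., str) replaces isinstance(..., six.string_types);
        # Python 3 has a single text type
        if isinstance(aliases, str):
            aliases = [aliases]
        # f-string replaces "%s(...)" % formatting
        return f"{type(self).__name__}({self._name}): {', '.join(aliases)}"


print(NamedThing("lowercase").describe("token_filter"))
print(NamedThing("my_analyzer").describe(["tokenizer", "filter"]))
```

The same substitutions repeat throughout the hunks: `class X(object)` becomes `class X`, `super(X, self)` becomes `super()`, `six.string_types` becomes `str`, and `%`-formatting becomes an f-string.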