@@ -250,53 +250,14 @@ def escape(pattern):
 # --------------------------------------------------------------------
 # internals
 
-_cache = {}
-_cache_repl = {}
-
 _pattern_type = type(sre_compile.compile("", 0))
 
-_MAXCACHE = 500
-
-def _shrink_cache(cache_dict, max_length, divisor=5):
-    """Make room in the given cache.
-
-    Args:
-      cache_dict: The cache dictionary to modify.
-      max_length: Maximum # of entries in cache_dict before it is shrunk.
-      divisor: Cache will shrink to max_length - 1/divisor*max_length items.
-    """
-    # Toss out a fraction of the entries at random to make room for new ones.
-    # A random algorithm was chosen as opposed to simply cache_dict.popitem()
-    # as popitem could penalize the same regular expression repeatedly based
-    # on its internal hash value. Being random should spread the cache miss
-    # love around.
-    cache_keys = tuple(cache_dict.keys())
-    overage = len(cache_keys) - max_length
-    if overage < 0:
-        # Cache is already within limits. Normally this should not happen
-        # but it could due to multithreading.
-        return
-    number_to_toss = max_length // divisor + overage
-    # The import is done here to avoid a circular depencency.
-    import random
-    if not hasattr(random, 'sample'):
-        # Do nothing while resolving the circular dependency:
-        #   re->random->warnings->tokenize->string->re
-        return
-    for doomed_key in random.sample(cache_keys, number_to_toss):
-        try:
-            del cache_dict[doomed_key]
-        except KeyError:
-            # Ignore problems if the cache changed from another thread.
-            pass
-
-def _compile(*args):
-    return _compile_typed(type(args[0]), *args)
-
-@functools.lru_cache(maxsize=_MAXCACHE)
-def _compile_typed(type, *key):
+def _compile(pattern, flags):
+    return _compile_typed(type(pattern), pattern, flags)
+
+@functools.lru_cache(maxsize=500)
+def _compile_typed(text_bytes_type, pattern, flags):
     # internal: compile pattern
-    pattern, flags = key
     if isinstance(pattern, _pattern_type):
         if flags:
             raise ValueError(
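The hunk above drops the hand-rolled _cache dictionary and its _shrink_cache random-eviction helper in favor of functools.lru_cache: _compile now forwards to a memoized _compile_typed with explicit (text_bytes_type, pattern, flags) arguments, so the pattern's type stays part of the cache key (as it was in the old dict key) and eviction of least-recently-used entries is handled by the decorator rather than by random sampling. A minimal sketch of the same idea, written against the public re.compile rather than the internal sre_compile module (the names here are illustrative, not the stdlib's):

    import functools
    import re

    @functools.lru_cache(maxsize=500)
    def _compile_typed_sketch(pattern_type, pattern, flags):
        # pattern_type only widens the cache key; lru_cache evicts the
        # least recently used entry once maxsize is exceeded.
        return re.compile(pattern, flags)

    def compile_cached(pattern, flags=0):
        # Fold the pattern's type into the key, as the patch does, so text
        # and bytes patterns never share an entry.
        return _compile_typed_sketch(type(pattern), pattern, flags)

    compile_cached("a+b")      # miss: compiled and cached
    compile_cached("a+b")      # hit: returned from the cache
    compile_cached(b"a+b")     # separate entry for the bytes pattern
    print(_compile_typed_sketch.cache_info())

Unlike the removed _shrink_cache, which threw out a random fraction of the cache whenever it overflowed, lru_cache discards only the least recently used entry per overflow.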
@@ -305,12 +266,10 @@ def _compile_typed(type, *key):
     if not sre_compile.isstring(pattern):
         raise TypeError("first argument must be string or compiled pattern")
     return sre_compile.compile(pattern, flags)
-    return p
 
-@functools.lru_cache(maxsize=_MAXCACHE)
-def _compile_repl(*key):
+@functools.lru_cache(maxsize=500)
+def _compile_repl(repl, pattern):
     # internal: compile replacement pattern
-    repl, pattern = key
     return sre_parse.parse_template(repl, pattern)
 
 def _expand(pattern, match, template):
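The replacement-template cache gets the same treatment: _compile_repl takes explicit repl and pattern arguments instead of unpacking a *key tuple, an unreachable "return p" left over from the old dict-based cache is deleted, and functools.lru_cache(maxsize=500) stands in for the removed _cache_repl dictionary. One side effect is that both caches now expose lru_cache's bookkeeping; a small sketch with a stand-in for sre_parse.parse_template (not the real internals):

    import functools

    @functools.lru_cache(maxsize=500)
    def parse_repl_sketch(repl, pattern_text):
        # Stand-in for sre_parse.parse_template(repl, pattern): pretend to
        # parse the replacement string once and reuse the result afterwards.
        return ("parsed", repl, pattern_text)

    parse_repl_sketch(r"\1-\2", "(a)(b)")    # miss: parsed and cached
    parse_repl_sketch(r"\1-\2", "(a)(b)")    # hit: served from the cache
    print(parse_repl_sketch.cache_info())    # hits=1, misses=1, maxsize=500
    parse_repl_sketch.cache_clear()          # what a purge-style helper can call

Presumably re.purge() can now simply call cache_clear() on the two wrapped functions instead of emptying module-level dictionaries.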