@@ -50,9 +50,7 @@ def hash_digest(text):
     Arguments:
     text -- the text for which the digest should be created
     """
-    text_encoded = text.encode("utf8")
-    hash_result = hashlib.md5(text_encoded)
-    return hash_result.hexdigest()
+    return hashlib.md5(text.encode("utf-8")).hexdigest()


 def cache_local(tex_root, key, func):
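
hash_digest is now a single expression over hashlib; a quick, hypothetical usage check of the collapsed form (the input string is made up for illustration):

    import hashlib

    def hash_digest(text):
        # same one-liner as the patched function above
        return hashlib.md5(text.encode("utf-8")).hexdigest()

    print(hash_digest("\\documentclass{article}"))  # 32-character hex string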
@@ -173,27 +171,28 @@ def _global_cache_path():

 # marker class for invalidated result
 class InvalidObject:
-    _HASH = hash("_LaTeXTools_InvalidObject")
+    __slots__ = []
+    __hash = hash("_LaTeXTools_InvalidObject")

-    def __eq__(self, other):
+    @classmethod
+    def __hash__(cls):
+        return cls.__hash
+
+    @classmethod
+    def __eq__(cls, other):
         # in general, this is a bad pattern, since it will treat the
         # literal string "_LaTeXTools_InvalidObject" as being an invalid
         # object; nevertheless, we need an object identity that persists
         # across reloads, and this seems to be the only way to guarantee
         # that
-        return self._HASH == hash(other)
-
-    def __ne__(self, other):
-        return not self == other
-
-    def __hash__(self):
-        return self._HASH
-
+        try:
+            return cls.__hash == hash(other)
+        except TypeError:
+            return False

-try:
-    _invalid_object
-except NameError:
-    _invalid_object = InvalidObject()
+    @classmethod
+    def __ne__(cls, other):
+        return not cls == other


 class Cache:
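
The class itself is now stored as the sentinel: the later hunks assign InvalidObject (the class, not an instance) into the cache dict and compare entries against it, which is why the module-level _invalid_object instance and its reload guard can be dropped. A minimal sketch of that pattern, assuming the patched InvalidObject class above is in scope and using a hypothetical _objects dict:

    # illustrative only: mark and test an invalidated cache entry
    _objects = {}
    _objects["stale_key"] = InvalidObject            # invalidate the entry
    print(_objects["stale_key"] == InvalidObject)    # True, same class object
    print("fresh value" == InvalidObject)            # False, ordinary values stay valid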
@@ -216,14 +215,12 @@ def __init__(self):
             self._disk_lock = threading.Lock()
         if not hasattr(self, "_write_lock"):
             self._write_lock = threading.Lock()
-        if not hasattr(self, "_save_lock"):
-            self._save_lock = threading.Lock()
         if not hasattr(self, "_objects"):
             self._objects = {}
         if not hasattr(self, "_dirty"):
             self._dirty = False
         if not hasattr(self, "_save_queue"):
-            self._save_queue = []
+            self._save_queue = 0
         if not hasattr(self, "_pool"):
             self._pool = ThreadPool(2)

@@ -247,7 +244,7 @@ def get(self, key):
         # note: will raise CacheMiss if can't be found
         result = self.load(key)

-        if result == _invalid_object:
+        if result == InvalidObject:
             raise CacheMiss("{0} is invalid".format(key))

         # return a copy of any objects
@@ -269,7 +266,7 @@ def has(self, key):
         if key is None:
             raise ValueError("key cannot be None")

-        return key in self._objects and self._objects[key] != _invalid_object
+        return key in self._objects and self._objects[key] != InvalidObject

     def set(self, key, obj):
         """
@@ -284,11 +281,6 @@ def set(self, key, obj):
         if key is None:
             raise ValueError("key cannot be None")

-        try:
-            pickle.dumps(obj, protocol=-1)
-        except pickle.PicklingError:
-            raise ValueError("obj must be picklable")
-
         if isinstance(obj, list):
             obj = tuple(obj)
         elif isinstance(obj, dict):
@@ -336,7 +328,7 @@ def invalidate(self, key=None):

         def _invalidate(key):
             try:
-                self._objects[key] = _invalid_object
+                self._objects[key] = InvalidObject
             except Exception:
                 logger.error("error occurred while invalidating %s", key)
                 traceback.print_exc()
@@ -412,12 +404,12 @@ def save(self, key=None):
         with self._disk_lock:
             # operate on a stable copy of the object
             with self._write_lock:
-                _objs = pickle.loads(pickle.dumps(self._objects, protocol=-1))
+                _objs = self._objects.copy()
                 self._dirty = False

             if key is None:
                 # remove all InvalidObjects
-                delete_keys = [k for k in _objs if _objs[k] == _invalid_object]
+                delete_keys = [k for k in _objs if _objs[k] == InvalidObject]

                 for k in delete_keys:
                     del _objs[k]
@@ -442,7 +434,7 @@ def save(self, key=None):
                     logger.error("error while deleting %s: %s", self.cache_path, e)

             elif key in _objs:
-                if _objs[key] == _invalid_object:
+                if _objs[key] == InvalidObject:
                     file_path = os.path.join(self.cache_path, key)
                     try:
                         os.remove(file_path)
@@ -475,17 +467,16 @@ def _write(self, key, obj):
             raise CacheMiss()

     def _schedule_save(self):
-        with self._save_lock:
-            self._save_queue.append(0)
-            threading.Timer(0.5, self._debounce_save).start()
-
-    def _debounce_save(self):
-        with self._save_lock:
-            if len(self._save_queue) > 1:
-                self._save_queue.pop()
+        def _debounce():
+            self._save_queue -= 1
+            if self._save_queue > 0:
+                sublime.set_timeout(_debounce, 1000)
             else:
-                self._save_queue = []
-                sublime.set_timeout(self.save_async, 0)
+                self._save_queue = 0
+                self.save_async()
+
+        self._save_queue += 1
+        sublime.set_timeout(_debounce, 1000)

     # ensure cache is saved to disk when removed from memory
     def __del__(self):
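
The rewritten _schedule_save debounces saves with a plain counter: each call bumps _save_queue and arms a one-second sublime.set_timeout callback, and _debounce only triggers save_async once no further requests are pending, which removes the need for the old _save_lock and threading.Timer. For readers outside Sublime Text, a comparable cancel-and-re-arm debounce can be sketched with threading.Timer; the names below are illustrative and not part of the patch:

    import threading

    class DebouncedSave:
        """Coalesce a burst of save requests into a single save call."""

        def __init__(self, save, delay=1.0):
            self._save = save
            self._delay = delay
            self._timer = None
            self._lock = threading.Lock()

        def schedule(self):
            with self._lock:
                # cancel any pending timer and re-arm it, so the save only
                # fires after `delay` seconds without new requests
                if self._timer is not None:
                    self._timer.cancel()
                self._timer = threading.Timer(self._delay, self._save)
                self._timer.daemon = True
                self._timer.start()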
@@ -561,8 +552,6 @@ def get(self, key):

         return super(ValidatingCache, self).get(key)

-    get.__doc__ = Cache.get.__doc__
-
     def set(self, key, obj):
         if key is None:
             raise ValueError("key cannot be None")
@@ -571,8 +560,6 @@ def set(self, key, obj):

         return super(ValidatingCache, self).set(key, obj)

-    set.__doc__ = Cache.set.__doc__
-


 class InstanceTrackingCache(Cache):
     """