+ # Decide whether the cached (JSON) copy must be regenerated from the
+ # YAML source. An explicit force_refresh wins outright; otherwise the
+ # cache is stale when the source is strictly newer than it, and a
+ # missing/unstattable cache file also forces a refresh.
+ # NOTE(review): `ns` is presumably a mutable namespace shared with the
+ # nested lock-holding functions below -- confirm against the enclosing
+ # load() definition, which is outside this hunk.
+ if force_refresh:
+ ns.do_refresh = True
+ else:
+ src_mtime = getmtime(src_path)
+ try: cache_mtime = getmtime(cache_path)
+ # No readable cache mtime => treat the cache as stale.
+ except OSError: ns.do_refresh = True
+ # Strict `>`: equal mtimes count as fresh, so a same-second rewrite of
+ # the source may be missed -- acceptable for a config cache.
+ else: ns.do_refresh = src_mtime > cache_mtime
+
+ if not ns.do_refresh:
+ # Try reading from the cache first. This must be transactionally
+ # isolated from concurrent writes to prevent reading an incomplete
+ # (changing) version of the data (but the transaction can share the
+ # lock with other concurrent reads).
+ # NOTE(review): the decorator appears to *execute* read_cache
+ # immediately while holding a shared (False => non-exclusive?) lock,
+ # rather than merely wrapping it -- confirm against with_lock_file.
+ @with_lock_file(lock_file, False)
+ def read_cache():
+ # Parse the cached JSON into ns.cfg; with_closing presumably closes
+ # the file handle after the lambda runs (json-py style json.read).
+ try: ns.cfg = with_closing(file(cache_path)) (
+ lambda f: json.read(f.read()))
+ # Any failure (missing, truncated, or corrupt cache) falls back to a
+ # full refresh. NOTE(review): bare `except:` also swallows
+ # KeyboardInterrupt/SystemExit -- consider narrowing to Exception.
+ except: ns.do_refresh = True
+
+ if ns.do_refresh:
+ # Atomically reload the source and regenerate the cache. The read and
+ # write must be a single transaction, or a stale version may be
+ # written.
+ # NOTE(review): no second argument here, so this presumably takes the
+ # lock exclusively (contrast the shared read lock above) -- confirm.
+ @with_lock_file(lock_file)
+ def refresh_cache():
+ # Lazy import: yaml is only needed on the (rare) refresh path.
+ import yaml
+ # Parse the YAML source with the C-accelerated safe loader
+ # (CSafeLoader does not construct arbitrary Python objects).
+ ns.cfg = with_closing(file(src_path)) (
+ lambda f: yaml.load(f, yaml.CSafeLoader))
+ # Best-effort cache write: a failure (e.g. read-only cache dir) is
+ # deliberately ignored -- the freshly parsed config is still returned.
+ try: with_closing(file(cache_path, 'w')) (
+ lambda f: f.write(json.write(ns.cfg)))
+ except: pass # silent failure
+ # NOTE(review): indentation was lost in this hunk; `return ns.cfg`
+ # presumably belongs to the enclosing load(), running after either the
+ # cache read or the refresh above -- confirm against the applied file.
+ return ns.cfg
+
+# Module-level initialization: parse the configuration once at import time
+# and expose it both as plain dicts and as an attribute-access struct view
+# (dicts2struct is defined elsewhere in this file/package).
+dicts = load()
+structs = dicts2struct(dicts)