[TIC-CORE] support caching for analysis data
[archive/20170607/tools/tic-core.git] / tic / utils / file.py
index daaee91..72911eb 100644 (file)
 import errno
 import gzip
 import os
+import shutil
+import json
+import time
 
+class FileLockException(Exception):
+    pass
+class FileLock(object):
+    """Advisory lock implemented by exclusively creating a <file_name>.lock file."""
+    def __init__(self, file_name, timeout=10, delay=.05):
+        self.is_locked = False
+        self.lockfile = os.path.join(os.getcwd(), "%s.lock" % file_name)
+        self.file_name = file_name
+        self.timeout = timeout
+        self.delay = delay
+        self.fd = None
+    def acquire(self):
+        # Retry every 'delay' seconds until the lock file can be created
+        # exclusively; give up with FileLockException after 'timeout' seconds.
+        start_time = time.time()
+        while True:
+            try:
+                self.fd = os.open(self.lockfile, os.O_CREAT|os.O_EXCL|os.O_RDWR)
+                break
+            except OSError as e:
+                if e.errno != errno.EEXIST:
+                    raise
+                if (time.time() - start_time) >= self.timeout:
+                    raise FileLockException("Timeout occurred waiting for %s" % self.lockfile)
+                time.sleep(self.delay)
+        self.is_locked = True
+    def release(self):
+        if self.is_locked:
+            os.close(self.fd)
+            os.unlink(self.lockfile)
+            self.is_locked = False
+    def __enter__(self):
+        if not self.is_locked:
+            self.acquire()
+        return self
+    def __exit__(self, exc_type, exc_value, traceback):
+        if self.is_locked:
+            self.release()
+    def __del__(self):
+        # Ensure the lock file is cleaned up even if release() was never called.
+        self.release()
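+# Illustrative usage (the path below is an example, not part of the module):
+#     with FileLock('/var/tmp/tic/analysis.json'):
+#         ...  # read or update the cached analysis data exclusively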
 
-def make_dirs(dirname):
+def make_dirs(path):
     try:
-        os.makedirs(dirname)
+        os.makedirs(path)
     except OSError as err:
         if err.errno != errno.EEXIST:
             raise
 
 def write(path, data):
-    file_path = os.path.join(path, 'tic_view.json')
-    with(open(file_path, 'w')) as f:
+    # ensure the destination directory exists before writing
+    make_dirs(os.path.dirname(path))
+    with open(path, 'w') as f:
         f.write(data)
         
+def write_json_flock(path, data):
+    # Serialize 'data' to JSON and write it while holding a FileLock on 'path'.
+    try:
+        make_dirs(os.path.dirname(path))
+        with FileLock(path):
+            with open(path, 'w') as f:
+                f.write(json.dumps(data))
+    except FileLockException as fe:
+        # another process holds the lock; skip this write
+        print(fe)
+    except OSError as e:
+        if e.errno != errno.EEXIST:
+            print(e)
+
+def read_json(path):
+    # Return the decoded JSON content of 'path', or None if it cannot be read.
+    ret = None
+    try:
+        with open(path) as f:
+            ret = json.load(f)
+    except ValueError as ve:
+        # the file exists but does not contain valid JSON
+        print(ve)
+    except (OSError, IOError) as e:
+        print(e)
+    return ret
+
 def decompress_gzip(input_path, output_path):
     with gzip.open(input_path, 'rb') as fobj:
         f = open(output_path, 'wb')
         f.write(fobj.read())
         f.close()
     return output_path
-     
-# def decompress_bunzip(intput_path, output_path):
-#     with open(output_path, 'wb') as new_file, bz2.BZ2File(intput_path, 'rb') as file:
-#         for data in iter(lambda : file.read(100 * 1024), b''):
-#             new_file.write(data)
 
+def copyfile_flock(src, dest):
+    # Copy 'src' to 'dest' while holding a FileLock on 'dest'.
+    # On failure the original 'src' path is returned so callers can fall back to it.
+    ret = dest
+    try:
+        with FileLock(dest):
+            shutil.copy(src, dest)
+    except FileLockException as fe:
+        print(fe)
+        ret = src
+    except OSError as e:
+        if e.errno != errno.EEXIST:
+            print(e)
+        ret = src
+    return ret
 
\ No newline at end of file
+def copyfile(src, dst, filename=None):
+    # Copy 'src' into directory 'dst'; 'filename' optionally renames the copy.
+    abs_dst = os.path.abspath(os.path.expanduser(dst))
+    make_dirs(abs_dst)
+    if filename:
+        abs_dst = os.path.join(abs_dst, filename)
+    shutil.copy(src, abs_dst)
+    if filename:
+        return abs_dst
+    return os.path.join(abs_dst, os.path.basename(src))
\ No newline at end of file
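A minimal sketch of how these helpers could be combined to cache analysis data; the cache path and the placeholder payload are illustrative assumptions, not part of the commit:

from tic.utils import file as file_util

CACHE_PATH = '/var/tmp/tic/analysis_cache.json'   # illustrative cache location

# Try the cache first; read_json() returns None when the file is missing or corrupt.
analysis = file_util.read_json(CACHE_PATH)
if analysis is None:
    analysis = {'packages': [], 'repositories': []}   # placeholder for a real analysis result
    # Persist the result under a .lock guard so concurrent workers do not clobber each other.
    file_util.write_json_flock(CACHE_PATH, analysis)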