utils.py
import time
import sys
from multiprocessing import Process, Pipe
from itertools import izip
import os.path
import json

# Courtesy of
# http://stackoverflow.com/questions/3288595/multiprocessing-using-pool-map-on-a-function-defined-in-a-class
def spawn(f):
    # Wrap f so the child process sends its result back over a Pipe.
    def fun(pipe, x):
        pipe.send(f(x))
        pipe.close()
    return fun


def parmap(f, X):
    # Parallel map: one child Process per element of X, results returned over Pipes.
    # The target is a nested function, so this relies on fork-based multiprocessing (i.e. Unix).
    pipe = [Pipe() for x in X]
    proc = [Process(target=spawn(f), args=(c, x)) for x, (p, c) in izip(X, pipe)]
    [p.start() for p in proc]
    # Receive before joining: joining first can deadlock if a result overflows the pipe buffer.
    ret = [p.recv() for (p, c) in pipe]
    [p.join() for p in proc]
    return ret
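
# A minimal usage sketch for parmap (hypothetical names, not part of the original module):
#
#     def square(x):
#         return x * x
#
#     parmap(square, range(5))   # -> [0, 1, 4, 9, 16]
#
# Each element gets its own Process, so this is only worthwhile when f is
# expensive relative to process start-up.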


def timer(func):
    # Decorator: print how long each call to func took.
    def wrapped(*args, **kwargs):
        start = time.time()
        rv = func(*args, **kwargs)
        print "%02.1fs in %s" % (time.time() - start, func.__name__)
        return rv
    return wrapped


def fail_print(func):
    # Decorator: swallow any exception from func, print its type, and return None instead.
    def wrapped(*args, **kwargs):
        try:
            rv = func(*args, **kwargs)
        except:
            print sys.exc_info()[0]
            rv = None
        return rv
    return wrapped


def persist_to_file(original_func):
    """Each query gets written out to a file under ./cache/.
    Obviously this is much slower than saving key/values to Redis,
    but it's quick and doesn't break too much."""
    n = 100
    def decorator(*args, **kwargs):
        file_name = "./cache/"
        file_name += original_func.__name__
        for arg in args:
            file_name += str(arg)[:n]
        keys = sorted(kwargs.keys())
        for k in keys:
            v = kwargs[k]
            v = str(v)[:n]
            temp = "%s_%s-" % (k, v)
            temp = temp.replace("'", "")
            temp = temp.replace('"', "")
            temp = temp.replace('/', "")
            file_name += temp
        try:
            ret = json.load(open(file_name, 'r'))
        except (IOError, ValueError):
            ret = None
        if ret is None:
            ret = original_func(*args, **kwargs)
            try:
                json.dump(ret, open(file_name, 'w'))
            except:
                print "Failed to cache"
        return ret
    return decorator
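
# A minimal usage sketch for persist_to_file (hypothetical function, not part of
# the original module). It assumes a ./cache/ directory already exists and that
# the return value is JSON-serialisable:
#
#     @persist_to_file
#     def lookup(query):
#         return expensive_api_call(query)   # result cached under ./cache/lookup<query>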


def json_exception(original_func):
    # Decorator: if original_func raises, return a JSON error payload instead.
    def wrapper(*args, **kwargs):
        try:
            rv = original_func(*args, **kwargs)
        except:
            print "ERROR"
            dv = dict(error=str(sys.exc_info()))
            rv = json.dumps(dv)
        return rv
    return wrapper


class dummy_async():
    """This is faking an async result for debugging purposes"""
    def __init__(self, val):
        self.val = val
    def get(self):
        return self.val
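

# A small, self-contained demo of the helpers above (a sketch, assuming the
# module is run directly on a fork-capable platform); _square and _slow are
# illustrative names only.
if __name__ == "__main__":
    def _square(x):
        return x * x

    @timer
    def _slow():
        time.sleep(0.2)
        return "done"

    print parmap(_square, range(5))   # -> [0, 1, 4, 9, 16]
    print _slow()                     # @timer prints the elapsed time, then "done"
    print dummy_async(42).get()       # -> 42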