Lines Matching full:self

 21     def __init__(self, sample):
 22         self.sample = sample
 24     def start(self):
 25         self.start_time = time.perf_counter()
 27     def end(self):
 28         self.sample.add(time.perf_counter() - self.start_time)
 30     def __enter__(self):
 31         self.start()
 32         return self
 34     def __exit__(self, *args, **kwargs):
 35         self.end()
 39     def __init__(self, stats):
 40         self.stats = stats
 41         self.num_samples = 0
 42         self.elapsed = 0
 44     def measure(self):
 45         return Measurement(self)
 47     def __enter__(self):
 48         return self
 50     def __exit__(self, *args, **kwargs):
 51         self.end()
 53     def add(self, elapsed):
 54         self.num_samples += 1
 55         self.elapsed += elapsed
 57     def end(self):
 58         if self.num_samples:
 59             self.stats.add(self.elapsed)
 60             self.num_samples = 0
 61             self.elapsed = 0
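
Taken together, the matches from source lines 21 through 61 describe a two-level timing helper: a Measurement times a single block with time.perf_counter() and hands the delta to its parent through add(), while a Sample batches several measurements and reports their combined elapsed time exactly once when it ends. Below is a minimal, self-contained sketch of that pattern; the underscore-prefixed names and the sleep workload are illustrative stand-ins, not code from the file.

import time

class _Timer:
    # Sketch of the Measurement role: time one block, report the delta upward.
    def __init__(self, parent):
        self.parent = parent

    def __enter__(self):
        self.start_time = time.perf_counter()
        return self

    def __exit__(self, *exc):
        self.parent.add(time.perf_counter() - self.start_time)

class _Batch:
    # Sketch of the Sample role: accumulate timings, flush one total on exit.
    def __init__(self, sink):
        self.sink = sink              # anything with an add(elapsed) method
        self.elapsed = 0
        self.num_samples = 0

    def measure(self):
        return _Timer(self)

    def add(self, elapsed):
        self.num_samples += 1
        self.elapsed += elapsed

    def __enter__(self):
        return self

    def __exit__(self, *exc):
        if self.num_samples:
            self.sink.add(self.elapsed)

class _ListSink:
    # Trivial sink used only for this demo.
    def __init__(self):
        self.values = []

    def add(self, elapsed):
        self.values.append(elapsed)

sink = _ListSink()
with _Batch(sink) as batch:
    with batch.measure():
        time.sleep(0.01)              # stand-in workload
    with batch.measure():
        time.sleep(0.01)
print(sink.values)                    # one combined elapsed value, not two

Batching in the sample means a request that performs several timed steps still contributes a single data point to the downstream statistics.
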
 65     def __init__(self):
 66         self.reset()
 68     def reset(self):
 69         self.num = 0
 70         self.total_time = 0
 71         self.max_time = 0
 72         self.m = 0
 73         self.s = 0
 74         self.current_elapsed = None
 76     def add(self, elapsed):
 77         self.num += 1
 78         if self.num == 1:
 79             self.m = elapsed
 80             self.s = 0
 82             last_m = self.m
 83             self.m = last_m + (elapsed - last_m) / self.num
 84             self.s = self.s + (elapsed - last_m) * (elapsed - self.m)
 86         self.total_time += elapsed
 88         if self.max_time < elapsed:
 89             self.max_time = elapsed
 91     def start_sample(self):
 92         return Sample(self)
 95     def average(self):
 96         if self.num == 0:
 98         return self.total_time / self.num
101     def stdev(self):
102         if self.num <= 1:
104         return math.sqrt(self.s / (self.num - 1))
106     def todict(self):
107         return {k: getattr(self, k) for k in ('num', 'total_time', 'max_time', 'average', 'stdev')}
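
The m and s updates at source lines 77-84, together with average (95-98) and stdev (101-104), are the standard Welford online algorithm: the mean is updated incrementally and s accumulates the sum of squared deviations from the running mean, so the sample standard deviation is sqrt(s / (num - 1)) without keeping the individual timings. The fact that todict() reads average and stdev with getattr() suggests they are properties whose decorator lines simply did not match the search. A small self-contained check of the recurrence against Python's statistics module:

import math
import random
import statistics

def welford(values):
    # Same recurrence as the matched Stats.add() lines: m is the running mean,
    # s the running sum of squared deviations from that mean.
    num = 0
    m = 0.0
    s = 0.0
    for x in values:
        num += 1
        if num == 1:
            m = x
            s = 0.0
        else:
            last_m = m
            m = last_m + (x - last_m) / num
            s = s + (x - last_m) * (x - m)
    stdev = math.sqrt(s / (num - 1)) if num > 1 else 0.0
    return m, stdev

data = [random.uniform(0.001, 0.05) for _ in range(1000)]
mean, stdev = welford(data)
assert math.isclose(mean, statistics.mean(data))
assert math.isclose(stdev, statistics.stdev(data))
print(mean, stdev)

The payoff for the server is that a Stats counter can be queried or reset at any time while holding only a handful of numbers, never the per-request samples themselves.
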
161     def __init__(self, db, cursor, upstream):
162         self.db = db
163         self.cursor = cursor
164         self.upstream = upstream
168     def __init__(self, reader, writer, db, request_stats, backfill_queue, upstream, read_only):
170         self.db = db
171         self.request_stats = request_stats
172         self.max_chunk = bb.asyncrpc.DEFAULT_MAX_CHUNK
173         self.backfill_queue = backfill_queue
174         self.upstream = upstream
176         self.handlers.update({
177             'get': self.handle_get,
178             'get-outhash': self.handle_get_outhash,
179             'get-stream': self.handle_get_stream,
180             'get-stats': self.handle_get_stats,
184         self.handlers.update({
185             'report': self.handle_report,
186             'report-equiv': self.handle_equivreport,
187             'reset-stats': self.handle_reset_stats,
188             'backfill-wait': self.handle_backfill_wait,
191     def validate_proto_version(self):
192         return (self.proto_version > (1, 0) and self.proto_version <= (1, 1))
194     async def process_requests(self):
195         if self.upstream is not None:
196             self.upstream_client = await create_async_client(self.upstream)
198             self.upstream_client = None
202             if self.upstream_client is not None:
203                 await self.upstream_client.close()
205     async def dispatch_message(self, msg):
206         for k in self.handlers.keys():
210                 await self.handlers[k](msg[k])
212             with self.request_stats.start_sample() as self.request_sample, \
213                     self.request_sample.measure():
214                 await self.handlers[k](msg[k])
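
dispatch_message (source lines 205-214) picks the handler whose key appears in the incoming message; streaming requests are called directly, while ordinary requests run inside a request-stats sample so every handled message is timed. A rough self-contained sketch of that dispatch-and-measure shape, with made-up handler names, a plain list as the stats sink, and a simplified message format:

import asyncio
import time

class _RequestStats:
    # Illustrative accumulator standing in for the Stats/Sample pair above.
    def __init__(self):
        self.timings = []

    def add(self, elapsed):
        self.timings.append(elapsed)

class _Dispatcher:
    def __init__(self):
        self.stats = _RequestStats()
        self.handlers = {
            'get': self.handle_get,          # hypothetical handlers
            'report': self.handle_report,
        }

    async def handle_get(self, request):
        await asyncio.sleep(0)               # stand-in work
        return {'found': request}

    async def handle_report(self, request):
        await asyncio.sleep(0)
        return {'stored': request}

    async def dispatch_message(self, msg):
        # Find the first handler key present in the message and time the call;
        # the finally block plays the role of the context managers above.
        for k in self.handlers:
            if k in msg:
                start = time.perf_counter()
                try:
                    return await self.handlers[k](msg[k])
                finally:
                    self.stats.add(time.perf_counter() - start)
        raise ValueError('unrecognized message %r' % msg)

async def main():
    d = _Dispatcher()
    print(await d.dispatch_message({'get': {'taskhash': 'abc'}}))
    print(d.stats.timings)

asyncio.run(main())

Recording the timing in a finally block, or via a context manager as in the matched lines, ensures a failing handler is still counted.
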
219     async def handle_get(self, request):
224         with closing(self.db.cursor()) as cursor:
225             d = await self.get_unihash(cursor, method, taskhash, fetch_all)
227         self.write_message(d)
229     async def get_unihash(self, cursor, method, taskhash, fetch_all=False):
251         elif self.upstream_client is not None:
252             d = await self.upstream_client.get_taskhash(method, taskhash, True)
253             self.update_unified(cursor, d)
254             self.db.commit()
256             row = self.query_equivalent(cursor, method, taskhash)
260             elif self.upstream_client is not None:
261                 d = await self.upstream_client.get_taskhash(method, taskhash)
264                 self.db.commit()
268     async def handle_get_outhash(self, request):
273         with closing(self.db.cursor()) as cursor:
274             d = await self.get_outhash(cursor, method, outhash, taskhash)
276         self.write_message(d)
278     async def get_outhash(self, cursor, method, outhash, taskhash):
297         elif self.upstream_client is not None:
298             d = await self.upstream_client.get_outhash(method, outhash, taskhash)
299             self.update_unified(cursor, d)
300             self.db.commit()
304     def update_unified(self, cursor, data):
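
The lookup helpers above (get_unihash at source lines 229-264, get_outhash at 278-300, update_unified at 304) follow a read-through shape: answer from the local database when an equivalent row exists, otherwise ask the configured upstream client, fold its answer back into the local store, and commit. A generic sketch of that fallback, using a dict in place of the database and a fake upstream client; none of these names are the server's own:

import asyncio

class _FakeUpstream:
    # Stand-in for the upstream hash-equivalence client; only the lookup
    # method used below is modelled.
    def __init__(self, data):
        self.data = data

    async def get_taskhash(self, method, taskhash):
        await asyncio.sleep(0)
        return self.data.get((method, taskhash))

async def get_unihash(local, upstream, method, taskhash):
    # 1. Try the local store first.
    row = local.get((method, taskhash))
    if row is not None:
        return row
    # 2. Fall back to upstream and cache the answer locally (the matched
    #    handlers write it through a database cursor and commit instead).
    if upstream is not None:
        row = await upstream.get_taskhash(method, taskhash)
        if row is not None:
            local[(method, taskhash)] = row
    return row

async def main():
    local = {}
    upstream = _FakeUpstream({('sha256', 'aaaa'): {'unihash': 'bbbb'}})
    print(await get_unihash(local, upstream, 'sha256', 'aaaa'))   # fetched upstream
    print(await get_unihash(local, upstream, 'sha256', 'aaaa'))   # now served locally

asyncio.run(main())
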
319     async def handle_get_stream(self, request):
320         self.write_message('ok')
325             l = await self.reader.readline()
334                 self.request_sample = self.request_stats.start_sample()
335                 request_measure = self.request_sample.measure()
340                     self.writer.write('ok\n'.encode('utf-8'))
345                 cursor = self.db.cursor()
347                     row = self.query_equivalent(cursor, method, taskhash)
354                 elif self.upstream_client is not None:
355                     upstream = await self.upstream_client.get_unihash(method, taskhash)
363                 self.writer.write(msg)
366                 self.request_sample.end()
368             await self.writer.drain()
373                 await self.backfill_queue.put((method, taskhash))
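
handle_get_stream (source lines 319-373) switches the connection into a line-oriented fast path: each request is one line read with reader.readline(), the reply is one line written back and drained, per-line timing is driven manually through start_sample()/measure()/end(), and any hash that had to come from upstream is queued for backfill only after the reply has gone out. A stripped-down sketch of that kind of asyncio line loop; the lookup callable, the fake writer, and the message format are placeholders, not the server's protocol:

import asyncio

class _FakeWriter:
    # Minimal stand-in for asyncio.StreamWriter, just enough for this demo.
    def __init__(self):
        self.chunks = []

    def write(self, data):
        self.chunks.append(data)

    async def drain(self):
        pass

async def handle_stream(reader, writer, lookup, backfill_queue):
    # One request per line; 'END' leaves stream mode. lookup() is a placeholder
    # returning (result_or_None, came_from_upstream).
    while True:
        line = await reader.readline()
        if not line:
            return
        line = line.decode('utf-8').rstrip()
        if line == 'END':
            writer.write(b'ok\n')
            await writer.drain()
            return
        result, from_upstream = lookup(line)
        writer.write(((result or '') + '\n').encode('utf-8'))
        await writer.drain()
        # Backfill is queued only after the reply is written, keeping the
        # client's turnaround time low.
        if from_upstream:
            await backfill_queue.put(line)

async def main():
    reader = asyncio.StreamReader()
    reader.feed_data(b'sha256 aaaa\nEND\n')
    reader.feed_eof()
    writer = _FakeWriter()
    queue = asyncio.Queue()
    await handle_stream(reader, writer, lambda line: ('bbbb', True), queue)
    print(writer.chunks)          # [b'bbbb\n', b'ok\n']
    print(queue.qsize())          # 1

asyncio.run(main())
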
375     async def handle_report(self, data):
376         with closing(self.db.cursor()) as cursor:
424             if self.upstream_client is not None:
425                 upstream_data = await self.upstream_client.get_outhash(data['method'], data['outhash'], data['task…
440             unihash_data = await self.get_unihash(cursor, data['method'], data['taskhash'])
446             self.db.commit()
454         self.write_message(d)
456     async def handle_equivreport(self, data):
457         with closing(self.db.cursor()) as cursor:
464             self.db.commit()
469             row = self.query_equivalent(cursor, data['method'], data['taskhash'])
477         self.write_message(d)
480     async def handle_get_stats(self, request):
482             'requests': self.request_stats.todict(),
485         self.write_message(d)
487     async def handle_reset_stats(self, request):
489             'requests': self.request_stats.todict(),
492         self.request_stats.reset()
493         self.write_message(d)
495     async def handle_backfill_wait(self, request):
497             'tasks': self.backfill_queue.qsize(),
499         await self.backfill_queue.join()
500         self.write_message(d)
502     def query_equivalent(self, cursor, method, taskhash):
515     def __init__(self, db, upstream=None, read_only=False):
521         self.request_stats = Stats()
522         self.db = db
523         self.upstream = upstream
524         self.read_only = read_only
526     def accept_client(self, reader, writer):
527         return ServerClient(reader, writer, self.db, self.request_stats, self.backfill_queue, self.upstrea…
530     def _backfill_worker(self):
532             client = await create_async_client(self.upstream)
535                 item = await self.backfill_queue.get()
537                     self.backfill_queue.task_done()
540                 await copy_unihash_from_upstream(client, self.db, method, taskhash)
541                 self.backfill_queue.task_done()
546             await self.backfill_queue.put(None)
549         if self.upstream is not None:
554             self.loop.run_until_complete(join_worker(worker))
558     def run_loop_forever(self):
559         self.backfill_queue = asyncio.Queue()
561         with self._backfill_worker():
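
The Server matches at source lines 530-561 outline how backfilling is organized: run_loop_forever() creates an asyncio.Queue, _backfill_worker() runs a consumer that copies each queued (method, taskhash) from the upstream server and calls task_done(), handle_backfill_wait waits on queue.join(), and shutdown pushes a None sentinel onto the queue. A self-contained sketch of that queue-and-sentinel pattern, where copy_item is a placeholder for copy_unihash_from_upstream:

import asyncio

async def copy_item(item):
    # Placeholder for copy_unihash_from_upstream(client, db, method, taskhash).
    await asyncio.sleep(0)
    print('backfilled', item)

async def backfill_worker(queue):
    while True:
        item = await queue.get()
        if item is None:              # sentinel: stop the worker
            queue.task_done()
            break
        await copy_item(item)
        queue.task_done()

async def main():
    queue = asyncio.Queue()
    worker = asyncio.ensure_future(backfill_worker(queue))

    await queue.put(('sha256', 'aaaa'))
    await queue.put(('sha256', 'bbbb'))

    # handle_backfill_wait-style behaviour: block until everything queued so
    # far has been processed.
    await queue.join()

    # Shutdown: push the sentinel and wait for the worker to exit.
    await queue.put(None)
    await worker

asyncio.run(main())

Calling task_done() for the sentinel as well, as the matched line 537 does, keeps any later join() on the queue from blocking forever.
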