/usr/lib/python3/dist-packages/pgq/remoteconsumer.py is in python3-pgq 3.3.0-1.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
"""
old RemoteConsumer / SerialConsumer classes.
"""
from __future__ import division, absolute_import, print_function
import sys
from pgq.consumer import Consumer
__all__ = ['RemoteConsumer', 'SerialConsumer']
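
# Both consumers in this module track their progress with helper functions
# from the "pgq_ext" schema, which therefore must be installed in the
# destination ("remote") database.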

class RemoteConsumer(Consumer):
    """Helper for doing event processing in another database.

    Requires that whole batch is processed in one TX.
    """

    def __init__(self, service_name, db_name, remote_db, args):
        super(RemoteConsumer, self).__init__(service_name, db_name, args)
        self.remote_db = remote_db

    def process_batch(self, db, batch_id, event_list):
        """Process all events in batch.

        Checks batch tracking on the destination, hands the events to
        process_remote_batch() and then marks the batch as done there.
        """
        dst_db = self.get_database(self.remote_db)
        curs = dst_db.cursor()

        if self.is_last_batch(curs, batch_id):
            return

        self.process_remote_batch(db, batch_id, event_list, dst_db)

        self.set_last_batch(curs, batch_id)
        dst_db.commit()

    def is_last_batch(self, dst_curs, batch_id):
        """Helper function to keep track of last successful batch
        in external database.
        """
        q = "select pgq_ext.is_batch_done(%s, %s)"
        dst_curs.execute(q, [self.consumer_name, batch_id])
        return dst_curs.fetchone()[0]

    def set_last_batch(self, dst_curs, batch_id):
        """Helper function to set last successful batch
        in external database.
        """
        q = "select pgq_ext.set_batch_done(%s, %s)"
        dst_curs.execute(q, [self.consumer_name, batch_id])

    def process_remote_batch(self, db, batch_id, event_list, dst_db):
        raise Exception('process_remote_batch not implemented')
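

# Illustrative sketch (not part of the original module): a working
# RemoteConsumer subclass only has to implement process_remote_batch().
# The class name, config/database names and the target table below are
# invented for the example; a skytools-style .ini config and the pgq_ext
# schema on the destination side are assumed.
class _ExampleArchiveWriter(RemoteConsumer):
    def process_remote_batch(self, db, batch_id, event_list, dst_db):
        # Replay every event of the batch into the destination database.
        # The surrounding process_batch() marks the batch done via pgq_ext
        # and commits the destination transaction afterwards.
        curs = dst_db.cursor()
        for ev in event_list:
            curs.execute("insert into event_archive (ev_type, ev_data) values (%s, %s)",
                         [ev.type, ev.data])

# Typical use of the sketch above (config section 'archive_writer' with
# 'src_db' and 'dst_db' connection strings is assumed):
#   _ExampleArchiveWriter('archive_writer', 'src_db', 'dst_db', sys.argv[1:]).start()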


class SerialConsumer(Consumer):
    """Consumer that applies batches sequentially in second database.

    Requirements:
     - Whole batch in one TX.
     - Must not use retry queue.

    Features:
     - Can detect if several batches are already applied to dest db.
     - If some ticks are lost, allows seeking back on the queue;
       whether that succeeds depends on pgq configuration.
    """

    def __init__(self, service_name, db_name, remote_db, args):
        super(SerialConsumer, self).__init__(service_name, db_name, args)
        self.remote_db = remote_db
        self.dst_schema = "pgq_ext"

    def startup(self):
        if self.options.rewind:
            self.rewind()
            sys.exit(0)
        if self.options.reset:
            self.dst_reset()
            sys.exit(0)
        return Consumer.startup(self)

    def init_optparse(self, parser=None):
        p = super(SerialConsumer, self).init_optparse(parser)
        p.add_option("--rewind", action="store_true",
                     help="change queue position according to destination")
        p.add_option("--reset", action="store_true",
                     help="reset queue pos on destination side")
        return p

    def process_batch(self, db, batch_id, event_list):
        """Process all events in batch.
        """
        dst_db = self.get_database(self.remote_db)
        curs = dst_db.cursor()

        # check if done
        if self.is_batch_done(curs):
            return

        # actual work
        self.process_remote_batch(db, batch_id, event_list, dst_db)

        # finish work
        self.set_batch_done(curs)
        dst_db.commit()

    def is_batch_done(self, dst_curs):
        """Helper function to keep track of last successful batch
        in external database.
        """
        cur_tick = self.batch_info['tick_id']
        prev_tick = self.batch_info['prev_tick_id']

        dst_tick = self.get_last_tick(dst_curs)
        if not dst_tick:
            # seems this consumer has not run yet against dst_db
            return False

        if prev_tick == dst_tick:
            # on track
            return False

        if cur_tick == dst_tick:
            # current batch is already applied, skip it
            return True

        # anything else means problems
        raise Exception('Lost position: batch %d..%d, dst has %d' % (
            prev_tick, cur_tick, dst_tick))
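
    # Worked example of the checks above (tick numbers invented): if the
    # current batch spans prev_tick_id = 41 .. tick_id = 42, then
    #   * destination remembers tick 41 -> batch not applied yet, process it
    #   * destination remembers tick 42 -> batch already applied, skip it
    #   * anything else (40, 43, ...)   -> positions diverged, raise an error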

    def set_batch_done(self, dst_curs):
        """Helper function to set last successful batch
        in external database.
        """
        tick_id = self.batch_info['tick_id']
        self.set_last_tick(dst_curs, tick_id)

    def register_consumer(self):
        new = Consumer.register_consumer(self)
        if new:  # fixme
            self.dst_reset()

    def unregister_consumer(self):
        """If unregistering, also clean completed tick table on dest."""
        Consumer.unregister_consumer(self)
        self.dst_reset()

    def process_remote_batch(self, db, batch_id, event_list, dst_db):
        raise Exception('process_remote_batch not implemented')

    def rewind(self):
        self.log.info("Rewinding queue")
        src_db = self.get_database(self.db_name)
        dst_db = self.get_database(self.remote_db)

        src_curs = src_db.cursor()
        dst_curs = dst_db.cursor()

        dst_tick = self.get_last_tick(dst_curs)
        if dst_tick:
            q = "select pgq.register_consumer_at(%s, %s, %s)"
            src_curs.execute(q, [self.queue_name, self.consumer_name, dst_tick])
        else:
            self.log.warning('No tick found on dst side')

        dst_db.commit()
        src_db.commit()

    def dst_reset(self):
        self.log.info("Resetting queue tracking on dst side")
        dst_db = self.get_database(self.remote_db)
        dst_curs = dst_db.cursor()
        self.set_last_tick(dst_curs, None)
        dst_db.commit()

    def get_last_tick(self, dst_curs):
        q = "select %s.get_last_tick(%%s)" % self.dst_schema
        dst_curs.execute(q, [self.consumer_name])
        res = dst_curs.fetchone()
        return res[0]

    def set_last_tick(self, dst_curs, tick_id):
        q = "select %s.set_last_tick(%%s, %%s)" % self.dst_schema
        dst_curs.execute(q, [self.consumer_name, tick_id])
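

# Illustrative sketch (not part of the original module): a SerialConsumer
# subclass is written the same way, but its progress is tracked per tick via
# pgq_ext.get_last_tick()/set_last_tick() on the destination.  The class name,
# table and config layout below are invented for the example.
class _ExampleMirrorWriter(SerialConsumer):
    def process_remote_batch(self, db, batch_id, event_list, dst_db):
        curs = dst_db.cursor()
        for ev in event_list:
            curs.execute("insert into mirror_log (payload) values (%s)", [ev.data])

# Assumed command lines for the sketch above (skytools-style example.ini whose
# section matches the service name and provides src_db, dst_db and queue_name):
#   python example_mirror.py example.ini            # normal batch processing
#   python example_mirror.py example.ini --rewind   # move queue position back to dst tick
#   python example_mirror.py example.ini --reset    # clear tick tracking on dst
if __name__ == '__main__':
    script = _ExampleMirrorWriter('example_mirror', 'src_db', 'dst_db', sys.argv[1:])
    script.start()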