/usr/lib/python2.7/dist-packages/carbon/protocols.py is in graphite-carbon 0.9.15-1.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
import time
from twisted.internet.protocol import DatagramProtocol
from twisted.internet.error import ConnectionDone
from twisted.protocols.basic import LineOnlyReceiver, Int32StringReceiver
from carbon import log, events, state, management
from carbon.conf import settings
from carbon.regexlist import WhiteList, BlackList
from carbon.util import pickle, get_unpickler
class MetricReceiver:
  """ Base class for all metric receiving protocols, handles flow
  control events and connection state logging.
  """
  def connectionMade(self):
    self.peerName = self.getPeerName()
    if settings.LOG_LISTENER_CONNECTIONS:
      log.listener("%s connection with %s established" % (self.__class__.__name__, self.peerName))

    if state.metricReceiversPaused:
      self.pauseReceiving()

    state.connectedMetricReceiverProtocols.add(self)
    events.pauseReceivingMetrics.addHandler(self.pauseReceiving)
    events.resumeReceivingMetrics.addHandler(self.resumeReceiving)

  def getPeerName(self):
    if hasattr(self.transport, 'getPeer'):
      peer = self.transport.getPeer()
      return "%s:%d" % (peer.host, peer.port)
    else:
      return "peer"

  def pauseReceiving(self):
    self.transport.pauseProducing()

  def resumeReceiving(self):
    self.transport.resumeProducing()

  def connectionLost(self, reason):
    if reason.check(ConnectionDone):
      if settings.LOG_LISTENER_CONNECTIONS:
        log.listener("%s connection with %s closed cleanly" % (self.__class__.__name__, self.peerName))
    else:
      log.listener("%s connection with %s lost: %s" % (self.__class__.__name__, self.peerName, reason.value))

    state.connectedMetricReceiverProtocols.remove(self)
    events.pauseReceivingMetrics.removeHandler(self.pauseReceiving)
    events.resumeReceivingMetrics.removeHandler(self.resumeReceiving)

  def metricReceived(self, metric, datapoint):
    if BlackList and metric in BlackList:
      instrumentation.increment('blacklistMatches')
      return
    if WhiteList and metric not in WhiteList:
      instrumentation.increment('whitelistRejects')
      return
    if datapoint[1] != datapoint[1]:  # filter out NaN values
      return
    if int(datapoint[0]) == -1:  # use current time if none given
      datapoint = (time.time(), datapoint[1])
    events.metricReceived(metric, datapoint)
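
# Plaintext protocol over TCP: one datapoint per newline-terminated line,
# formatted as "<metric path> <value> <timestamp>".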
class MetricLineReceiver(MetricReceiver, LineOnlyReceiver):
  delimiter = '\n'

  def lineReceived(self, line):
    try:
      metric, value, timestamp = line.strip().split()
      datapoint = (float(timestamp), float(value))
    except ValueError:
      log.listener('invalid line (%s) received from client %s, ignoring' % (line, self.peerName))
      return

    self.metricReceived(metric, datapoint)
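
# Same plaintext line format over UDP; a single datagram may carry several
# newline-separated datapoints.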
class MetricDatagramReceiver(MetricReceiver, DatagramProtocol):
  def datagramReceived(self, data, (host, port)):
    for line in data.splitlines():
      try:
        metric, value, timestamp = line.strip().split()
        datapoint = (float(timestamp), float(value))
        self.metricReceived(metric, datapoint)
      except ValueError:
        log.listener('invalid line (%s) received from %s, ignoring' % (line, host))
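
# Pickle protocol: each length-prefixed (Int32String) frame is a pickled list
# of (metric, datapoint) tuples.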
class MetricPickleReceiver(MetricReceiver, Int32StringReceiver):
  MAX_LENGTH = 2 ** 20

  def connectionMade(self):
    MetricReceiver.connectionMade(self)
    self.unpickler = get_unpickler(insecure=settings.USE_INSECURE_UNPICKLER)

  def stringReceived(self, data):
    try:
      datapoints = self.unpickler.loads(data)
    except pickle.UnpicklingError:
      log.listener('invalid pickle received from %s, ignoring' % self.peerName)
      return

    for raw in datapoints:
      try:
        (metric, (value, timestamp)) = raw
      except Exception, e:
        log.listener('Error decoding pickle: %s' % e)
        continue  # skip malformed entries instead of reusing stale names

      try:
        datapoint = (float(value), float(timestamp))  # force proper types
      except ValueError:
        continue

      self.metricReceived(metric, datapoint)
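
# Query interface used to read datapoints still buffered in the in-memory
# cache and to get/set metric metadata; requests and responses are pickled
# and framed as Int32Strings.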
class CacheManagementHandler(Int32StringReceiver):
  MAX_LENGTH = 1024 ** 3  # 1GB

  def connectionMade(self):
    peer = self.transport.getPeer()
    self.peerAddr = "%s:%d" % (peer.host, peer.port)
    log.query("%s connected" % self.peerAddr)
    self.unpickler = get_unpickler(insecure=settings.USE_INSECURE_UNPICKLER)

  def connectionLost(self, reason):
    if reason.check(ConnectionDone):
      log.query("%s disconnected" % self.peerAddr)
    else:
      log.query("%s connection lost: %s" % (self.peerAddr, reason.value))

  def stringReceived(self, rawRequest):
    request = self.unpickler.loads(rawRequest)

    if request['type'] == 'cache-query':
      metric = request['metric']
      datapoints = MetricCache.get(metric, [])
      result = dict(datapoints=datapoints)
      if settings.LOG_CACHE_HITS:
        log.query('[%s] cache query for "%s" returned %d values' % (self.peerAddr, metric, len(datapoints)))
      instrumentation.increment('cacheQueries')

    elif request['type'] == 'cache-query-bulk':
      datapointsByMetric = {}
      metrics = request['metrics']
      for metric in metrics:
        datapointsByMetric[metric] = MetricCache.get(metric, [])

      result = dict(datapointsByMetric=datapointsByMetric)

      if settings.LOG_CACHE_HITS:
        log.query('[%s] cache query bulk for %d metrics returned %d values' %
                  (self.peerAddr, len(metrics), sum([len(datapoints) for datapoints in datapointsByMetric.values()])))

      instrumentation.increment('cacheBulkQueries')
      instrumentation.append('cacheBulkQuerySize', len(metrics))

    elif request['type'] == 'get-metadata':
      result = management.getMetadata(request['metric'], request['key'])

    elif request['type'] == 'set-metadata':
      result = management.setMetadata(request['metric'], request['key'], request['value'])

    else:
      result = dict(error="Invalid request type \"%s\"" % request['type'])

    response = pickle.dumps(result, protocol=-1)
    self.sendString(response)
# Avoid import circularities
from carbon.cache import MetricCache
from carbon import instrumentation
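
For illustration only (not part of the packaged file): a minimal sketch of a client sending one datapoint over the plaintext protocol parsed by MetricLineReceiver above. The host, port, and metric name are assumptions; 2003 is the conventional LINE_RECEIVER_PORT default in carbon.conf, so adjust to your configuration.

  import socket
  import time

  CARBON_HOST = '127.0.0.1'  # assumption: carbon-cache running locally
  CARBON_PORT = 2003         # conventional LINE_RECEIVER_PORT default

  sock = socket.create_connection((CARBON_HOST, CARBON_PORT))
  try:
    # One datapoint per line: "<metric path> <value> <timestamp>\n".
    # A timestamp of -1 makes MetricReceiver.metricReceived substitute
    # the current time on the carbon side.
    sock.sendall('example.requests.count 42 %d\n' % int(time.time()))
  finally:
    sock.close()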