/usr/lib/python2.7/dist-packages/carbon/util.py is in graphite-carbon 0.9.15-1.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
import copy
import os
import pwd
import sys
import __builtin__
from os.path import abspath, basename, dirname
try:
  from cStringIO import StringIO
except ImportError:
  from StringIO import StringIO

try:
  import cPickle as pickle
  USING_CPICKLE = True
except ImportError:
  import pickle
  USING_CPICKLE = False
from time import sleep, time
from twisted.python.util import initgroups
from twisted.scripts.twistd import runApp


def dropprivs(user):
  uid, gid = pwd.getpwnam(user)[2:4]
  initgroups(uid, gid)
  os.setregid(gid, gid)
  os.setreuid(uid, uid)
  return (uid, gid)
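

# Illustrative usage sketch, not part of the upstream file; the account name
# is hypothetical and the call only succeeds when the process starts as root.
def _example_dropprivs():
  # Switch the daemon to an unprivileged account after any privileged setup;
  # returns the numeric uid/gid that were switched to.
  return dropprivs('carbon')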


def run_twistd_plugin(filename):
  from carbon.conf import get_parser
  from twisted.scripts.twistd import ServerOptions

  bin_dir = dirname(abspath(filename))
  root_dir = dirname(bin_dir)
  os.environ.setdefault('GRAPHITE_ROOT', root_dir)

  program = basename(filename).split('.')[0]

  # First, parse command line options as the legacy carbon scripts used to
  # do.
  parser = get_parser(program)
  (options, args) = parser.parse_args()

  if not args:
    parser.print_usage()
    return

  # This isn't as evil as you might think
  __builtin__.instance = options.instance
  __builtin__.program = program

  # Then forward applicable options to either twistd or to the plugin itself.
  twistd_options = ["--no_save"]

  # If no reactor was selected yet, try to use the epoll reactor if
  # available.
  try:
    from twisted.internet import epollreactor
    twistd_options.append("--reactor=epoll")
  except ImportError:
    pass

  if options.debug or options.nodaemon:
    twistd_options.extend(["--nodaemon"])
  if options.profile:
    twistd_options.extend(["--profile", options.profile])
  if options.profiler:
    twistd_options.extend(["--profiler", options.profiler])
  if options.pidfile:
    twistd_options.extend(["--pidfile", options.pidfile])
  if options.umask:
    twistd_options.extend(["--umask", options.umask])
  if options.syslog:
    twistd_options.append("--syslog")

  # Now for the plugin-specific options.
  twistd_options.append(program)

  if options.debug:
    twistd_options.append("--debug")

  for option_name, option_value in vars(options).items():
    if (option_value is not None and
        option_name not in ("debug", "profile", "profiler", "pidfile", "umask", "nodaemon", "syslog")):
      twistd_options.extend(["--%s" % option_name.replace("_", "-"),
                             option_value])

  # Finally, append extra args so that twistd has a chance to process them.
  twistd_options.extend(args)

  config = ServerOptions()
  config.parseOptions(twistd_options)

  runApp(config)
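

# Illustrative sketch, not part of the upstream file: a carbon start script
# (for example a hypothetical /opt/graphite/bin/carbon-cache.py) passes its
# own path to run_twistd_plugin, which derives GRAPHITE_ROOT and the plugin
# name from that path, builds the twistd argument list shown above, and then
# starts the daemon:
#
#   run_twistd_plugin(__file__)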


def parseDestinations(destination_strings):
  destinations = []

  for dest_string in destination_strings:
    parts = dest_string.strip().split(':')

    if len(parts) == 2:
      server, port = parts
      instance = None
    elif len(parts) == 3:
      server, port, instance = parts
    else:
      raise ValueError("Invalid destination string \"%s\"" % dest_string)

    destinations.append((server, int(port), instance))

  return destinations
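

# Illustrative usage sketch, not part of the upstream file; the destination
# strings below are hypothetical.
def _example_parse_destinations():
  # "host:port" yields (host, port, None); "host:port:instance" keeps the
  # optional instance name as the third element.
  dests = parseDestinations(["127.0.0.1:2004", "10.0.0.2:2004:a"])
  assert dests == [("127.0.0.1", 2004, None), ("10.0.0.2", 2004, "a")]
  return dests

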
# This whole song & dance is due to pickle being insecure
# yet performance critical for carbon. We leave the insecure
# mode (which is faster) as an option (USE_INSECURE_UNPICKLER).
# The SafeUnpickler classes were largely derived from
# http://nadiana.com/python-pickle-insecure
if USING_CPICKLE:
  class SafeUnpickler(object):
    PICKLE_SAFE = {
      'copy_reg': set(['_reconstructor']),
      '__builtin__': set(['object']),
    }

    @classmethod
    def find_class(cls, module, name):
      if not module in cls.PICKLE_SAFE:
        raise pickle.UnpicklingError('Attempting to unpickle unsafe module %s' % module)
      __import__(module)
      mod = sys.modules[module]
      if not name in cls.PICKLE_SAFE[module]:
        raise pickle.UnpicklingError('Attempting to unpickle unsafe class %s' % name)
      return getattr(mod, name)

    @classmethod
    def loads(cls, pickle_string):
      pickle_obj = pickle.Unpickler(StringIO(pickle_string))
      pickle_obj.find_global = cls.find_class
      return pickle_obj.load()
else:
  class SafeUnpickler(pickle.Unpickler):
    PICKLE_SAFE = {
      'copy_reg': set(['_reconstructor']),
      '__builtin__': set(['object']),
    }

    def find_class(self, module, name):
      if not module in self.PICKLE_SAFE:
        raise pickle.UnpicklingError('Attempting to unpickle unsafe module %s' % module)
      __import__(module)
      mod = sys.modules[module]
      if not name in self.PICKLE_SAFE[module]:
        raise pickle.UnpicklingError('Attempting to unpickle unsafe class %s' % name)
      return getattr(mod, name)

    @classmethod
    def loads(cls, pickle_string):
      return cls(StringIO(pickle_string)).load()


def get_unpickler(insecure=False):
  if insecure:
    return pickle
  else:
    return SafeUnpickler
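

# Illustrative usage sketch, not part of the upstream file; the payload is a
# hypothetical pickled list of metric datapoints of the kind carbon receives.
def _example_get_unpickler():
  data = pickle.dumps([("foo.bar", (1234567890, 1.5))], 2)
  # The default (safe) unpickler only resolves globals whitelisted in
  # PICKLE_SAFE; plain lists, tuples, strings and numbers load fine.
  # Passing insecure=True would return the stock pickle module instead.
  unpickler = get_unpickler(insecure=False)
  return unpickler.loads(data)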


class TokenBucket(object):
  '''This is a basic tokenbucket rate limiter implementation for use in
  enforcing various configurable rate limits'''
  def __init__(self, capacity, fill_rate):
    '''Capacity is the total number of tokens the bucket can hold, fill rate is
    the rate in tokens (or fractional tokens) to be added to the bucket per
    second.'''
    self.capacity = float(capacity)
    self._tokens = float(capacity)
    self.fill_rate = float(fill_rate)
    self.timestamp = time()

  def drain(self, cost, blocking=False):
    '''Given a number of tokens (or fractions) drain will return True and
    drain the number of tokens from the bucket if the capacity allows,
    otherwise we return False and leave the contents of the bucket.'''
    if cost <= self.tokens:
      self._tokens -= cost
      return True
    else:
      if blocking:
        tokens_needed = cost - self._tokens
        seconds_per_token = 1 / self.fill_rate
        # Sleep just long enough for the missing tokens to accumulate.
        seconds_left = seconds_per_token * tokens_needed
        time_to_sleep = self.timestamp + seconds_left - time()
        if time_to_sleep > 0:
          sleep(time_to_sleep)
        self._tokens -= cost
        return True
      return False

  def setCapacityAndFillRate(self, new_capacity, new_fill_rate):
    delta = float(new_capacity) - self.capacity
    self.capacity = float(new_capacity)
    self.fill_rate = float(new_fill_rate)
    self._tokens = delta + self._tokens

  @property
  def tokens(self):
    '''The tokens property will return the current number of tokens in the
    bucket.'''
    if self._tokens < self.capacity:
      now = time()
      delta = self.fill_rate * (now - self.timestamp)
      self._tokens = min(self.capacity, self._tokens + delta)
      self.timestamp = now
    return self._tokens
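

# Illustrative usage sketch, not part of the upstream file; the capacity and
# fill rate below are hypothetical values.
def _example_token_bucket():
  # A bucket that holds at most 10 tokens and refills at 5 tokens/second.
  bucket = TokenBucket(capacity=10, fill_rate=5)
  sent = 0
  for _ in range(12):
    # Each datapoint costs one token; the non-blocking drain() returns
    # False once the bucket is empty, letting the caller back off.
    if bucket.drain(1):
      sent += 1
  return sent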


class defaultdict(dict):
  def __init__(self, default_factory=None, *a, **kw):
    if (default_factory is not None and
        not hasattr(default_factory, '__call__')):
      raise TypeError('first argument must be callable')
    dict.__init__(self, *a, **kw)
    self.default_factory = default_factory

  def __getitem__(self, key):
    try:
      return dict.__getitem__(self, key)
    except KeyError:
      return self.__missing__(key)

  def __missing__(self, key):
    if self.default_factory is None:
      raise KeyError(key)
    self[key] = value = self.default_factory()
    return value

  def __reduce__(self):
    if self.default_factory is None:
      args = tuple()
    else:
      args = self.default_factory,
    return type(self), args, None, None, self.iteritems()

  def copy(self):
    return self.__copy__()

  def __copy__(self):
    return type(self)(self.default_factory, self)

  def __deepcopy__(self, memo):
    return type(self)(self.default_factory,
                      copy.deepcopy(self.items()))

  def __repr__(self):
    return 'defaultdict(%s, %s)' % (self.default_factory,
                                    dict.__repr__(self))
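

# Illustrative usage sketch, not part of the upstream file: this class appears
# to be a local stand-in for collections.defaultdict, and behaves the same for
# the common counting idiom shown here.
def _example_defaultdict():
  counts = defaultdict(int)
  for word in ['carbon', 'cache', 'carbon']:
    # Missing keys are created on first access via default_factory.
    counts[word] += 1
  assert counts['carbon'] == 2 and counts['cache'] == 1
  return counts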