This file is indexed.

/usr/lib/python2.7/dist-packages/pygccxml/parser/declarations_cache.py is in python-pygccxml 1.8.0-1.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

  1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
# Copyright 2014-2016 Insight Software Consortium.
# Copyright 2004-2008 Roman Yakovenko.
# Distributed under the Boost Software License, Version 1.0.
# See http://www.boost.org/LICENSE_1_0.txt

import os
import time
import hashlib
try:
    import cPickle as pickle
except ImportError:
    import pickle
from pygccxml import utils
from . import config as cxx_parsers_cfg


def file_signature(filename):
    """
    Return a signature (sha1 hex digest) for a file, or None when
    *filename* does not refer to an existing regular file.

    :param filename: path of the file to fingerprint
    :type filename: str
    :rtype: str or None
    """

    # os.path.isfile() is already False for missing paths as well as for
    # directories, so the separate os.path.exists() check the original
    # performed was redundant.
    if not os.path.isfile(filename):
        return None

    # Duplicate auto-generated files can be recognized with the sha1 hash.
    sig = hashlib.sha1()
    with open(filename, "rb") as f:
        # Hash in fixed-size chunks so very large headers do not have to
        # be loaded into memory in one piece; the digest is identical.
        for chunk in iter(lambda: f.read(65536), b""):
            sig.update(chunk)

    return sig.hexdigest()


def configuration_signature(config):
    """
    Return a signature (sha1 hex digest) for a configuration
    (xml_generator_configuration_t) object.

    This can then be used as a key in the cache.
    This method must take into account anything about
    a configuration that could cause the declarations generated
    to be different between runs.

    :param config: parser configuration to fingerprint
    :rtype: str

    """

    sig = hashlib.sha1()
    # xml_generator_path and cflags only exist on the full
    # xml_generator_configuration_t; evaluate the isinstance check once
    # instead of twice.  The update order below is kept identical to the
    # historical order so existing cache files keep matching signatures.
    is_full_config = isinstance(
        config, cxx_parsers_cfg.xml_generator_configuration_t)
    if is_full_config:
        # Explicit utf-8 for consistency with every other update() call.
        sig.update(str(config.xml_generator_path).encode('utf-8'))
    sig.update(str(config.working_directory).encode('utf-8'))
    if is_full_config:
        sig.update(str(config.cflags).encode('utf-8'))
    for p in config.include_paths:
        sig.update(str(p).encode('utf-8'))
    for s in config.define_symbols:
        sig.update(str(s).encode('utf-8'))
    for u in config.undefine_symbols:
        sig.update(str(u).encode('utf-8'))
    return sig.hexdigest()


class cache_base_t(object):

    """ Abstract interface that every declarations cache implements. """

    # Shared logger for all cache implementations.
    logger = utils.loggers.declarations_cache

    def __init__(self):
        super(cache_base_t, self).__init__()

    def flush(self):
        """ Flush (write out) the cache to disk if needed. """

        raise NotImplementedError()

    def update(self, source_file, configuration, declarations, included_files):
        """
        update cache entry

        :param source_file: path to the C++ source file being parsed
        :param configuration: configuration used in
               parsing :class:`xml_generator_configuration_t`
        :param declarations: declaration tree found when parsing
        :param included_files: files included by parsing.
        """

        raise NotImplementedError()

    def cached_value(self, source_file, configuration):
        """
        Return declarations, we have cached, for the source_file and the
        given configuration.

        :param source_file: path to the C++ source file being parsed.
        :param configuration: configuration that was used for parsing.

        """

        raise NotImplementedError()


class record_t(object):

    """ Immutable-ish value object describing one cached parse result:
        the signatures it was computed from, the files involved and the
        resulting declaration tree.  The only mutable part is the
        ``was_hit`` flag, used by the cache to expire stale entries.
    """

    def __init__(
            self,
            xml_generator,
            source_signature,
            config_signature,
            included_files,
            included_files_signature,
            declarations):
        self.__xml_generator = xml_generator
        self.__source_signature = source_signature
        self.__config_signature = config_signature
        self.__included_files = included_files
        self.__included_files_signature = included_files_signature
        self.__declarations = declarations
        # A freshly built record counts as a cache hit.
        self.__was_hit = True

    @staticmethod
    def create_key(source_file, configuration):
        """ Build the cache key for a (source file, configuration) pair. """
        return (
            file_signature(source_file),
            configuration_signature(configuration))

    def key(self):
        """ The key this record is stored under in the cache. """
        return (self.__source_signature, self.__config_signature)

    @property
    def was_hit(self):
        # True while this entry has been looked up during the current run.
        return self.__was_hit

    @was_hit.setter
    def was_hit(self, value):
        self.__was_hit = value

    @property
    def xml_generator(self):
        return self.__xml_generator

    @property
    def source_signature(self):
        return self.__source_signature

    @property
    def config_signature(self):
        return self.__config_signature

    @property
    def included_files(self):
        return self.__included_files

    @property
    def included_files_signature(self):
        return self.__included_files_signature

    @property
    def declarations(self):
        return self.__declarations


class file_cache_t(cache_base_t):

    """ Cache implementation to store data in a pickled form in a file.
        This class contains some cache logic that keeps track of which entries
        have been 'hit' in the cache and if an entry has not been hit then
        it is deleted at the time of the flush().  This keeps the cache from
        growing larger when files change and are not used again.
    """

    def __init__(self, name):
        """
        :param name: name of the cache file.
        :raises RuntimeError: if the cache file is incompatible with this
                version of pygccxml or was produced by a different
                xml generator.
        """

        cache_base_t.__init__(self)
        self.__name = name  # Name of cache file
        # Map record_key to record_t
        self.__cache = self.__load(self.__name)
        # A brand-new (empty) cache must be written out at least once.
        self.__needs_flushed = not bool(self.__cache)
        for entry in self.__cache.values():  # Clear hit flags
            entry.was_hit = False
            try:
                # Make sure the xml_generator variable is defined, else it
                # will stay None.
                xml_generator = entry.xml_generator
            except AttributeError:
                msg = (
                    "The %s cache file is not compatible with this version " +
                    "of pygccxml. Please regenerate it.") % name
                raise RuntimeError(msg)
            if utils.xml_generator is None:
                # Set the xml_generator to the one read in the cache file
                utils.xml_generator = xml_generator
            elif utils.xml_generator != xml_generator:
                msg = (
                    "The %s cache file was generated with a different xml " +
                    "generator. Please regenerate it.") % name
                raise RuntimeError(msg)

    @staticmethod
    def __load(file_name):
        """ Load pickled cache from file and return the object.

        Creates an empty cache file (and returns an empty dict) when the
        file is missing or cannot be unpickled.

        :raises RuntimeError: if *file_name* exists but is not a regular
                file (e.g. a directory).
        """

        if os.path.exists(file_name) and not os.path.isfile(file_name):
            raise RuntimeError(
                'Cache should be initialized with valid full file name')
        if not os.path.exists(file_name):
            open(file_name, 'w+b').close()
            return {}
        try:
            file_cache_t.logger.info('Loading cache file "%s".', file_name)
            # time.clock() was deprecated in Python 3.3 and removed in 3.8;
            # time.time() is a portable replacement for this timing log.
            start_time = time.time()
            # "with" closes the handle on every path -- the original code
            # leaked the open file object when unpickling succeeded.
            with open(file_name, 'rb') as cache_file_obj:
                cache = pickle.load(cache_file_obj)
            file_cache_t.logger.debug(
                "Cache file has been loaded in %.1f secs",
                time.time() - start_time)
            file_cache_t.logger.debug(
                "Found cache in file: [%s]  entries: %s",
                file_name, len(list(cache.keys())))
        except Exception as error:
            file_cache_t.logger.exception(
                "Error occured while reading cache file: %s",
                error)
            file_cache_t.logger.info(
                "Invalid cache file: [%s]  Regenerating.",
                file_name)
            open(file_name, 'w+b').close()   # Create empty file
            cache = {}                       # Empty cache
        return cache

    def flush(self):
        """ Write the cache out to disk, dropping entries that were never
            hit since the cache was loaded. """

        # If not marked as needing flushed, then return immediately
        if not self.__needs_flushed:
            self.logger.debug("Cache did not change, ignoring flush.")
            return

        # Remove entries that did not get a cache hit
        num_removed = 0
        for key in list(self.__cache.keys()):
            if not self.__cache[key].was_hit:
                num_removed += 1
                del self.__cache[key]
        if num_removed > 0:
            self.logger.debug(
                "There are %s removed entries from cache.",
                num_removed)
        # Save out the cache to disk
        with open(self.__name, "w+b") as cache_file:
            pickle.dump(self.__cache, cache_file, pickle.HIGHEST_PROTOCOL)

    def update(self, source_file, configuration, declarations, included_files):
        """ Update a cached record with the current key and value contents. """

        record = record_t(
            xml_generator=utils.xml_generator,
            source_signature=file_signature(source_file),
            config_signature=configuration_signature(configuration),
            included_files=included_files,
            included_files_signature=list(
                map(
                    file_signature,
                    included_files)),
            declarations=declarations)
        # Hold the full record in the cache so cached_value() does not
        # have to rebuild one.  record_t.__init__ already sets was_hit,
        # so no separate assignment is needed.
        self.__cache[record.key()] = record
        self.__needs_flushed = True

    def cached_value(self, source_file, configuration):
        """
        Attempt to lookup the cached declarations for the given file and
        configuration.

        Returns None if declaration not found or signature check fails.

        """

        key = record_t.create_key(source_file, configuration)
        if key not in self.__cache:
            return None
        record = self.__cache[key]
        if self.__is_valid_signature(record):
            record.was_hit = True  # Record cache hit
            return record.declarations
        else:  # some file has been changed
            del self.__cache[key]
            return None

    def __is_valid_signature(self, record):
        """ True when every included file still matches the signature it
            had when the record was cached. """
        for index, included_file in enumerate(record.included_files):
            if file_signature(included_file) != \
                    record.included_files_signature[index]:
                return False
        return True


class dummy_cache_t(cache_base_t):

    """
    A no-op cache: nothing is ever stored and every lookup misses.

    By default no caching is enabled in pygccxml.

    """

    def __init__(self):
        super(dummy_cache_t, self).__init__()

    def flush(self):
        """ Nothing is held in memory, so there is nothing to write. """
        pass

    def update(self, source_file, configuration, declarations, included_files):
        """ Discard the parse result without storing anything. """
        pass

    def cached_value(self, source_file, configuration):
        """ Always report a cache miss. """
        return None