This file is indexed.

/usr/lib/python3/dist-packages/pyfits/hdu/streaming.py is in python3-pyfits 1:3.3-2+b1.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

import gzip
import os

from ..file import _File
from ..util import _pad_length, fileobj_name
from .base import _BaseHDU
from .hdulist import HDUList
from .image import PrimaryHDU, _ImageBaseHDU


class StreamingHDU(object):
    """
    A class that provides the capability to stream data to a FITS file
    instead of requiring all of the data to be written at once.

    The following pseudocode illustrates its use::

        header = pyfits.Header()

        for all the cards you need in the header:
            header[key] = (value, comment)

        shdu = pyfits.StreamingHDU('filename.fits', header)

        for each piece of data:
            shdu.write(data)

        shdu.close()
    """

    def __init__(self, name, header):
        """
        Construct a `StreamingHDU` object given a file name and a header.

        Parameters
        ----------
        name : file path, file object, or file-like object
            The file to which the header and data will be streamed.  If opened,
            the file object must be opened in a writable binary mode such as
            'wb' or 'ab+'.

        header : `Header` instance
            The header object associated with the data to be written
            to the file.

        Notes
        -----
        The file will be opened and the header appended to the end of
        the file.  If the file does not already exist, it will be
        created, and if the header represents a Primary header, it
        will be written to the beginning of the file.  If the file
        does not exist and the provided header is not a Primary
        header, a default Primary HDU will be inserted at the
        beginning of the file and the provided header will be added as
        the first extension.  If the file already exists and the
        provided header represents a Primary header, the header will
        be converted to an image extension header and appended to the
        end of the file.
        """

        if isinstance(name, gzip.GzipFile):
            raise TypeError('StreamingHDU not supported for GzipFile objects.')

        self._header = header.copy()

        # handle a file object instead of a file name
        filename = fileobj_name(name) or ''
        # Check whether the file already exists.  If it does not and the
        # provided header is not a Primary header, a default PrimaryHDU
        # must be prepended to the file before writing the given header.
        newfile = False

        if filename:
            if not os.path.exists(filename) or os.path.getsize(filename) == 0:
                newfile = True
        elif (hasattr(name, 'len') and name.len == 0):
            newfile = True

        if newfile:
            if 'SIMPLE' not in self._header:
                hdulist = HDUList([PrimaryHDU()])
                hdulist.writeto(name, 'exception')
        else:
            # This will not be the first extension in the file, so we
            # must change the provided Primary header into an image
            # extension header.
            if 'SIMPLE' in self._header:
                self._header.set('XTENSION', 'IMAGE', 'Image extension',
                                 after='SIMPLE')
                del self._header['SIMPLE']

                if 'PCOUNT' not in self._header:
                    dim = self._header['NAXIS']

                    if dim == 0:
                        dim = ''
                    else:
                        dim = str(dim)

                    self._header.set('PCOUNT', 0, 'number of parameters',
                                     after='NAXIS' + dim)

                if 'GCOUNT' not in self._header:
                    self._header.set('GCOUNT', 1, 'number of groups',
                                     after='PCOUNT')
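                # Illustrative result (not from the original source): a header
                # that arrived with SIMPLE = T, BITPIX = 16, NAXIS = 2 leaves
                # this block with XTENSION = 'IMAGE' in place of SIMPLE, plus
                # PCOUNT = 0 (placed after NAXIS2) and GCOUNT = 1.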

        self._ffo = _File(name, 'append')

        # TODO : Fix this once the HDU writing API is cleaned up
        tmp_hdu = _BaseHDU()
        # Passing self._header as an argument to _BaseHDU() will cause its
        # values to be modified in undesired ways...need to have a better way
        # of doing this
        tmp_hdu._header = self._header
        self._header_offset = tmp_hdu._writeheader(self._ffo)[0]
        self._data_offset = self._ffo.tell()
        self._size = self.size

        if self._size != 0:
            self.writecomplete = False
        else:
            self.writecomplete = True

    # Support the 'with' statement
    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()

    def write(self, data):
        """
        Write the given data to the stream.

        Parameters
        ----------
        data : ndarray
            Data to stream to the file.

        Returns
        -------
        writecomplete : bool
            Flag that, when `True`, indicates that all of the required
            data has been written to the stream.

        Notes
        -----
        Only the amount of data specified in the header provided to the class
        constructor may be written to the stream.  If the provided data would
        cause the stream to overflow, an `IOError` exception is raised and the
        data is not written.  Once sufficient data has been written to the
        stream to satisfy the amount specified in the header, the stream is
        padded to fill a complete FITS block and no more data will be accepted.
        An attempt to write more data after the stream has been filled will
        raise an `IOError` exception.  If the dtype of the input data does not
        match what is expected by the header, a `TypeError` exception is
        raised.
        """

        size = self._ffo.tell() - self._data_offset

        if self.writecomplete or size + data.nbytes > self._size:
            raise IOError('Attempt to write more data to the stream than the '
                          'header specified.')

        if _ImageBaseHDU.NumCode[self._header['BITPIX']] != data.dtype.name:
            raise TypeError('Supplied data does not match the type specified '
                            'in the header.')

        if data.dtype.str[0] != '>':
            # Byteswap little-endian arrays before writing; FITS data is
            # stored big-endian on disk.
            output = data.byteswap()
        else:
            output = data

        self._ffo.writearray(output)

        if self._ffo.tell() - self._data_offset == self._size:
            # The stream is full, so pad the data out to the next
            # 2880-byte FITS block.
            self._ffo.write(_pad_length(self._size) * '\0')
            self.writecomplete = True

        self._ffo.flush()

        return self.writecomplete

    @property
    def size(self):
        """
        Return the size (in bytes) of the data portion of the HDU.
        """

        size = 0
        naxis = self._header.get('NAXIS', 0)

        if naxis > 0:
            simple = self._header.get('SIMPLE', 'F')
            random_groups = self._header.get('GROUPS', 'F')

            if simple == 'T' and random_groups == 'T':
                groups = 1
            else:
                groups = 0

            size = 1

            for idx in range(groups, naxis):
                size = size * self._header['NAXIS' + str(idx + 1)]
            bitpix = self._header['BITPIX']
            gcount = self._header.get('GCOUNT', 1)
            pcount = self._header.get('PCOUNT', 0)
            size = abs(bitpix) * gcount * (pcount + size) // 8
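            # Illustrative example (not from the original source): a
            # 1000 x 1000 image of 32-bit floats has BITPIX = -32,
            # GCOUNT = 1, PCOUNT = 0, so the data size is
            # abs(-32) * 1 * (0 + 1000 * 1000) // 8 = 4000000 bytes.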
        return size

    def close(self):
        """
        Close the physical FITS file.
        """

        self._ffo.close()
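
For reference, the following minimal usage sketch is not part of the file above. It shows how StreamingHDU can stream an image to disk in fixed-size row blocks, as described in the class docstring; the filename, array shape, and block size are illustrative assumptions.

import numpy as np
import pyfits

# Header describing a 1000 x 1000 image of 32-bit floats.
header = pyfits.Header()
header['SIMPLE'] = True
header['BITPIX'] = -32
header['NAXIS'] = 2
header['NAXIS1'] = 1000
header['NAXIS2'] = 1000

# Stream the data 100 rows at a time instead of building the full array
# in memory.  The 'with' statement closes the file via __exit__.
with pyfits.StreamingHDU('streamed.fits', header) as shdu:
    for _ in range(10):
        block = np.zeros((100, 1000), dtype=np.float32)
        shdu.write(block)

Once the amount of data promised by the header has been written, the stream is padded to a full FITS block and any further call to write raises IOError, as described in the write docstring above.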