This file is indexed.

/usr/share/pyshared/fabio/binaryimage.py is in python-fabio 0.1.4-1.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

#!/usr/bin/env python
# coding: utf8
"""
Authors: Gael Goret, Jerome Kieffer, ESRF, France
Emails: gael.goret@esrf.fr, jerome.kieffer@esrf.fr
        Brian Richard Pauw <brian@stack.nl>

Binary image files are simple, non-compressed 2D images defined only by their
data-type, dimensions, byte order and offset.

This simple library was made for manipulating exotic/unknown file formats.
"""

# Get ready for python3:
from __future__ import with_statement, print_function

__authors__ = ["Gaël Goret", "Jérôme Kieffer", "Brian Pauw"]
__contact__ = "gael.goret@esrf.fr"
__license__ = "GPLv3+"
__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
__version__ = "17 Oct 2012"

from .fabioimage import fabioimage
import numpy
import logging
logger = logging.getLogger("binaryimage")

class binaryimage(fabioimage):
    """
    This simple library was made for manipulating exotic/unknown file formats.

    Binary image files are simple, non-compressed 2D images defined only by their
    data-type, dimensions, byte order and offset.

    If offset is set to a negative value, the data-block is located from the end of
    the file, skipping any header (offset=-1 reads the last dim2*dim1*bpp bytes).
    """

    def __init__(self, *args, **kwargs):
        fabioimage.__init__(self, *args, **kwargs)

    @staticmethod
    def swap_needed(endian):
        """
        Decide if we need to byteswap
        """
        if (endian == '<' and numpy.little_endian) or (endian == '>' and not numpy.little_endian):
            return False
        if (endian == '>' and numpy.little_endian) or (endian == '<' and not numpy.little_endian):
            return True

    def read(self, fname, dim1, dim2, offset=0, bytecode="int32", endian="<"):
        """
        Read a binary image

        @param fname: file name
        @type fname: string
        @param dim1: image dimension along the fast index
        @param dim2: image dimension along the slow index
        @param offset: starting position of the data-block. If negative, the block is located from the end of the file.
        @param bytecode: can be "int8", "int16", "int32", "int64", "uint8", "uint16", "uint32", "uint64", "float32", "float64", ...
        @param endian: byte order of the data, little-endian ("<") or big-endian (">")

        """
        self.filename = fname
        self.dim1 = dim1
        self.dim2 = dim2
        self.bytecode = bytecode
        dims = [dim2, dim1]
        bpp = len(numpy.array(0, bytecode).tostring())  # bytes per pixel for the given bytecode
        size = dims[0] * dims[1] * bpp

        with open(self.filename, "rb") as f:
            if offset >= 0:
                f.seek(offset)
            else:
                try:
                    f.seek(-size + offset + 1, 2)  # seek backwards from EOF: offset=-1 reads the last data-block
                except IOError:
                    logger.warning('expected datablock too large, please check bytecode settings: %s', bytecode)
                except Exception:
                    logger.error('Uncommon error encountered when reading file')
            rawData = f.read(size)
        if self.swap_needed(endian):
            data = numpy.fromstring(rawData, bytecode).byteswap().reshape(tuple(dims))
        else:
            data = numpy.fromstring(rawData, bytecode).reshape(tuple(dims))
        self.data = data
        return self

    def estimate_offset_value(self, fname, dim1, dim2, bytecode="int32"):
        "Estimates the size of a file"
        with open(fname, "rb") as f:
            bpp = len(numpy.array(0, bytecode).tostring())
            size = dim1 * dim2 * bpp
            totsize = len(f.read())
        logger.info('total size (bytes): %s', totsize)
        logger.info('expected data size given parameters (bytes): %s', size)
        logger.info('estimation of the offset value (bytes): %s', totsize - size)

    def write(self, fname):
        "Write the image data as a raw, headerless binary block"
        with open(fname, mode="wb") as outfile:
            outfile.write(self.data.tostring())
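
As a rough usage sketch (not part of the packaged file): the file name, dimensions and bytecode below are hypothetical, and assume a headerless raw image stored as little-endian 16-bit unsigned integers.

from fabio.binaryimage import binaryimage

# Hypothetical example file: a headerless 2048 x 2048 raw image of
# little-endian ("<") unsigned 16-bit integers.
img = binaryimage()
img.read("raw_detector.img", dim1=2048, dim2=2048, offset=0,
         bytecode="uint16", endian="<")
print(img.data.shape)  # (2048, 2048), i.e. (dim2, dim1)

# If the header size is unknown, estimate_offset_value() logs the difference
# between the actual file size and the expected data-block size.
img.estimate_offset_value("raw_detector.img", 2048, 2048, bytecode="uint16")

# Write the data back out as a raw, headerless binary block.
img.write("copy_of_raw_detector.img")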