This file is indexed.

/usr/share/pyshared/neo/io/asciispiketrainio.py is in python-neo 0.3.3-1.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

# -*- coding: utf-8 -*-

"""
Class for reading/writing SpikeTrains in a text file.
This is the simple case where each spike train is written on its own line.

Supported: Read/Write

Author: sgarcia

"""

import os

import numpy as np
import quantities as pq

from neo.io.baseio import BaseIO
from neo.core import Segment, SpikeTrain
from neo.io.tools import create_many_to_one_relationship


class AsciiSpikeTrainIO(BaseIO):
    """

    Classe for reading/writing SpikeTrain in a text file.
    Each Spiketrain is a line.

    Usage:
        >>> from neo import io
        >>> r = io.AsciiSpikeTrainIO( filename = 'File_ascii_spiketrain_1.txt')
        >>> seg = r.read_segment(lazy = False, cascade = True,)
        >>> print seg.spiketrains     # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
        [<SpikeTrain(array([ 3.89981604,  4.73258781,  0.608428  ,  4.60246277,  1.23805797,
        ...

    """

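    # The class attributes below advertise this IO's capabilities and its
    # default read/write parameters.  They are intended for neo's generic IO
    # machinery (BaseIO) and are not used directly by the methods below.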
    is_readable = True
    is_writable = True

    supported_objects = [Segment, SpikeTrain]
    readable_objects = [Segment]
    writeable_objects = [Segment]

    has_header = False
    is_streameable = False

    read_params = {
        Segment: [
            ('delimiter', {'value': '\t', 'possible': ['\t', ' ', ',', ';']}),
            ('t_start', {'value': 0.}),
        ]
    }
    write_params = {
        Segment: [
            ('delimiter', {'value': '\t', 'possible': ['\t', ' ', ',', ';']}),
        ]
    }

    name = None
    extensions = ['txt']

    mode = 'file'


    def __init__(self, filename=None):
        """
        This class reads/writes SpikeTrains in a text file.
        Each row is a spike train.

        **Arguments**

        filename : the filename to read/write

        """
        BaseIO.__init__(self)
        self.filename = filename

    def read_segment(self,
                     lazy=False,
                     cascade=True,
                     delimiter='\t',
                     t_start=0. * pq.s,
                     unit=pq.s,
                     ):
        """
        Arguments:
            delimiter : column delimiter in the file: '\t', one or two spaces, ',' or ';'
            t_start : start time of all spike trains (0 by default)
            unit : unit of the spike times; can be a str or a Quantity
        """
        unit = pq.Quantity(1, unit)

        seg = Segment(file_origin=os.path.basename(self.filename))
        if not cascade:
            return seg

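        # Open in universal-newline mode ('U', a Python 2 idiom); each line of
        # the file holds one spike train.  The trailing newline is stripped,
        # the line is split on the delimiter, and empty fields produced by a
        # leading or trailing delimiter are discarded.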
        f = open(self.filename, 'Ur')
        for i, line in enumerate(f):
            alldata = line[:-1].split(delimiter)
            if alldata[-1] == '':
                alldata = alldata[:-1]
            if alldata[0] == '':
                alldata = alldata[1:]
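            # In lazy mode no spike times are loaded: an empty SpikeTrain is
            # created and only the number of spikes is kept (lazy_shape).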
            if lazy:
                spike_times = []
                t_stop = t_start
            else:
                spike_times = np.array(alldata).astype('f')
                t_stop = spike_times.max() * unit

            sptr = SpikeTrain(spike_times * unit, t_start=t_start, t_stop=t_stop)
            if lazy:
                sptr.lazy_shape = len(alldata)

            sptr.annotate(channel_index=i)
            seg.spiketrains.append(sptr)
        f.close()

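        # Fill in the reverse (child -> parent) links between the Segment and
        # its SpikeTrains.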
        create_many_to_one_relationship(seg)
        return seg

    def write_segment(self, segment,
                      delimiter='\t',
                      ):
        """
        Write the SpikeTrains of a Segment in a txt file.
        Each row is a spike train.

        Arguments:
            segment : the segment to write. Only the spike trains will be written.
            delimiter : column delimiter in the file: '\t', one or two spaces, ',' or ';'

            The t_start information is lost.

        """

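        # One spike train per line: each spike time is written as a float
        # followed by the delimiter, then the line is terminated by a newline.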
        f = open(self.filename, 'w')
        for sptr in segment.spiketrains:
            for ts in sptr:
                f.write('%f%s' % (ts, delimiter))
            f.write('\n')
        f.close()
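

# Example round-trip usage (a minimal sketch; the filenames are hypothetical):
#
#     from neo import io
#     r = io.AsciiSpikeTrainIO(filename='spiketrains.txt')
#     seg = r.read_segment(delimiter='\t', unit='s')
#     w = io.AsciiSpikeTrainIO(filename='spiketrains_copy.txt')
#     w.write_segment(seg, delimiter='\t')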