This file is indexed.

/usr/share/pyshared/foomatic/urlutils.py is in python-foomatic 0.7.9.5build1.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

  1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
#
# urlutils.py - Simplified urllib handling
#
#   Written by Chris Lawrence <lawrencc@debian.org>
#   (C) 1999-2003 Chris Lawrence
#
# This program is freely distributable per the following license:
#
##  Permission to use, copy, modify, and distribute this software and its
##  documentation for any purpose and without fee is hereby granted,
##  provided that the above copyright notice appears in all copies and that
##  both that copyright notice and this permission notice appear in
##  supporting documentation.
##
##  I DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL
##  IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL I
##  BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
##  DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
##  WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
##  ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
##  SOFTWARE.
#
# Version 2.10; see changelog for revision history

import httplib, urllib, urllib2, getpass, re, socket, robotparser, urlparse
import mimetools

# Default timeout (seconds) applied to every socket this module opens.
TIMEOUT = 180
try:
    # Python 2.3+: the socket module supports timeouts natively.
    socket.setdefaulttimeout(TIMEOUT)
    Timeout = socket.timeout
except AttributeError:
    # FIX: was a bare `except:`, which also swallowed KeyboardInterrupt
    # and SystemExit.  The only expected failure here is AttributeError
    # on pre-2.3 Pythons lacking setdefaulttimeout; fall back to the
    # third-party timeoutsocket module in that case.
    import timeoutsocket
    timeoutsocket.setDefaultSocketTimeout(TIMEOUT)
    Timeout = timeoutsocket.Timeout

def decode (page):
    """Return *page* with any gzip/x-gzip/deflate Content-Encoding undone.

    If the response carries no recognised Content-Encoding header the
    original page object is returned untouched.  Otherwise a fresh
    file-like response is built around the decompressed body, with the
    Content-Encoding header dropped and the HTTP code/msg carried over.
    """
    encoding = page.info().get("Content-Encoding")
    if encoding not in ('gzip', 'x-gzip', 'deflate'):
        return page

    from cStringIO import StringIO
    # Socket-backed file objects cannot seek, so slurp the body up front.
    body = page.read()
    if encoding == 'deflate':
        import zlib
        stream = StringIO(zlib.decompress(body))
    else:
        import gzip
        stream = gzip.GzipFile('', 'rb', 9, StringIO(body))

    # Rebuild the header block without the now-stale Content-Encoding line.
    is_content_encoding = re.compile(r"content-encoding", re.I)
    kept = []
    for (name, value) in page.info().items():
        if not is_content_encoding.match(name):
            kept.append('%s: %s\n' % (name, value))
    headers = mimetools.Message(StringIO(''.join(kept)))

    wrapped = urllib.addinfourl(stream, headers, page.geturl())
    # Carry the HTTP status code and reason through to the replacement.
    if hasattr(page, 'code'):
        wrapped.code = page.code
    if hasattr(page, 'msg'):
        wrapped.msg = page.msg
    return wrapped

class HttpWithGzipHandler (urllib2.HTTPHandler):
    "HTTP handler that transparently decompresses gzip-encoded responses"
    def http_open (self, req):
        # Let the stock handler fetch the page, then undo any compression.
        response = urllib2.HTTPHandler.http_open(self, req)
        return decode(response)

if hasattr(httplib, 'HTTPS'):
    class HttpsWithGzipHandler (urllib2.HTTPSHandler):
        "HTTPS handler that transparently decompresses gzip-encoded responses"
        # BUG FIX: urllib2 dispatches by scheme to a method named
        # <scheme>_open, so an HTTPSHandler subclass must override
        # https_open.  The original overrode http_open, which is never
        # called for https URLs -- HTTPS responses were returned still
        # compressed.
        def https_open (self, req):
            return decode(urllib2.HTTPSHandler.https_open(self, req))

class handlepasswd(urllib2.HTTPPasswordMgrWithDefaultRealm):
    """Password manager that prompts interactively when nothing is on file."""
    def find_user_password(self, realm, authurl):
        # First consult any credentials previously registered for this
        # realm/URL combination.
        stored = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password(
            self, realm, authurl)
        if stored[0] is not None:
            return stored

        # Nothing stored: ask the user, and remember the answer so the
        # prompt is not repeated on the next challenge.
        user = raw_input('Enter username for %s at %s: ' % (realm, authurl))
        password = getpass.getpass(
            "Enter password for %s in %s at %s: " % (user, realm, authurl))
        self.add_password(realm, authurl, user, password)
        return user, password
        
# Shared urllib2 opener; built lazily on first call to urlopen().
_opener = None
def urlopen(req, proxies=None, data=None):
    """Open *req* (URL string or urllib2.Request) with gzip support.

    The opener is constructed once, cached in the module-global _opener,
    and also installed as the urllib2 default opener.
    """
    global _opener

    if not proxies:
        proxies = urllib.getproxies()

##  # Deflate seems to trigger an "unknown compression method" exception
##  # with ESPN.com
##  headers = {'Accept-Encoding' : 'gzip;q=1.0, deflate;q=0.9, identity;q=0.5'}
    headers = {'Accept-Encoding' : 'gzip;q=1.0, identity;q=0.5'}

    if isinstance(req, urllib2.Request):
        # Merge our Accept-Encoding header into the existing Request.
        for header_name, header_value in headers.items():
            req.add_header(header_name, header_value)
    else:
        req = urllib2.Request(req, data, headers)

    #proxy_support = urllib2.ProxyHandler(proxies)
    if _opener is None:
        handler_classes = [
            #proxy_support,
            urllib2.UnknownHandler,
            HttpWithGzipHandler,
            #urllib2.ProxyBasicAuthHandler(handlepasswd()),
            #urllib2.ProxyDigestAuthHandler(handlepasswd()),
            urllib2.HTTPDefaultErrorHandler,
            urllib2.HTTPRedirectHandler,
            urllib2.FileHandler,
        ]
        if hasattr(httplib, 'HTTPS'):
            handler_classes.append(HttpsWithGzipHandler)
        _opener = urllib2.build_opener(*handler_classes)
        # print _opener.handlers
        urllib2.install_opener(_opener)

    return _opener.open(req)

# Global useful URL opener; returns None if the page is absent, otherwise
# like urlopen
def open_url(url, http_proxy=None):
    proxies = urllib.getproxies()
    if http_proxy:
        proxies['http'] = http_proxy

    try:
        page = urlopen(url, proxies)
    except urllib2.HTTPError, x:
        if (x.code >= 400) or (x.code == -1) or (x.code in [301, 302]):
            return None
        else:
            raise
    except (socket.error, socket.gaierror, urllib2.URLError, Timeout), x:
        return None
    except IOError, data:
        if data and data[0] == 'http error' and data[1] == 404:
            return None
        else:
            return None
    return page