This file is indexed.

/usr/lib/python2.7/dist-packages/odoorpc/rpc/jsonrpclib.py is in python-odoorpc 0.4.1-1.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

# -*- coding: utf-8 -*-
##############################################################################
#
#    OdooRPC
#    Copyright (C) 2014 Sébastien Alix.
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Lesser General Public License as published
#    by the Free Software Foundation, either version 3 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Lesser General Public License for more details.
#
#    You should have received a copy of the GNU Lesser General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
"""Provides the :class:`ProxyJSON` class for JSON-RPC requests."""
import json
import random
import sys
# Python 2
if sys.version_info[0] < 3:
    from urllib2 import build_opener, HTTPCookieProcessor, Request
    from cookielib import CookieJar

    def encode_data(data):
        return data

    def decode_data(data):
        return data
# Python >= 3
else:
    from urllib.request import build_opener, HTTPCookieProcessor, Request
    from http.cookiejar import CookieJar
    import io

    def encode_data(data):
        try:
            return bytes(data, 'utf-8')
        except:
            return bytes(data)

    def decode_data(data):
        return io.StringIO(data.read().decode('utf-8'))


class Proxy(object):
    """Base class to implement a proxy to perform requests."""
    def __init__(self, host, port, timeout=120, ssl=False, opener=None):
        self._root_url = "{http}{host}:{port}".format(
            http=(ssl and "https://" or "http://"), host=host, port=port)
        self._timeout = timeout
        self._builder = URLBuilder(self)
        self._opener = opener
        if not opener:
            cookie_jar = CookieJar()
            self._opener = build_opener(HTTPCookieProcessor(cookie_jar))

    def __getattr__(self, name):
        return getattr(self._builder, name)

    def __getitem__(self, url):
        return self._builder[url]
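
# A minimal sketch of how the two concrete proxies defined below can share a
# session: the 'opener' argument accepts a pre-built urllib opener, so passing
# the one created by a first proxy (with its CookieJar) to a second proxy makes
# both send the same session cookies. The host and port are assumptions used
# only for illustration:
#
#     json_rpc = ProxyJSON("localhost", 8069)
#     http_rpc = ProxyHTTP("localhost", 8069, opener=json_rpc._opener)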


class ProxyJSON(Proxy):
    """The :class:`ProxyJSON` class provides dynamic access
    to all JSON methods.
    """
    def __init__(self, host, port, timeout=120, ssl=False, opener=None,
                 deserialize=True):
        Proxy.__init__(self, host, port, timeout, ssl, opener)
        self._deserialize = deserialize

    def __call__(self, url, params):
        data = json.dumps({
            "jsonrpc": "2.0",
            "method": "call",
            "params": params,
            "id": random.randint(0, 1000000000),
        })
        if url.startswith('/'):
            url = url[1:]
        request = Request(url='/'.join([self._root_url, url]),
                          data=encode_data(data))
        request.add_header('Content-Type', 'application/json')
        response = self._opener.open(request, timeout=self._timeout)
        if not self._deserialize:
            return response
        return json.load(decode_data(response))
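
# Usage sketch (the host, port, database and credentials below are assumptions,
# not values taken from this file): a ProxyJSON instance is called with an
# endpoint URL and a 'params' dict, which are wrapped into a JSON-RPC 2.0
# request. The same call can also be written as attribute access, which
# URLBuilder (defined below) turns into the endpoint URL:
#
#     rpc = ProxyJSON("localhost", 8069)
#     result = rpc('web/session/authenticate',
#                  {'db': 'mydb', 'login': 'admin', 'password': 'secret'})
#     # equivalent, with the URL built from attribute access:
#     result = rpc.web.session.authenticate(
#         db='mydb', login='admin', password='secret')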


class ProxyHTTP(Proxy):
    """The :class:`ProxyHTTP` class provides dynamic access
    to all HTTP methods.
    """
    def __call__(self, url, data=None, headers=None):
        kwargs = {
            'url': '/'.join([self._root_url, url]),
        }
        if data:
            kwargs['data'] = encode_data(data)
        request = Request(**kwargs)
        if headers:
            for hkey in headers:
                hvalue = headers[hkey]
                request.add_header(hkey, hvalue)
        return self._opener.open(request, timeout=self._timeout)
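
# Usage sketch (the endpoint name is an assumption): ProxyHTTP sends a plain
# HTTP request to the given URL; passing 'data' turns it into a POST, and
# 'headers' is an optional dict of extra request headers. The raw response
# object is returned, with no JSON deserialization:
#
#     http = ProxyHTTP("localhost", 8069)
#     response = http('web/binary/company_logo')
#     logo = response.read()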


class URLBuilder(object):
    """Auto-builds a URL as its attributes are accessed.
    Used by the :class:`ProxyJSON` and :class:`ProxyHTTP` classes.
    """
    def __init__(self, rpc, url=None):
        self._rpc = rpc
        self._url = url

    def __getattr__(self, path):
        new_url = self._url and '/'.join([self._url, path]) or path
        return URLBuilder(self._rpc, new_url)

    def __getitem__(self, path):
        if path and path[0] == '/':
            path = path[1:]
        if path and path[-1] == '/':
            path = path[:-1]
        return getattr(self, path)

    def __call__(self, **kwargs):
        return self._rpc(self._url, kwargs)

    def __str__(self):
        return self._url
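
# Sketch of the URL-building behaviour (the endpoint names are illustrative):
# each attribute access returns a new URLBuilder with that path segment
# appended, item access strips surrounding slashes, and calling the builder
# forwards the accumulated URL plus keyword arguments to the owning proxy:
#
#     rpc = ProxyJSON("localhost", 8069)
#     str(rpc.web.session.get_session_info)   # 'web/session/get_session_info'
#     str(rpc['/web/dataset/call_kw/'])       # 'web/dataset/call_kw'
#     rpc.web.session.get_session_info()      # performs the JSON-RPC call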

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: