This file is indexed.

/usr/lib/python2.7/dist-packages/azure/storage/_http/batchclient.py is in python-azure-storage 0.33.0-1.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

#-------------------------------------------------------------------------
# Copyright (c) Microsoft.  All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import sys
import uuid

from azure.common import (
    AzureHttpError,
)
from ..models import (
    AzureBatchOperationError,
    AzureBatchValidationError,
)
from .._common_error import (
    _ERROR_CANNOT_FIND_PARTITION_KEY,
    _ERROR_CANNOT_FIND_ROW_KEY,
    _ERROR_INCORRECT_TABLE_IN_BATCH,
    _ERROR_INCORRECT_PARTITION_KEY_IN_BATCH,
    _ERROR_DUPLICATE_ROW_KEY_IN_BATCH,
    _ERROR_BATCH_COMMIT_FAIL,
)
from .._common_serialization import (
    ETree,
    url_unquote,
    _get_etree_text,
    _etree_entity_feed_namespaces,
    _update_request_uri_query,
)
from ..table._serialization import (
    _update_storage_table_header,
)
from . import HTTPError, HTTPRequest, HTTPResponse
from .httpclient import _HTTPClient

_DATASERVICES_NS = 'http://schemas.microsoft.com/ado/2007/08/dataservices'

if sys.version_info < (3,):
    def _new_boundary():
        return str(uuid.uuid1())
else:
    def _new_boundary():
        return str(uuid.uuid1()).encode('utf-8')


class _BatchClient(_HTTPClient):

    '''
    This is the class used for batch operations against the storage table
    service. It only supports one changeset per batch.
    '''

    def __init__(self, service_instance, authentication,
                 protocol='http', request_session=None, timeout=65, user_agent=''):
        _HTTPClient.__init__(self, service_instance, protocol=protocol, request_session=request_session, timeout=timeout, user_agent=user_agent)
        self.authentication = authentication
        self.is_batch = False
        self.batch_requests = []
        self.batch_table = ''
        self.batch_partition_key = ''
        self.batch_row_keys = []

    def get_request_table(self, request):
        '''
        Extracts the table name from request.path. The path has either
        "/mytable(...)" or "/mytable" format.

        request:
            the request to insert, update or delete entity
        '''
        if '(' in request.path:
            pos = request.path.find('(')
            return request.path[1:pos]
        else:
            return request.path[1:]

    def get_request_partition_key(self, request):
        '''
        Extracts the PartitionKey from request.body if it is a POST request, or
        from request.path otherwise. Only an insert operation uses a POST
        request, in which case the PartitionKey is in the request body.

        request:
            the request to insert, update or delete entity
        '''
        if request.method == 'POST':
            doc = ETree.fromstring(request.body)
            part_key = doc.find('./atom:content/m:properties/d:PartitionKey', _etree_entity_feed_namespaces)
            if part_key is None:
                raise AzureBatchValidationError(_ERROR_CANNOT_FIND_PARTITION_KEY)
            return _get_etree_text(part_key)
        else:
            uri = url_unquote(request.path)
            pos1 = uri.find('PartitionKey=\'')
            pos2 = uri.find('\',', pos1)
            if pos1 == -1 or pos2 == -1:
                raise AzureBatchValidationError(_ERROR_CANNOT_FIND_PARTITION_KEY)
            return uri[pos1 + len('PartitionKey=\''):pos2]

    def get_request_row_key(self, request):
        '''
        Extracts the RowKey from request.body if it is a POST request, or
        from request.path otherwise. Only an insert operation uses a POST
        request, in which case the RowKey is in the request body.

        request:
            the request to insert, update or delete entity
        '''
        if request.method == 'POST':
            doc = ETree.fromstring(request.body)
            row_key = doc.find('./atom:content/m:properties/d:RowKey', _etree_entity_feed_namespaces)
            if row_key is None:
                raise AzureBatchValidationError(_ERROR_CANNOT_FIND_ROW_KEY)
            return _get_etree_text(row_key)
        else:
            uri = url_unquote(request.path)
            pos1 = uri.find('RowKey=\'')
            pos2 = uri.find('\')', pos1)
            if pos1 == -1 or pos2 == -1:
                raise AzureBatchValidationError(_ERROR_CANNOT_FIND_ROW_KEY)
            row_key = uri[pos1 + len('RowKey=\''):pos2]
            return row_key

    def validate_request_table(self, request):
        '''
        Validates that all requests have the same table name. Sets the table
        name if this is the first request of the batch operation.

        request:
            the request to insert, update or delete entity
        '''
        if self.batch_table:
            if self.get_request_table(request) != self.batch_table:
                raise AzureBatchValidationError(_ERROR_INCORRECT_TABLE_IN_BATCH)
        else:
            self.batch_table = self.get_request_table(request)

    def validate_request_partition_key(self, request):
        '''
        Validates that all requests have the same PartitionKey. Sets the
        PartitionKey if this is the first request of the batch operation.

        request:
            the request to insert, update or delete entity
        '''
        if self.batch_partition_key:
            if self.get_request_partition_key(request) != \
                self.batch_partition_key:
                raise AzureBatchValidationError(_ERROR_INCORRECT_PARTITION_KEY_IN_BATCH)
        else:
            self.batch_partition_key = self.get_request_partition_key(request)

    def validate_request_row_key(self, request):
        '''
        Validates that all requests have different RowKeys and adds the RowKey
        to the existing RowKey list.

        request:
            the request to insert, update or delete entity
        '''
        row_key = self.get_request_row_key(request)
        if row_key in self.batch_row_keys:
            raise AzureBatchValidationError(_ERROR_DUPLICATE_ROW_KEY_IN_BATCH)
        self.batch_row_keys.append(row_key)

    def begin_batch(self):
        '''
        Starts the batch operation. Initializes the batch variables:

        is_batch:
            batch operation flag.
        batch_table:
            the table name of the batch operation
        batch_partition_key:
            the PartitionKey of the batch requests.
        batch_row_keys:
            the list of RowKeys of the requests added so far.
        batch_requests:
            the list of requests in the batch.
        '''
        self.is_batch = True
        self.batch_table = ''
        self.batch_partition_key = ''
        self.batch_row_keys = []
        self.batch_requests = []

    def insert_request_to_batch(self, request):
        '''
        Adds request to batch operation.

        request:
            the request to insert, update or delete entity
        '''
        self.validate_request_table(request)
        self.validate_request_partition_key(request)
        self.validate_request_row_key(request)
        self.batch_requests.append(request)

    def commit_batch(self):
        ''' Resets batch flag and commits the batch requests. '''
        if self.is_batch:
            self.is_batch = False
            self.commit_batch_requests()

    def commit_batch_requests(self):
        ''' Commits the batch requests. '''

        batch_boundary = b'batch_' + _new_boundary()
        changeset_boundary = b'changeset_' + _new_boundary()

        # Commits the batch only if the requests list is not empty.
        if self.batch_requests:
            request = HTTPRequest()
            request.method = 'POST'
            request.host = self.batch_requests[0].host
            request.path = '/$batch'
            request.headers = [
                ('Content-Type', 'multipart/mixed; boundary=' + \
                    batch_boundary.decode('utf-8')),
                ('Accept', 'application/atom+xml,application/xml'),
                ('Accept-Charset', 'UTF-8')]

            request.body = b'--' + batch_boundary + b'\n'
            request.body += b'Content-Type: multipart/mixed; boundary='
            request.body += changeset_boundary + b'\n\n'

            content_id = 1

            # Adds each request body to the POST data.
            for batch_request in self.batch_requests:
                request.body += b'--' + changeset_boundary + b'\n'
                request.body += b'Content-Type: application/http\n'
                request.body += b'Content-Transfer-Encoding: binary\n\n'
                request.body += batch_request.method.encode('utf-8')
                request.body += b' http://'
                request.body += batch_request.host.encode('utf-8')
                request.body += batch_request.path.encode('utf-8')
                request.body += b' HTTP/1.1\n'
                request.body += b'Content-ID: '
                request.body += str(content_id).encode('utf-8') + b'\n'
                content_id += 1

                # Add different headers for different types of requests.
                if not batch_request.method == 'DELETE':
                    request.body += \
                        b'Content-Type: application/atom+xml;type=entry\n'
                    for name, value in batch_request.headers:
                        if name == 'If-Match':
                            request.body += name.encode('utf-8') + b': '
                            request.body += value.encode('utf-8') + b'\n'
                            break
                    request.body += b'Content-Length: '
                    request.body += str(len(batch_request.body)).encode('utf-8')
                    request.body += b'\n\n'
                    request.body += batch_request.body + b'\n'
                else:
                    for name, value in batch_request.headers:
                        # If-Match should already be included in
                        # batch_request.headers, but in case it is missing,
                        # just add it.
                        if name == 'If-Match':
                            request.body += name.encode('utf-8') + b': '
                            request.body += value.encode('utf-8') + b'\n\n'
                            break
                    else:
                        request.body += b'If-Match: *\n\n'

            request.body += b'--' + changeset_boundary + b'--' + b'\n'
            request.body += b'--' + batch_boundary + b'--'

            request.path, request.query = _update_request_uri_query(request)
            request.headers = _update_storage_table_header(request)
            self.authentication.sign_request(request)

            # Submit the whole request as a single batch request.
            response = self.perform_request(request)
            if response.status >= 300:
                # This exception will be caught by the general error handler
                # and raised as an azure http exception
                raise HTTPError(response.status,
                                _ERROR_BATCH_COMMIT_FAIL,
                                response.headers,
                                response.body)

            # http://www.odata.org/documentation/odata-version-2-0/batch-processing/
            # The body of a ChangeSet response is either a response for all the
            # successfully processed change request within the ChangeSet,
            # formatted exactly as it would have appeared outside of a batch, 
            # or a single response indicating a failure of the entire ChangeSet.
            responses = self._parse_batch_response(response.body)
            if responses and responses[0].status >= 300:
                self._report_batch_error(responses[0])

    def cancel_batch(self):
        ''' Resets the batch flag. '''
        self.is_batch = False

    def _parse_batch_response(self, body):
        parts = body.split(b'--changesetresponse_')

        responses = []
        for part in parts:
            httpLocation = part.find(b'HTTP/')
            if httpLocation > 0:
                response = self._parse_batch_response_part(part[httpLocation:])
                responses.append(response)

        return responses

    def _parse_batch_response_part(self, part):
        lines = part.splitlines()

        # First line is the HTTP status/reason
        status, _, reason = lines[0].partition(b' ')[2].partition(b' ')

        # Followed by headers and body
        headers = []
        body = b''
        isBody = False
        for line in lines[1:]:
            if line == b'' and not isBody:
                isBody = True
            elif isBody:
                body += line
            else:
                headerName, _, headerVal = line.partition(b':')
                headers.append((headerName.lower(), headerVal))

        return HTTPResponse(int(status), reason.strip(), headers, body)

    def _report_batch_error(self, response):
        doc = ETree.fromstring(response.body)

        code_element = doc.find('./m:code', _etree_entity_feed_namespaces)
        code = _get_etree_text(code_element) if code_element is not None else ''

        message_element = doc.find('./m:message', _etree_entity_feed_namespaces)
        message = _get_etree_text(message_element) if message_element is not None else ''

        raise AzureBatchOperationError(message, response.status, code)
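
For reference, here is a minimal usage sketch of the batch client defined above. It is illustrative only: the table_service object, its authentication attribute, and the prepared_entity_requests list are hypothetical names introduced for this example and are not part of the file itself; the sketch simply exercises the public methods defined in this file.

# Illustrative sketch -- not part of batchclient.py.
# table_service, table_service.authentication and prepared_entity_requests
# are assumed to exist: a service instance and an authentication object
# compatible with _HTTPClient, and a list of HTTPRequest objects for the
# entity operations (insert/update/delete) built elsewhere.
from azure.storage._http.batchclient import _BatchClient
from azure.storage.models import AzureBatchValidationError

batch_client = _BatchClient(table_service, table_service.authentication)

batch_client.begin_batch()               # reset table/partition/row-key state
try:
    for entity_request in prepared_entity_requests:
        # Validates the table name, PartitionKey and RowKey before the
        # request is queued for the single changeset.
        batch_client.insert_request_to_batch(entity_request)
    batch_client.commit_batch()          # build the multipart body, sign and submit
except AzureBatchValidationError:
    batch_client.cancel_batch()          # abandon the batch on validation failure
    raise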