Python twisted.web.http.HTTPChannel() Examples
The following are 30 code examples of twisted.web.http.HTTPChannel().
You can vote up the examples you like or vote down the ones you don't,
and you can go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the module
twisted.web.http, or try the search function.
Example #1
Source File: test_http.py From python-for-android with Apache License 2.0 | 6 votes |
def test_requestBodyTimeout(self):
    """
    L{HTTPChannel} resets its timeout whenever data from a request body
    is delivered to it.
    """
    clock = Clock()
    transport = StringTransport()
    protocol = http.HTTPChannel()
    protocol.timeOut = 100
    protocol.callLater = clock.callLater
    protocol.makeConnection(transport)
    # Bug fix: dataReceived takes bytes; the original passed str literals
    # ('POST ...' and 'x'), which breaks on Python 3.  All sibling
    # examples in this file already use byte strings.
    protocol.dataReceived(b'POST / HTTP/1.0\r\nContent-Length: 2\r\n\r\n')
    # Just short of the timeout: still connected.
    clock.advance(99)
    self.assertFalse(transport.disconnecting)
    # A body byte arrives, which resets the timer.
    protocol.dataReceived(b'x')
    clock.advance(99)
    self.assertFalse(transport.disconnecting)
    protocol.dataReceived(b'x')
    self.assertEqual(len(protocol.requests), 1)
Example #2
Source File: test_http.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 6 votes |
def test_basicAuth(self):
    """
    L{HTTPChannel} provides username and password information supplied in
    an I{Authorization} header to the L{Request} which makes it available
    via its C{getUser} and C{getPassword} methods.
    """
    requests = []

    class Request(http.Request):
        def process(self):
            self.credentials = (self.getUser(), self.getPassword())
            requests.append(self)

    for u, p in [(b"foo", b"bar"), (b"hello", b"there:z")]:
        # base64.encodebytes replaces encodestring, which was deprecated
        # since Python 3.1 and removed in Python 3.9.
        s = base64.encodebytes(b":".join((u, p))).strip()
        f = b"GET / HTTP/1.0\nAuthorization: Basic " + s + b"\n\n"
        self.runRequest(f, Request, 0)
        req = requests.pop()
        self.assertEqual((u, p), req.credentials)
Example #3
Source File: test_http.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 6 votes |
def test_chunkedResponses(self):
    """
    The L{HTTPChannel} chunks responses when the protocol requires it.
    """
    transport = StringTransport()
    channel = http.HTTPChannel()
    channel.transport = transport
    request = http.Request(channel, False)
    request.setResponseCode(200)
    request.clientproto = b"HTTP/1.1"
    request.responseHeaders.setRawHeaders(b"test", [b"lemur"])
    # Two writes with no Content-Length force chunked transfer encoding.
    request.write(b'Hello')
    request.write(b'World!')
    self.assertResponseEquals(
        transport.value(),
        [(b"HTTP/1.1 200 OK",
          b"Test: lemur",
          b"Transfer-Encoding: chunked",
          b"5\r\nHello\r\n6\r\nWorld!\r\n")])
Example #4
Source File: test_http.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 6 votes |
def test_requestBodyDefaultTimeout(self):
    """
    L{HTTPChannel}'s default timeout is 60 seconds.
    """
    clock = Clock()
    transport = StringTransport()
    protocol = http.HTTPFactory().buildProtocol(None)
    # Reaching into _channel is a violation of the abstraction layer of
    # _genericHTTPChannelProtocol, but policies.TimeoutMixin does not
    # accept a reactor on the object.
    # See https://twistedmatrix.com/trac/ticket/8488
    protocol._channel.callLater = clock.callLater
    protocol.makeConnection(transport)
    protocol.dataReceived(b'POST / HTTP/1.0\r\nContent-Length: 2\r\n\r\n')
    # One second short of the default timeout: still connected.
    clock.advance(59)
    self.assertFalse(transport.disconnecting)
    # Crossing 60 seconds disconnects.
    clock.advance(1)
    self.assertTrue(transport.disconnecting)
Example #5
Source File: test_http.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 6 votes |
def test_headers(self):
    """
    Headers received by L{HTTPChannel} in a request are made available to
    the L{Request}.
    """
    processed = []

    class MyRequest(http.Request):
        def process(self):
            processed.append(self)
            self.finish()

    lines = (
        b"GET / HTTP/1.0",
        b"Foo: bar",
        b"baz: Quux",
        b"baz: quux",
        b"",
        b"",
    )
    self.runRequest(b'\n'.join(lines), MyRequest, 0)
    [request] = processed
    # Header lookup is case-insensitive; duplicate headers accumulate.
    self.assertEqual(
        request.requestHeaders.getRawHeaders(b'foo'), [b'bar'])
    self.assertEqual(
        request.requestHeaders.getRawHeaders(b'bAz'), [b'Quux', b'quux'])
Example #6
Source File: test_http.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 6 votes |
def test_losingConnection(self):
    """
    Calling L{http.Request.loseConnection} causes the transport to be
    disconnected.
    """
    transport = StringTransport()
    channel = http.HTTPChannel()
    channel.requestFactory = self.ShutdownHTTPHandler
    channel.makeConnection(transport)
    channel.dataReceived(self.request)

    # The transport should have been shut down.
    self.assertTrue(transport.disconnecting)

    # No response should have been written.
    self.assertEqual(transport.value(), b'')
Example #7
Source File: test_http.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 6 votes |
def test_tooManyHeaders(self):
    """
    L{HTTPChannel} enforces a limit of C{HTTPChannel.maxHeaders} on the
    number of headers received per request.
    """
    processed = []

    class MyRequest(http.Request):
        def process(self):
            processed.append(self)

    # Build a request with two more headers than the channel allows.
    lines = [b"GET / HTTP/1.0"]
    for n in range(http.HTTPChannel.maxHeaders + 2):
        lines.append(networkString("%s: foo" % (n,)))
    lines.extend([b"", b""])

    channel = self.runRequest(b"\n".join(lines), MyRequest, 0)
    # The request was rejected before any processing happened.
    self.assertEqual(processed, [])
    self.assertEqual(
        channel.transport.value(),
        b"HTTP/1.1 400 Bad Request\r\n\r\n")
Example #8
Source File: test_http.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 6 votes |
def test_noPipelining(self):
    """
    Pipelined requests are buffered, not processed in parallel.
    """
    transport = StringTransport()
    channel = http.HTTPChannel()
    channel.requestFactory = DelayedHTTPHandler
    channel.makeConnection(transport)
    # Deliver one byte at a time, to stress the parser.
    for byte in iterbytes(self.requests):
        channel.dataReceived(byte)

    # So far only one request should have been dispatched.
    self.assertEqual(transport.value(), b'')
    self.assertEqual(1, len(channel.requests))

    # Now complete each request, one at a time.
    while channel.requests:
        self.assertEqual(1, len(channel.requests))
        channel.requests[0].delayedProcess()

    self.assertResponseEquals(transport.value(), self.expected_response)
Example #9
Source File: test_http.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 6 votes |
def test_connectionLost(self):
    """
    L{http.Request.connectionLost} closes L{Request.content} and drops the
    reference to the L{HTTPChannel} to assist with garbage collection.
    """
    request = http.Request(DummyChannel(), False)

    # Cause Request.content to be created at all, and keep our own
    # reference to it in case the Request drops its reference later on.
    request.gotLength(10)
    content = request.content

    # Deliver some body bytes, then simulate a lost connection.
    request.handleContentChunk(b"hello")
    request.connectionLost(Failure(ConnectionLost("Finished")))

    self.assertTrue(content.closed)
    self.assertIdentical(request.channel, None)
Example #10
Source File: test_http.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 6 votes |
def test_requestBodyTimeoutFromFactory(self):
    """
    L{HTTPChannel} times out whenever data from a request body is not
    delivered to it in time, even when it gets built from a
    L{HTTPFactory}.
    """
    clock = Clock()
    factory = http.HTTPFactory(timeout=100, reactor=clock)
    factory.startFactory()
    protocol = factory.buildProtocol(None)
    transport = StringTransport()

    # Confirm that the timeout is what we think it is.
    self.assertEqual(protocol.timeOut, 100)

    # This is a terrible violation of the abstraction layer of
    # _genericHTTPChannelProtocol, but we need to do it because
    # policies.TimeoutMixin doesn't accept a reactor on the object.
    # See https://twistedmatrix.com/trac/ticket/8488
    protocol._channel.callLater = clock.callLater

    protocol.makeConnection(transport)
    protocol.dataReceived(b'POST / HTTP/1.0\r\nContent-Length: 2\r\n\r\n')
    # Still connected just before the timeout; timed out just after.
    clock.advance(99)
    self.assertFalse(transport.disconnecting)
    clock.advance(2)
    self.assertTrue(transport.disconnecting)
Example #11
Source File: test_http.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 6 votes |
def test_HTTP10(self):
    """
    HTTP/1.0 requests do not get 100-continue returned, even if
    'Expect: 100-continue' is included (RFC 2616 10.1.1).
    """
    transport = StringTransport()
    channel = http.HTTPChannel()
    channel.requestFactory = DummyHTTPHandler
    channel.makeConnection(transport)
    for line in (b"GET / HTTP/1.0\r\n",
                 b"Host: www.example.com\r\n",
                 b"Content-Length: 3\r\n",
                 b"Expect: 100-continue\r\n",
                 b"\r\n"):
        channel.dataReceived(line)
    # No interim 100 Continue response was written before the body.
    self.assertEqual(transport.value(), b"")
    channel.dataReceived(b"abc")
    self.assertResponseEquals(
        transport.value(),
        [(b"HTTP/1.0 200 OK",
          b"Command: GET",
          b"Content-Length: 13",
          b"Version: HTTP/1.0",
          b"Request: /",
          b"'''\n3\nabc'''\n")])
Example #12
Source File: test_http.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 6 votes |
def test_HTTPChannelStopRequestProducer(self):
    """
    If there is a request producer registered with L{HTTPChannel}, calling
    C{stopProducing} causes that producer to be stopped as well.
    """
    channel, transport = self.buildChannelAndTransport(
        StringTransport(), DelayedHTTPHandler
    )
    # Feed a request in to spawn a Request object, then grab it.
    channel.dataReceived(self.request)
    request = channel.requests[0]

    # Register a dummy producer; it begins unpaused.
    producer = DummyProducer()
    request.registerProducer(producer, True)
    self.assertEqual(producer.events, [])

    # Stopping production on the transport stops the request producer too.
    channel.stopProducing()
    self.assertEqual(producer.events, ['stop'])
Example #13
Source File: test_http.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 6 votes |
def test_HTTPChannelUnregistersSelfWhenTimingOut(self):
    """
    L{HTTPChannel} unregisters itself when it times out a connection.
    """
    clock = Clock()
    transport = StringTransport()
    channel = http.HTTPChannel()

    # Patch the channel's callLater method so the test controls time.
    channel.timeOut = 100
    channel.callLater = clock.callLater
    channel.makeConnection(transport)

    # Almost at the timeout, the channel is still registered as the
    # transport's streaming producer.
    clock.advance(99)
    self.assertIs(transport.producer, channel)
    self.assertIs(transport.streaming, True)

    # Firing the timeout unregisters it.
    clock.advance(1)
    self.assertIs(transport.producer, None)
    self.assertIs(transport.streaming, None)
Example #14
Source File: test_http.py From learn_python3_spider with MIT License | 6 votes |
def test_requestBodyTimeout(self):
    """
    L{HTTPChannel} resets its timeout whenever data from a request body
    is delivered to it.
    """
    reactor = Clock()
    conn = StringTransport()
    proto = http.HTTPChannel()
    proto.timeOut = 100
    proto.callLater = reactor.callLater
    proto.makeConnection(conn)
    proto.dataReceived(b'POST / HTTP/1.0\r\nContent-Length: 2\r\n\r\n')
    # Just short of the timeout: still connected.
    reactor.advance(99)
    self.assertFalse(conn.disconnecting)
    # A body byte arrives, which resets the timer.
    proto.dataReceived(b'x')
    reactor.advance(99)
    self.assertFalse(conn.disconnecting)
    proto.dataReceived(b'x')
    self.assertEqual(len(proto.requests), 1)
Example #15
Source File: test_http.py From learn_python3_spider with MIT License | 6 votes |
def test_transportNotAbortedWithZeroAbortTimeout(self):
    """
    If the L{HTTPChannel} has its c{abortTimeout} set to L{None}, it
    never aborts.
    """
    clock = Clock()
    transport = StringTransport()
    protocol = http.HTTPFactory().buildProtocol(None)
    protocol._channel.abortTimeout = None
    protocol = parametrizeTimeoutMixin(protocol, clock)

    protocol.makeConnection(transport)
    protocol.dataReceived(b'POST / HTTP/1.0\r\nContent-Length: 2\r\n\r\n')
    self.assertFalse(transport.disconnecting)
    self.assertFalse(transport.disconnected)

    # Force the initial timeout: a clean loseConnection, not an abort.
    clock.advance(60)
    self.assertTrue(transport.disconnecting)
    self.assertFalse(transport.disconnected)

    # Move an absurdly long way just to prove the point.
    clock.advance(2 ** 32)
    self.assertTrue(transport.disconnecting)
    self.assertFalse(transport.disconnected)
Example #16
Source File: test_http.py From learn_python3_spider with MIT License | 6 votes |
def test_noPipelining(self):
    """
    Pipelined requests are buffered and dispatched one at a time, never
    in parallel.
    """
    conn = StringTransport()
    proto = http.HTTPChannel()
    proto.requestFactory = DelayedHTTPHandlerProxy
    proto.makeConnection(conn)

    # Feed the data one byte at a time, to stress the parser.
    for octet in iterbytes(self.requests):
        proto.dataReceived(octet)

    # Only the first request should have been dispatched so far.
    self.assertEqual(conn.value(), b'')
    self.assertEqual(1, len(proto.requests))

    # Complete each request in turn.
    while proto.requests:
        self.assertEqual(1, len(proto.requests))
        proto.requests[0].original.delayedProcess()

    self.assertResponseEquals(conn.value(), self.expected_response)
Example #17
Source File: test_http.py From learn_python3_spider with MIT License | 6 votes |
def test_noPipelining(self):
    """
    Pipelined requests get buffered, not processed in parallel.
    """
    conn = StringTransport()
    proto = http.HTTPChannel()
    proto.requestFactory = DelayedHTTPHandlerProxy
    proto.makeConnection(conn)

    # Stress the parser by delivering the data byte by byte.
    for octet in iterbytes(self.requests):
        proto.dataReceived(octet)

    # So far only one request should have been dispatched.
    self.assertEqual(conn.value(), b'')
    self.assertEqual(1, len(proto.requests))

    # Process each buffered request, one at a time.
    while proto.requests:
        self.assertEqual(1, len(proto.requests))
        proto.requests[0].original.delayedProcess()

    self.assertResponseEquals(conn.value(), self.expectedResponses)
Example #18
Source File: test_http.py From learn_python3_spider with MIT License | 6 votes |
def test_pipeliningReadLimit(self):
    """
    When pipelined requests are received, we will optimistically continue
    receiving data up to a specified limit, then pause the transport.

    @see: L{http.HTTPChannel._optimisticEagerReadSize}
    """
    b = StringTransport()
    a = http.HTTPChannel()
    a.requestFactory = DelayedHTTPHandlerProxy
    a.makeConnection(b)
    underLimit = a._optimisticEagerReadSize // len(self.requests)
    for x in range(1, underLimit + 1):
        a.dataReceived(self.requests)
        self.assertEqual(b.producerState, 'producing',
                         'state was {state!r} after {x} iterations'
                         .format(state=b.producerState, x=x))
    # One more batch pushes past the limit and pauses the transport.
    # assertEqual replaces the deprecated assertEquals alias (removed
    # in Python 3.12).
    a.dataReceived(self.requests)
    self.assertEqual(b.producerState, 'paused')
Example #19
Source File: test_http.py From learn_python3_spider with MIT License | 6 votes |
def test_losingConnection(self):
    """
    Calling L{http.Request.loseConnection} causes the transport to be
    disconnected.
    """
    conn = StringTransport()
    proto = http.HTTPChannel()
    proto.requestFactory = _makeRequestProxyFactory(self.ShutdownHTTPHandler)
    proto.makeConnection(conn)
    proto.dataReceived(self.request)

    # The transport should have been shut down...
    self.assertTrue(conn.disconnecting)
    # ...and no response should have been written.
    self.assertEqual(conn.value(), b'')
Example #20
Source File: test_http.py From learn_python3_spider with MIT License | 6 votes |
def test_chunkedResponses(self):
    """
    The L{HTTPChannel} correctly chunks responses when needed.
    """
    conn = StringTransport()
    proto = http.HTTPChannel()
    proto.makeConnection(conn)
    request = http.Request(proto, False)
    request.setResponseCode(200)
    request.clientproto = b"HTTP/1.1"
    request.responseHeaders.setRawHeaders(b"test", [b"lemur"])
    # Two writes with no Content-Length force chunked transfer encoding.
    request.write(b'Hello')
    request.write(b'World!')
    self.assertResponseEquals(
        conn.value(),
        [(b"HTTP/1.1 200 OK",
          b"Test: lemur",
          b"Transfer-Encoding: chunked",
          b"5\r\nHello\r\n6\r\nWorld!\r\n")])
Example #21
Source File: test_http.py From learn_python3_spider with MIT License | 6 votes |
def test_basicAuth(self):
    """
    L{HTTPChannel} provides username and password information supplied in
    an I{Authorization} header to the L{Request} which makes it available
    via its C{getUser} and C{getPassword} methods.
    """
    requests = []

    class Request(http.Request):
        def process(self):
            self.credentials = (self.getUser(), self.getPassword())
            requests.append(self)

    for u, p in [(b"foo", b"bar"), (b"hello", b"there:z")]:
        # encodebytes replaces base64.encodestring, deprecated since
        # Python 3.1 and removed in Python 3.9.
        s = base64.encodebytes(b":".join((u, p))).strip()
        f = b"GET / HTTP/1.0\nAuthorization: Basic " + s + b"\n\n"
        self.runRequest(f, Request, 0)
        req = requests.pop()
        self.assertEqual((u, p), req.credentials)
Example #22
Source File: test_http.py From learn_python3_spider with MIT License | 6 votes |
def test_tooManyHeaders(self):
    """
    L{HTTPChannel} enforces a limit of C{HTTPChannel.maxHeaders} on the
    number of headers received per request.
    """
    seen = []

    class MyRequest(http.Request):
        def process(self):
            seen.append(self)

    # Two more headers than the channel permits.
    requestLines = [b"GET / HTTP/1.0"]
    for i in range(http.HTTPChannel.maxHeaders + 2):
        requestLines.append(networkString("%s: foo" % (i,)))
    requestLines.extend([b"", b""])

    channel = self.runRequest(b"\n".join(requestLines), MyRequest, 0)
    # Processing never happened; the channel answered 400 instead.
    self.assertEqual(seen, [])
    self.assertEqual(
        channel.transport.value(),
        b"HTTP/1.1 400 Bad Request\r\n\r\n")
Example #23
Source File: test_http.py From learn_python3_spider with MIT License | 6 votes |
def test_connectionLost(self):
    """
    L{http.Request.connectionLost} closes L{Request.content} and drops the
    reference to the L{HTTPChannel} to assist with garbage collection.
    """
    request = http.Request(DummyChannel(), False)
    # Force creation of Request.content, and hold our own reference to it
    # in case the Request drops its reference later on.
    request.gotLength(10)
    body = request.content
    request.handleContentChunk(b"hello")

    # Then something goes wrong and content should get closed.
    request.connectionLost(Failure(ConnectionLost("Finished")))
    self.assertTrue(body.closed)
    self.assertIdentical(request.channel, None)
Example #24
Source File: test_http.py From learn_python3_spider with MIT License | 6 votes |
def test_requestBodyTimeoutFromFactory(self):
    """
    L{HTTPChannel} times out whenever data from a request body is not
    delivered to it in time, even when it gets built from a
    L{HTTPFactory}.
    """
    clock = Clock()
    factory = http.HTTPFactory(timeout=100, reactor=clock)
    factory.startFactory()
    proto = factory.buildProtocol(None)
    conn = StringTransport()
    proto = parametrizeTimeoutMixin(proto, clock)

    # Confirm that the timeout is what we think it is.
    self.assertEqual(proto.timeOut, 100)

    proto.makeConnection(conn)
    proto.dataReceived(b'POST / HTTP/1.0\r\nContent-Length: 2\r\n\r\n')
    # Still connected just before the timeout; disconnected just after.
    clock.advance(99)
    self.assertFalse(conn.disconnecting)
    clock.advance(2)
    self.assertTrue(conn.disconnecting)
Example #25
Source File: test_http.py From learn_python3_spider with MIT License | 6 votes |
def test_HTTP10(self):
    """
    HTTP/1.0 requests do not get 100-continue returned, even if
    'Expect: 100-continue' is included (RFC 2616 10.1.1).
    """
    conn = StringTransport()
    proto = http.HTTPChannel()
    proto.requestFactory = DummyHTTPHandlerProxy
    proto.makeConnection(conn)
    proto.dataReceived(b"GET / HTTP/1.0\r\n")
    proto.dataReceived(b"Host: www.example.com\r\n")
    proto.dataReceived(b"Content-Length: 3\r\n")
    proto.dataReceived(b"Expect: 100-continue\r\n")
    proto.dataReceived(b"\r\n")
    # Nothing is written before the body arrives: no interim 100 response.
    self.assertEqual(conn.value(), b"")
    proto.dataReceived(b"abc")
    self.assertResponseEquals(
        conn.value(),
        [(b"HTTP/1.0 200 OK",
          b"Command: GET",
          b"Content-Length: 13",
          b"Version: HTTP/1.0",
          b"Request: /",
          b"'''\n3\nabc'''\n")])
Example #26
Source File: test_http.py From learn_python3_spider with MIT License | 6 votes |
def test_HTTPChannelStopRequestProducer(self):
    """
    If there is a request producer registered with L{HTTPChannel}, calling
    C{stopProducing} causes that producer to be stopped as well.
    """
    channel, transport = self.buildChannelAndTransport(
        StringTransport(), DelayedHTTPHandler
    )
    # Feed a request in to spawn a Request object, then grab it.
    channel.dataReceived(self.request)
    spawned = channel.requests[0].original

    # Register a dummy producer; it begins unpaused.
    producer = DummyProducer()
    spawned.registerProducer(producer, True)
    self.assertEqual(producer.events, [])

    # Stopping production on the transport stops the request producer too.
    channel.stopProducing()
    self.assertEqual(producer.events, ['stop'])
Example #27
Source File: test_http.py From learn_python3_spider with MIT License | 6 votes |
def test_HTTPChannelUnregistersSelfWhenTimingOut(self):
    """
    L{HTTPChannel} unregisters itself when it times out a connection.
    """
    reactor = Clock()
    conn = StringTransport()
    proto = http.HTTPChannel()

    # Patch the channel's callLater method.
    proto.timeOut = 100
    proto.callLater = reactor.callLater
    proto.makeConnection(conn)

    # Just before the timeout the channel is still the transport's
    # streaming producer.
    reactor.advance(99)
    self.assertIs(conn.producer, proto)
    self.assertIs(conn.streaming, True)

    # Firing the timeout unregisters it.
    reactor.advance(1)
    self.assertIs(conn.producer, None)
    self.assertIs(conn.streaming, None)
Example #28
Source File: test_http.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 6 votes |
def test_requestBodyTimeout(self):
    """
    L{HTTPChannel} resets its timeout whenever data from a request body
    is delivered to it.
    """
    clock = Clock()
    transport = StringTransport()
    channel = http.HTTPChannel()
    channel.timeOut = 100
    channel.callLater = clock.callLater
    channel.makeConnection(transport)
    channel.dataReceived(b'POST / HTTP/1.0\r\nContent-Length: 2\r\n\r\n')
    # Each body byte arrives just inside the timeout window, so the
    # connection is kept alive both times.
    for _ in range(2):
        clock.advance(99)
        self.assertFalse(transport.disconnecting)
        channel.dataReceived(b'x')
    self.assertEqual(len(channel.requests), 1)
Example #29
Source File: test_http.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 5 votes |
def test_isSecure(self):
    """
    Calling L{http.Request.isSecure} when the channel is backed with a
    secure transport will return L{True}.
    """
    sslTransport = DummyChannel.SSL()
    channel = http.HTTPChannel()
    channel.makeConnection(sslTransport)
    request = http.Request(channel)
    self.assertTrue(request.isSecure())
Example #30
Source File: test_http.py From learn_python3_spider with MIT License | 5 votes |
def test_headersTooBigInitialCommand(self):
    """
    Enforces a limit of C{HTTPChannel.totalHeadersSize} on the size of
    headers received per request starting from initial command line.
    """
    seen = []

    class MyRequest(http.Request):
        def process(self):
            seen.append(self)
            self.finish()

    channel = http.HTTPChannel()
    channel.totalHeadersSize = 10
    # The request line alone already exceeds the 10-byte header budget.
    channel = self.runRequest(
        httpRequest=b'GET /path/longer/than/10 HTTP/1.1\n',
        requestFactory=MyRequest,
        channel=channel,
        success=False
    )
    self.assertEqual(seen, [])
    self.assertEqual(
        channel.transport.value(),
        b"HTTP/1.1 400 Bad Request\r\n\r\n")