Python aiohttp.Timeout() Examples

The following are 30 code examples of aiohttp.Timeout(), collected from open-source projects. Each example notes its source file, project, and license. You may also want to check out the other available functions and classes of the aiohttp module.
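Note that aiohttp.Timeout() comes from older aiohttp releases (1.x/2.x); in aiohttp 3.x the timeout is normally expressed with aiohttp.ClientTimeout instead. As a minimal sketch of both styles, assuming a current aiohttp 3.x install and a placeholder URL (not taken from any of the projects listed below), the pattern looks roughly like this:

import asyncio
import aiohttp

async def fetch_text(url):
    # Older style, as used in the examples below (aiohttp 1.x/2.x):
    #     with aiohttp.Timeout(10):
    #         async with session.get(url) as resp:
    #             ...
    # Newer style (aiohttp 3.x): pass a ClientTimeout to the session (or to the
    # individual request) instead of wrapping the call in aiohttp.Timeout().
    timeout = aiohttp.ClientTimeout(total=10)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        async with session.get(url) as resp:
            return await resp.text()

# Example usage (placeholder URL):
# asyncio.run(fetch_text("https://example.com"))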
Example #1
Source File: __init__.py    From mlimages with MIT License
async def fetch_image(self, session, relative, image_url):
        fname = self.file_api.get_file_name(image_url)
        p = os.path.join(relative, fname)
        fetched = False
        try:
            with aiohttp.Timeout(self.timeout):
                async with session.get(image_url) as r:
                    if r.status == 200 and self.file_api.get_file_name(r.url) == fname:
                        c = await r.read()
                        if c:
                            with open(self.file_api.to_abs(p), "wb") as f:
                                f.write(c)
                                fetched = True
        except FileNotFoundError as ex:
            self.logger.error("{0} is not found.".format(p))
        except concurrent.futures._base.TimeoutError as tx:
            self.logger.warning("{0} timed out.".format(image_url))
        except Exception as ex:
            self.logger.warning("failed to fetch image. url: {0}, cause: {1}".format(image_url, str(ex)))
        return fetched 
Example #2
Source File: gitter.py    From fishroom with GNU General Public License v3.0
def _must_post(self, api, data=None, json=None, timeout=10, **kwargs):
        if data is not None:
            kwargs['data'] = data
        elif json is not None:
            kwargs['json'] = json
        else:
            kwargs['data'] = {}
        kwargs['timeout'] = timeout

        try:
            r = requests.post(api, **kwargs)
            return r
        except requests.exceptions.Timeout:
            logger.error("Timeout requesting Gitter")
        except KeyboardInterrupt:
            raise
        except:
            logger.exception("Unknown error requesting Gitter")
        return None 
Example #3
Source File: funcs.py    From NotSoBot with MIT License
async def queue_message(self, channel_id:str, msg):
		embed = '0'
		if type(msg) == discord.Embed:
			embed = '1'
			msg = jsonpickle.encode(msg)
		else:
			msg = str(msg)
		message_id = random.randint(0, 1000000)
		payload = {'key': 'verysecretkey', 'id': message_id, 'channel_id': channel_id, 'message': msg, 'embed': embed}
		try:
			with aiohttp.Timeout(15):
				async with self.session.post('http://ip:port/queue', data=payload) as r:
					return True
		except (asyncio.TimeoutError, aiohttp.errors.ClientConnectionError, aiohttp.errors.ClientError):
			await asyncio.sleep(5)
			return
		except Exception as e:
			print('queue error: '+str(e)) 
Example #4
Source File: misc.py    From DueUtil with GNU General Public License v3.0
async def get_glitter_text(gif_text):
    """
    Screen scrape glitter text
    """

    with aiohttp.Timeout(10):
        async with aiohttp.ClientSession() as session:
            async with session.get(GLITTER_TEXT_URL % urllib.parse.quote(gif_text.replace("'", ""))) as page_response:
                html = await page_response.text()
                soup = BeautifulSoup(html, "html.parser")
                box = soup.find("textarea", {"id": "dLink"})
                gif_text_area = str(box)
                gif_url = gif_text_area.replace(
                    '<textarea class="field" cols="12" id="dLink" onclick="this.focus();this.select()" readonly="">',
                    "",
                    1).replace('</textarea>', "", 1)
                return await util.download_file(gif_url) 
Example #5
Source File: aiohttp_fetcher.py    From web_develop with GNU General Public License v3.0
async def fetch(retry=0):
    proxy = 'http://{}'.format(Proxy.get_random()['address'])
    headers = {'user-agent': get_user_agent()}
    conn = aiohttp.ProxyConnector(proxy=proxy)

    url = 'http://httpbin.org/ip'

    try:
        with aiohttp.ClientSession(connector=conn) as session:
            with aiohttp.Timeout(TIMEOUT):
                async with session.get(url, headers=headers) as resp:
                    return await resp.json()
    except (ProxyConnectionError, TimeoutError):
        try:
            p = Proxy.objects.get(address=proxy)
            if p:
                p.delete()
        except DoesNotExist:
            pass
        retry += 1
        if retry > 5:
            raise TimeoutError()
        await asyncio.sleep(1)
        return await fetch(retry=retry) 
Example #6
Source File: save_article_content.py    From web_develop with GNU General Public License v3.0
async def fetch(url, retry=0):
    proxy = 'http://{}'.format(Proxy.get_random()['address'])
    headers = {'user-agent': get_user_agent()}
    conn = aiohttp.ProxyConnector(proxy=proxy)

    js_url = gen_js_url(url)

    try:
        with aiohttp.ClientSession(connector=conn) as session:
            with aiohttp.Timeout(TIMEOUT):
                async with session.get(url, headers=headers) as resp:
                    html_text = await resp.text()

                async with session.get(js_url, headers=headers) as resp:
                    js_data = await resp.json()
    except:
        retry += 1
        if retry > 5:
            raise CrawlerError()
        await asyncio.sleep(1)
        return await fetch(url, retry=retry)
    return html_text, js_data 
Example #7
Source File: message_queue_bot.py    From NotSoBot with MIT License
async def get_queue():
	payload = {'key': ''}
	try:
		with aiohttp.ClientSession() as session:
			with aiohttp.Timeout(15):
				async with session.post('http://ip:port/queued', data=payload) as resp:
					load = await resp.json()
		queue = {}
		for s in load:
			queue[s] = int(load[s][2])
		q = {}
		for key in sorted(queue, key=lambda k: queue[k], reverse=False):
			q[key] = load[key]
		return q
	except Exception as e:
		print(e)
		return {} 
Example #8
Source File: gitter.py    From fishroom with GNU General Public License v3.0
async def fetch(self, session, room, id_blacklist):
        url = self._stream_api.format(room=room)
        while True:
            # print("polling on url %s" % url)
            try:
                with aiohttp.Timeout(300):
                    async with session.get(url, headers=self.headers) as resp:
                        while True:
                            line = await resp.content.readline()
                            line = bytes.decode(line, 'utf-8').strip()
                            if not line:
                                continue
                            msg = self.parse_jmsg(room, json.loads(line))
                            if msg.sender in id_blacklist:
                                continue
                            self.send_to_bus(msg)
            except asyncio.TimeoutError:
                pass
            except:
                raise 
Example #9
Source File: sorter.py    From fingerprint-securedrop with GNU Affero General Public License v3.0
async def fetch(self, url):
        """Load a webpage and return the body as plain text."""
        self.logger.info("{url}: loading...".format(**locals()))
        try:
            with aiohttp.Timeout(self.page_load_timeout, loop=self.loop):
                async with self.session.get(url,
                                            allow_redirects=True,
                                            headers=self.headers) as resp:

                    if resp.status != 200:
                        self.logger.warning("{url} was not reachable. HTTP "
                                            "error code {resp.status} was "
                                            "returned".format(**locals()))
                        raise SorterResponseCodeError

                    self.logger.info("{url}: loaded "
                                     "successfully.".format(**locals()))
                    return await resp.text()
        except asyncio.TimeoutError:
            self.logger.warning("{url}: timed out after "
                                "{self.page_load_timeout}.".format(**locals()))
            raise SorterTimeoutError
        except (aiosocks.errors.SocksError,
                aiohttp.errors.ServerDisconnectedError,
                aiohttp.errors.ClientResponseError) as exc:
            self.logger.warning("{url} was not reachable: "
                                "{exc}".format(**locals()))
            raise SorterConnectionError
        except aiohttp.errors.ClientOSError as exception_msg:
            if "SSL" in str(exception_msg):
                self.logger.warning("{url}: certificate error (probably due to "
                                    "use of a self-signed "
                                    "cert.".format(**locals()))
                raise SorterCertError
            else:
                raise
        except (ssl.CertificateError, aiohttp.errors.ClientOSError):
            self.logger.warning("{url}: certificate error (probably due to "
                                "use of a self-signed "
                                "cert.".format(**locals()))
            raise SorterCertError 
Example #10
Source File: utilities.py    From discordbot.py with MIT License
async def fetchURL(url, loop):
    async with aiohttp.ClientSession(loop=loop) as session:
        with aiohttp.Timeout(10, loop=session.loop):
            async with session.get(url) as response:
                return await response.text() 
Example #11
Source File: utilities.py    From discordbot.py with MIT License
async def downloadImage(url, folder, name, loop, chunkSize=20):
    result = {'canAccessURL': False, 'isImage': False, 'fileSaved': False}
    headers = {
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
        'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
        'Accept-Encoding': 'none',
        'Accept-Language': 'en-US,en;q=0.8',
        'Connection': 'keep-alive'}
    async with aiohttp.ClientSession(loop=loop) as session:
        with aiohttp.Timeout(10, loop=session.loop):
            async with session.get(url, headers=headers) as response:
                content_type = response.headers['content-type']
                if response.status == 200:
                    result['canAccessURL'] = True
                if "image" in content_type:
                    result['isImage'] = True
                if not result['canAccessURL'] or not result['isImage']:
                    return result
                extension = mimetypes.guess_extension(content_type)
                if extension == '.jpe':
                    extension = '.jpg'

                with open(folder + "/" + name + extension, 'wb') as fd:
                    while True:
                        chunk = await response.content.read(chunkSize)
                        if not chunk:
                            break
                        fd.write(chunk)
                result['fileSaved'] = True
                return result 
Example #12
Source File: Fun.py    From NotSoBot with MIT License
async def do_retro(self, text, bcg):
		if '|' not in text:
			if len(text) >= 15:
				text = [text[i:i + 15] for i in range(0, len(text), 15)]
			else:
				split = text.split()
				if len(split) == 1:
					text = [x for x in text]
					if len(text) == 4:
						text[2] = text[2]+text[-1]
						del text[3]
				else:
					text = split
		else:
			text = text.split('|')
		headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:43.0) Gecko/20100101 Firefox/43.0'}
		payload = aiohttp.FormData()
		payload.add_field('current-category', 'all_effects')
		payload.add_field('bcg', bcg)
		payload.add_field('txt', '4')
		count = 1
		for s in text:
			if count > 3:
				break
			payload.add_field('text'+str(count), s.replace("'", "\'"))
			count += 1
		try:
			with aiohttp.ClientSession() as session:
				with aiohttp.Timeout(5):
					async with session.post('https://photofunia.com/effects/retro-wave?server=3', data=payload, headers=headers) as r:
						txt = await r.text()
		except asyncio.TimeoutError:
			return
		match = self.retro_regex.findall(txt)
		if match:
			download_url = match[0][0]
			b = await self.bytes_download(download_url)
			return b
		return False 
Example #13
Source File: webutilities.py    From discordbot.py with MIT License
async def fetchURL(url, loop):
        async with aiohttp.ClientSession(loop=loop) as session:
            with aiohttp.Timeout(10, loop=session.loop):
                async with session.get(url) as response:
                    return await response.text() 
Example #14
Source File: Fun.py    From NotSoBot with MIT License
async def youtube_scrap(self, search:str, safety=False):
		headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:43.0) Gecko/20100101 Firefox/43.0'}
		search = quote(search)
		api = 'https://www.youtube.com/results?search_query={0}'.format(search)
		entries = {}
		cookies = {'PREF': 'cvdm=grid&al=en&f4=4000000&f5=30&f1=50000000&f2=8000000'} if safety else None
		with aiohttp.ClientSession(cookies=cookies) as session:
			with aiohttp.Timeout(5):
				async with session.get(api, headers=headers) as r:
					assert r.status == 200
					txt = await r.text()
		root = etree.fromstring(txt, etree.HTMLParser())
		search_nodes = root.findall(".//ol[@class='section-list']/li/ol[@class='item-section']/li")
		if len(search_nodes) == 0:
			return False
		search_nodes.pop(0)
		result = False
		for node in search_nodes:
			if result != False:
				break
			try:
				url_node = node.find('div/div/div/h3/a')
				if url_node is None:
					continue
				title = get_deep_text(url_node)
				url = 'https://www.youtube.com/{0}'.format(url_node.attrib['href'])
				result = [title, url]
			except:
				continue
		return result 
Example #15
Source File: funcs.py    From NotSoBot with MIT License
async def get_text(self, url:str):
		try:
			with aiohttp.Timeout(5):
				async with self.session.get(url) as resp:
					try:
						text = await resp.text()
						return text
					except:
						return False
		except asyncio.TimeoutError:
			return False 
Example #16
Source File: funcs.py    From NotSoBot with MIT License
async def bytes_download(self, url:str):
		try:
			with aiohttp.Timeout(5):
				async with self.session.get(url) as resp:
					data = await resp.read()
					b = BytesIO(data)
					b.seek(0)
					return b
		except asyncio.TimeoutError:
			return False
		except Exception as e:
			print(e)
			return False 
Example #17
Source File: funcs.py    From NotSoBot with MIT License
async def download(self, url:str, path:str):
		try:
			with aiohttp.Timeout(5):
				async with self.session.get(url) as resp:
					data = await resp.read()
					with open(path, "wb") as f:
						f.write(data)
		except asyncio.TimeoutError:
			return False 
Example #18
Source File: funcs.py    From NotSoBot with MIT License
async def isgif(self, url:str):
		try:
			with aiohttp.Timeout(5):
				async with self.session.head(url) as resp:
					if resp.status == 200:
						mime = resp.headers.get('Content-type', '').lower()
						if mime == "image/gif":
							return True
						else:
							return False
		except:
			return False 
Example #19
Source File: funcs.py    From NotSoBot with MIT License
async def isimage(self, url:str):
		try:
			with aiohttp.Timeout(5):
				async with self.session.head(url) as resp:
					if resp.status == 200:
						mime = resp.headers.get('Content-type', '').lower()
						if any([mime == x for x in self.image_mimes]):
							return True
						else:
							return False
		except:
			return False 
Example #20
Source File: webutilities.py    From discordbot.py with MIT License
async def downloadImage(url, folder, name, loop, chunkSize=20):
        result = {'canAccessURL': False, 'isImage': False, 'fileSaved': False}
        headers = {
            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
            'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
            'Accept-Encoding': 'none',
            'Accept-Language': 'en-US,en;q=0.8',
            'Connection': 'keep-alive'}
        async with aiohttp.ClientSession(loop=loop) as session:
            with aiohttp.Timeout(10, loop=session.loop):
                async with session.get(url, headers=headers) as response:
                    content_type = response.headers['content-type']
                    if response.status == 200:
                        result['canAccessURL'] = True
                    if "image" in content_type:
                        result['isImage'] = True
                    if not result['canAccessURL'] or not result['isImage']:
                        return result
                    extension = mimetypes.guess_extension(content_type)
                    if extension == '.jpe':
                        extension = '.jpg'

                    with open(folder + "/" + name + extension, 'wb') as fd:
                        while True:
                            chunk = await response.content.read(chunkSize)
                            if not chunk:
                                break
                            fd.write(chunk)
                    result['fileSaved'] = True
                    return result 
Example #21
Source File: proxy_client.py    From PRCDNS with The Unlicense
async def fetch(session, url, proxy=None):
        with aiohttp.Timeout(10):
            # http://127.0.0.1:8123
            async with session.get(url, proxy=proxy) as response:
                return await response.text() 
Example #22
Source File: owner.py    From rewrite with GNU General Public License v3.0
async def changepic(self, ctx, *, url: str):
        with aiohttp.Timeout(10):
            async with aiohttp.request("get", url) as res:
                await self.bot.user.edit(avatar=await res.read()) 
Example #23
Source File: util.py    From DueUtil with GNU General Public License v3.0
async def download_file(url):
    with aiohttp.Timeout(10):
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as response:
                file_data = io.BytesIO()
                while True:
                    chunk = await response.content.read(128)
                    if not chunk:
                        break
                    file_data.write(chunk)
                response.release()
                file_data.seek(0)
                return file_data 
Example #24
Source File: discoin.py    From DueUtil with GNU General Public License v3.0
async def reverse_transaction(receipt):

    reverse_data = {"receipt": receipt}

    with aiohttp.Timeout(10):
        async with aiohttp.ClientSession() as session:
            async with session.post(DISCOIN + REVERSE,
                                    data=json.dumps(reverse_data), headers=headers) as response:
                return await response.json() 
Example #25
Source File: discoin.py    From DueUtil with GNU General Public License v3.0
async def make_transaction(sender_id, amount, to):

    transaction_data = {
        "user": sender_id,
        "amount": amount,
        "exchangeTo": to
    }

    with aiohttp.Timeout(10):
        async with aiohttp.ClientSession() as session:
            async with session.post(DISCOIN + TRANSACTION,
                                    data=json.dumps(transaction_data), headers=headers) as response:
                return await response.json() 
Example #26
Source File: imagehelper.py    From DueUtil with GNU General Public License v3.0
async def url_image(url):
    # Checks headers only
    try:
        with aiohttp.Timeout(3):
            async with aiohttp.ClientSession() as session:
                async with session.head(url=url, allow_redirects=True) as response:
                    return "Content-Type" in response.headers and \
                           response.headers["Content-Type"].lower().startswith("image")
    except Exception as exception:
        util.logger.error("Got %s while checking image url.", exception)
        # Do not care about any of the network errors that could occur.
        pass
    return False 
Example #27
Source File: utils.py    From web_develop with GNU General Public License v3.0
async def fetch(url, proxy=None):
    conn = aiohttp.ProxyConnector(proxy=proxy)
    headers = {'user-agent': get_user_agent()}
    with aiohttp.ClientSession(connector=conn) as session:
        with aiohttp.Timeout(TIMEOUT):
            async with session.get('http://python.org', headers=headers) as resp:
                return await resp.json()
Example #28
Source File: rss.py    From Squid-Plugins with MIT License
async def _get_feed(self, url):
        text = None
        try:
            with aiohttp.ClientSession() as session:
                with aiohttp.Timeout(3):
                    async with session.get(url) as r:
                        text = await r.text()
        except:
            pass
        return text 
Example #29
Source File: bot.py    From RubyRoseBot with Mozilla Public License 2.0
async def setavatar(ctx, *, url:str=None):
    """Changes the bot's avatar"""
    if ctx.message.attachments:
        url = ctx.message.attachments[0].url
    elif url is None:
        await ctx.send("Please specify an avatar url if you did not attach a file")
        return
    try:
        with aiohttp.Timeout(10):
            async with aiosession.get(url.strip("<>")) as image:
                await bot.user.edit(avatar=await image.read())
    except Exception as e:
        await ctx.send("Unable to change avatar: {}".format(e))
        return
    await ctx.send(":eyes:") 
Example #30
Source File: client.py    From telegram-uz-bot with MIT License
async def call(self, endpoint, method='POST', raw=False, *args, **kwargs):
        if 'headers' not in kwargs:
            kwargs['headers'] = await self.get_headers()

        uri = self.uri(endpoint)
        logger.debug('Fetching: %s', uri)
        logger.debug('Headers: %s', kwargs['headers'])
        logger.debug('Cookies: %s', self.session.cookies)

        with aiohttp.Timeout(self.request_timeout):
            async with self.session.request(
                    method, uri, *args, **kwargs) as response:
                body = await response.read()
                if not response.status == 200:
                    try:
                        json = await response.json()
                    except Exception:  # TODO: narrow exception
                        json = None
                    ex = BadRequest if response.status == 400 else HTTPError
                    raise ex(response.status, body, kwargs.get('data'), json)
                if raw:
                    return body
                json = await response.json()
                if json.get('error'):
                    raise ResponseError(response.status, body, kwargs.get('data'), json)
                return json