Python urllib.request.HTTPSHandler() Examples

The following are 27 code examples of urllib.request.HTTPSHandler(), collected from open-source projects. The original project and source file are noted above each example. You may also want to check out all available functions and classes of the module urllib.request, or try the search function.
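Before the examples, here is a minimal sketch (not taken from any of the projects below) of the typical pattern: build an ssl.SSLContext, wrap it in HTTPSHandler, and pass the handler to build_opener. The URL and timeout are placeholders.

import ssl
from urllib.request import HTTPSHandler, build_opener

# A default context verifies server certificates against the system CA store.
context = ssl.create_default_context()

# HTTPSHandler accepts the context plus an optional debuglevel for wire-level logging.
handler = HTTPSHandler(context=context, debuglevel=0)

# build_opener returns an OpenerDirector that routes https:// URLs through the handler.
opener = build_opener(handler)

response = opener.open('https://example.com', timeout=10)  # placeholder URL
print(response.status, len(response.read()))

Most of the examples below follow this shape, varying in how the SSL context, proxy handlers, and cookie processors are combined.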
Example #1
Source File: githubpy.py    From osint-scraper with MIT License
def get_access_token(self, code, state=None):
        '''
        In callback url: http://host/callback?code=123&state=xyz
        use code and state to get an access token.
        '''
        kw = dict(client_id=self._client_id, client_secret=self._client_secret, code=code)
        if self._redirect_uri:
            kw['redirect_uri'] = self._redirect_uri
        if state:
            kw['state'] = state
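        # Note: build_opener accepts handler classes as well as instances and instantiates them itself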
        opener = build_opener(HTTPSHandler)
        request = Request('https://github.com/login/oauth/access_token', data=_encode_params(kw))
        request.get_method = _METHOD_MAP['POST']
        request.add_header('Accept', 'application/json')
        try:
            response = opener.open(request, timeout=TIMEOUT)
            r = _parse_json(response.read())
            if 'error' in r:
                raise ApiAuthError(str(r.error))
            return str(r.access_token)
        except HTTPError as e:
            raise ApiAuthError('HTTPError when getting access token') 
Example #2
Source File: github.py    From services-to-wordcloud with MIT License
def get_access_token(self, code, state=None):
        '''
        In callback url: http://host/callback?code=123&state=xyz

        use code and state to get an access token.        
        '''
        kw = dict(client_id=self._client_id, client_secret=self._client_secret, code=code)
        if self._redirect_uri:
            kw['redirect_uri'] = self._redirect_uri
        if state:
            kw['state'] = state
        opener = build_opener(HTTPSHandler)
        request = Request('https://github.com/login/oauth/access_token', data=_encode_params(kw))
        request.get_method = _METHOD_MAP['POST']
        request.add_header('Accept', 'application/json')
        try:
            response = opener.open(request, timeout=TIMEOUT)
            r = _parse_json(response.read())
            if 'error' in r:
                raise ApiAuthError(str(r.error))
            return str(r.access_token)
        except HTTPError as e:
            raise ApiAuthError('HTTPError when getting access token') 
Example #3
Source File: Tracker.py    From plugin.video.netflix with MIT License
def debug():
        """ Activate debugging on urllib2 """
        handler = HTTPSHandler(debuglevel = 1)
        opener = build_opener(handler)
        install_opener(opener)

    # Store properties for all requests 
Example #4
Source File: pipstrap.py    From pipstrap with MIT License
def hashed_download(url, temp, digest):
    """Download ``url`` to ``temp``, make sure it has the SHA-256 ``digest``,
    and return its path."""
    # Based on pip 1.4.1's URLOpener but with cert verification removed. Python
    # >=2.7.9 verifies HTTPS certs itself, and, in any case, the cert
    # authenticity has only privacy (not arbitrary code execution)
    # implications, since we're checking hashes.
    def opener(using_https=True):
        opener = build_opener(HTTPSHandler())
        if using_https:
            # Strip out HTTPHandler to prevent MITM spoof:
            for handler in opener.handlers:
                if isinstance(handler, HTTPHandler):
                    opener.handlers.remove(handler)
        return opener

    def read_chunks(response, chunk_size):
        while True:
            chunk = response.read(chunk_size)
            if not chunk:
                break
            yield chunk

    parsed_url = urlparse(url)
    response = opener(using_https=parsed_url.scheme == 'https').open(url)
    path = join(temp, parsed_url.path.split('/')[-1])
    actual_hash = sha256()
    with open(path, 'wb') as file:
        for chunk in read_chunks(response, 4096):
            file.write(chunk)
            actual_hash.update(chunk)

    actual_digest = actual_hash.hexdigest()
    if actual_digest != digest:
        raise HashError(url, path, actual_digest, digest)
    return path 
Example #5
Source File: httpsclient.py    From temboard-agent with PostgreSQL License
def __init__(self, connection_class=UnverifiedHTTPSConnection):
        self.specialized_conn_class = connection_class
        urllib2.HTTPSHandler.__init__(self) 
Example #6
Source File: httpsclient.py    From temboard-agent with PostgreSQL License
def __init__(self, connection_class=VerifiedHTTPSConnection):
        self.specialized_conn_class = connection_class
        urllib2.HTTPSHandler.__init__(self) 
Example #7
Source File: instabrute.py    From BruteSploit with GNU General Public License v3.0
def get_csrf():
    """
    get CSRF token from login page to use in POST requests
    """
    global csrf_token

    print(bcolors.WARNING + "[+] Getting CSRF Token: " + bcolors.ENDC)

    try:
        opener = rq.build_opener(rq.HTTPHandler(), rq.HTTPSHandler())
        opener.addheaders = [('User-agent', 'Mozilla/5.0')]
        rq.install_opener(opener)

        request = rq.Request('https://www.instagram.com/')
        try:
            # python 2
            headers = rq.urlopen(request).info().headers
        except Exception:
            # python 3
            headers = rq.urlopen(request).info().get_all('Set-Cookie')

        for header in headers:
            if header.find('csrftoken') != -1:
                csrf_token = header.partition(';')[0].partition('=')[2]
                print(bcolors.OKGREEN + "[+] CSRF Token :", csrf_token, "\n" + bcolors.ENDC)
    except Exception as err:
        print(bcolors.FAIL + "[!] Can't get CSRF token , please use -d for debug" + bcolors.ENDC)

        if _debug:
            logger.error(err)

        print(bcolors.FAIL + "[!] Exiting..." + bcolors.ENDC)
        exit(3) 
Example #8
Source File: instabrute.py    From BruteSploit with GNU General Public License v3.0
def check_proxy(q):
    """
    check a proxy and, if it responds, append it to the working proxies list
    :param q:
    """
    if not q.empty():

        proxy = q.get(False)
        proxy = proxy.replace("\r", "").replace("\n", "")

        try:
            opener = rq.build_opener(
                rq.ProxyHandler({'https': 'https://' + proxy}),
                rq.HTTPHandler(),
                rq.HTTPSHandler()
            )

            opener.addheaders = [('User-agent', 'Mozilla/5.0')]
            rq.install_opener(opener)

            req = rq.Request('https://api.ipify.org/')

            if rq.urlopen(req).read().decode() == proxy.partition(':')[0]:
                proxys_working_list.update({proxy: proxy})
                if _verbose:
                    print(bcolors.OKGREEN + " --[+] ", proxy, " | PASS" + bcolors.ENDC)
            else:
                if _verbose:
                    print(" --[!] ", proxy, " | FAILED")

        except Exception as err:
            if _verbose:
                print(" --[!] ", proxy, " | FAILED")
            if _debug:
                logger.error(err)
            pass 
Example #9
Source File: urls.py    From ansible-role-jenkins with Apache License 2.0
def __init__(self, client_cert=None, client_key=None, unix_socket=None, **kwargs):
            urllib_request.HTTPSHandler.__init__(self, **kwargs)
            self.client_cert = client_cert
            self.client_key = client_key
            self._unix_socket = unix_socket 
Example #10
Source File: PriceTradeAnalyzer.py    From Stock-Price-Trade-Analyzer with GNU General Public License v3.0
def GetProxiedOpener():
	testURL = 'https://stooq.com'
	userName, password = 'mUser', 'SecureAccess'
	context = ssl._create_unverified_context()
	handler = webRequest.HTTPSHandler(context=context)
	i = -1
	functioning = False
	global currentProxyServer
	while not functioning and i < len(proxyList):
		if i >=0 or currentProxyServer==None: currentProxyServer = proxyList[i]
		proxy = webRequest.ProxyHandler({'https': r'http://' + userName + ':' + password + '@' + currentProxyServer})
		auth = webRequest.HTTPBasicAuthHandler()
		opener = webRequest.build_opener(proxy, auth, handler) 
		opener.addheaders = [('User-agent', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_4) AppleWebKit/603.1.30 (KHTML, like Gecko) Version/10.1 Safari/603.1.30')]
		#opener.addheaders = [('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/12.1.1 Safari/605.1.15')]	
		#opener.addheaders = [('Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.140 Safari/537.36 Edge/18.17763')]
		try:
			conn = opener.open(testURL)
			print('Proxy ' + currentProxyServer + ' is functioning')
			functioning = True
		except:
			print('Proxy ' + currentProxyServer + ' is not responding')
		i+=1
	return opener

#-------------------------------------------- Classes ----------------------------------------------- 
Example #11
Source File: github.py    From services-to-wordcloud with MIT License
def _http(self, _method, _path, **kw):
        data = None
        params = None
        if _method=='GET' and kw:
            _path = '%s?%s' % (_path, _encode_params(kw))
        if _method in ['POST', 'PATCH', 'PUT']:
            data = bytes(_encode_json(kw), 'utf-8')
        url = '%s%s' % (_URL, _path)
        opener = build_opener(HTTPSHandler)
        request = Request(url, data=data)
        request.get_method = _METHOD_MAP[_method]
        if self._authorization:
            request.add_header('Authorization', self._authorization)
        if _method in ['POST', 'PATCH', 'PUT']:
            request.add_header('Content-Type', 'application/x-www-form-urlencoded')
        try:
            response = opener.open(request, timeout=TIMEOUT)
            is_json = self._process_resp(response.headers)
            if is_json:
                return _parse_json(response.read().decode('utf-8'))
        except HTTPError as e:
            is_json = self._process_resp(e.headers)
            if is_json:
                json = _parse_json(e.read().decode('utf-8'))
            else:
                json = e.read().decode('utf-8')
            req = JsonObject(method=_method, url=url)
            resp = JsonObject(code=e.code, json=json)
            if resp.code==404:
                raise ApiNotFoundError(url, req, resp)
            raise ApiError(url, req, resp) 
Example #12
Source File: httpclient.py    From opsbro with MIT License
def get(self, uri, params={}, headers={}, with_status_code=False, timeout=10, user=None, password=None):
        data = None  # always none in GET
        
        if params:
            uri = "%s?%s" % (uri, urlencode(params))
        
        # SSL, user/password and basic
        # NOTE: currently don't manage ssl & user/password
        if uri.startswith('https://'):
            handler = HTTPSHandler(context=self.ssl_context)
        elif user and password:
            passwordMgr = HTTPPasswordMgrWithDefaultRealm()
            passwordMgr.add_password(None, uri, user, password)
            handler = HTTPBasicAuthHandler(passwordMgr)
        else:
            handler = HTTPHandler
        
        url_opener = build_opener(handler)
        
        req = Request(uri, data)
        req.get_method = lambda: 'GET'
        for (k, v) in headers.items():
            req.add_header(k, v)
        
        request = url_opener.open(req, timeout=timeout)
        
        response = request.read()
        status_code = request.code
        request.close()
        
        if not with_status_code:
            return response
        else:
            return (status_code, response) 
Example #13
Source File: connection.py    From python-mysql-pool with MIT License
def __init__(self, ssl_config):  # pylint: disable=E1002
            """Initialize"""
            if PY2:
                urllib2.HTTPSHandler.__init__(self)
            else:
                super().__init__()  # pylint: disable=W0104
            self._ssl_config = ssl_config 
Example #14
Source File: common.py    From googleads-python-lib with Apache License 2.0
def GetHandlers(self):
    """Retrieve the appropriate urllib handlers for the given configuration.

    Returns:
      A list of urllib.request.BaseHandler subclasses to be used when making
      calls with proxy.
    """
    handlers = []

    if self.ssl_context:
      handlers.append(HTTPSHandler(context=self.ssl_context))

    if self.proxies:
      handlers.append(ProxyHandler(self.proxies))

    return handlers 
Example #15
Source File: common.py    From googleads-python-lib with Apache License 2.0
def __init__(self, http_proxy=None, https_proxy=None, cafile=None,
               disable_certificate_validation=False):
    self._http_proxy = http_proxy
    self._https_proxy = https_proxy
    self.proxies = {}
    if self._https_proxy:
      self.proxies['https'] = str(self._https_proxy)
    if self._http_proxy:
      self.proxies['http'] = str(self._http_proxy)

    self.disable_certificate_validation = disable_certificate_validation
    self.cafile = None if disable_certificate_validation else cafile
    # Initialize the context used to generate the HTTPSHandler.
    self.ssl_context = self._InitSSLContext(
        self.cafile, self.disable_certificate_validation) 
Example #16
Source File: instabrute.py    From instabrute with GNU General Public License v3.0
def check_proxy(q):
    """
    check a proxy and, if it responds, append it to the working proxies list
    :param q:
    """
    if not q.empty():

        proxy = q.get(False)
        proxy = proxy.replace("\r", "").replace("\n", "")

        try:
            opener = rq.build_opener(
                rq.ProxyHandler({'https': 'https://' + proxy}),
                rq.HTTPHandler(),
                rq.HTTPSHandler()
            )

            opener.addheaders = [('User-agent', 'Mozilla/5.0')]
            rq.install_opener(opener)

            req = rq.Request('https://api.ipify.org/')

            if rq.urlopen(req).read().decode() == proxy.partition(':')[0]:
                proxys_working_list.update({proxy: proxy})
                if _verbose:
                    print(bcolors.OKGREEN + " --[+] ", proxy, " | PASS" + bcolors.ENDC)
            else:
                if _verbose:
                    print(" --[!] ", proxy, " | FAILED")

        except Exception as err:
            if _verbose:
                print(" --[!] ", proxy, " | FAILED")
            if _debug:
                logger.error(err)
            pass 
Example #17
Source File: http.py    From exactonline with GNU Lesser General Public License v3.0
def __init__(self, cacert_file):
        self.cacert_file = cacert_file
        request.HTTPSHandler.__init__(self) 
Example #18
Source File: httpbot.py    From humblebundle with GNU General Public License v3.0
def __init__(self, base_url="", tag="", cookiejar=None, debug=False):
        self.tag = tag
        hh  = urllib2.HTTPHandler( debuglevel=1 if debug else 0)
        hsh = urllib2.HTTPSHandler(debuglevel=1 if debug else 0)
        cp  = urllib2.HTTPCookieProcessor(cookiejar)
        self._opener = urllib2.build_opener(hh, hsh, cp)
        scheme, netloc, path, q, f  = urlparse.urlsplit(base_url, "http")
        if not netloc:
            netloc, _, path = path.partition('/')
        self.base_url = urlparse.urlunsplit((scheme, netloc, path, q, f)) 
Example #19
Source File: pipstrap.py    From sugardough with Apache License 2.0
def hashed_download(url, temp, digest):
    """Download ``url`` to ``temp``, make sure it has the SHA-256 ``digest``,
    and return its path."""
    # Based on pip 1.4.1's URLOpener but with cert verification removed
    def opener():
        opener = build_opener(HTTPSHandler())
        # Strip out HTTPHandler to prevent MITM spoof:
        for handler in opener.handlers:
            if isinstance(handler, HTTPHandler):
                opener.handlers.remove(handler)
        return opener

    def read_chunks(response, chunk_size):
        while True:
            chunk = response.read(chunk_size)
            if not chunk:
                break
            yield chunk

    response = opener().open(url)
    path = join(temp, urlparse(url).path.split('/')[-1])
    actual_hash = sha256()
    with open(path, 'wb') as file:
        for chunk in read_chunks(response, 4096):
            file.write(chunk)
            actual_hash.update(chunk)

    actual_digest = actual_hash.hexdigest()
    if actual_digest != digest:
        raise HashError(url, path, actual_digest, digest)
    return path 
Example #20
Source File: github.py    From loghub with MIT License
def _http(self, _method, _path, **kw):
        data = None
        params = None
        if _method == 'GET' and kw:
            _path = '%s?%s' % (_path, _encode_params(kw))
        if _method in ['POST', 'PATCH', 'PUT']:
            data = bytes(_encode_json(kw), 'utf-8')
        url = '%s%s' % (_URL, _path)
        opener = build_opener(HTTPSHandler)
        request = Request(url, data=data)
        request.get_method = _METHOD_MAP[_method]
        if self._authorization:
            request.add_header('Authorization', self._authorization)
        if _method in ['POST', 'PATCH', 'PUT']:
            request.add_header('Content-Type',
                               'application/x-www-form-urlencoded')

        class Resp():
            code = None

        resp = Resp()
        req = None

        try:
            response = opener.open(request, timeout=TIMEOUT)
            is_json = self._process_resp(response.headers)
            if is_json:
                return _parse_json(response.read().decode('utf-8'))
        except HTTPError as e:
            is_json = self._process_resp(e.headers)
            if is_json:
                json = _parse_json(e.read().decode('utf-8'))
            else:
                json = e.read().decode('utf-8')
            req = JsonObject(method=_method, url=url)
            resp = JsonObject(code=e.code, json=json)
        finally:
            if resp.code == 404:
                raise ApiNotFoundError(url, req, resp)
            elif req:
                raise ApiError(url, req, resp) 
Example #21
Source File: github.py    From loghub with MIT License
def get_access_token(self, code, state=None):
        '''
        In callback url: http://host/callback?code=123&state=xyz

        use code and state to get an access token.        
        '''
        kw = dict(
            client_id=self._client_id,
            client_secret=self._client_secret,
            code=code)
        if self._redirect_uri:
            kw['redirect_uri'] = self._redirect_uri
        if state:
            kw['state'] = state
        opener = build_opener(HTTPSHandler)
        request = Request(
            'https://github.com/login/oauth/access_token',
            data=_encode_params(kw))
        request.get_method = _METHOD_MAP['POST']
        request.add_header('Accept', 'application/json')
        try:
            response = opener.open(request, timeout=TIMEOUT)
            r = _parse_json(response.read())
            if 'error' in r:
                raise ApiAuthError(str(r.error))
            return str(r.access_token)
        except HTTPError as e:
            raise ApiAuthError('HTTPError when getting access token') 
Example #22
Source File: githubpy.py    From osint-scraper with MIT License
def _http(self, _method, _path, **kw):
        data = None
        params = None
        if _method=='GET' and kw:
            _path = '%s?%s' % (_path, _encode_params(kw))
        if _method in ['POST', 'PATCH', 'PUT']:
            data = bytes(_encode_json(kw), 'utf-8')
        url = '%s%s' % (_URL, _path)
        opener = build_opener(HTTPSHandler)
        request = Request(url, data=data)
        request.get_method = _METHOD_MAP[_method]
        if self._authorization:
            request.add_header('Authorization', self._authorization)
        if _method in ['POST', 'PATCH', 'PUT']:
            request.add_header('Content-Type', 'application/x-www-form-urlencoded')
        try:
            response = opener.open(request, timeout=TIMEOUT)
            is_json = self._process_resp(response.headers)
            if is_json:
                return _parse_json(response.read().decode('utf-8'))
        except HTTPError as e:
            is_json = self._process_resp(e.headers)
            if is_json:
                json = _parse_json(e.read().decode('utf-8'))
            else:
                json = e.read().decode('utf-8')
            req = JsonObject(method=_method, url=url)
            resp = JsonObject(code=e.code, json=json)
            if resp.code==404:
                raise ApiNotFoundError(url, req, resp)
            raise ApiError(url, req, resp) 
Example #23
Source File: instabrute.py    From instabrute with GNU General Public License v3.0
def get_csrf():
    """
    get CSRF token from login page to use in POST requests
    """
    global csrf_token

    print(bcolors.WARNING + "[+] Getting CSRF Token: " + bcolors.ENDC)

    try:
        opener = rq.build_opener(rq.HTTPHandler(), rq.HTTPSHandler())
        opener.addheaders = [('User-agent', 'Mozilla/5.0')]
        rq.install_opener(opener)

        request = rq.Request('https://www.instagram.com/')
        try:
            # python 2
            headers = rq.urlopen(request).info().headers
        except Exception:
            # python 3
            headers = rq.urlopen(request).info().get_all('Set-Cookie')

        for header in headers:
            if header.find('csrftoken') != -1:
                csrf_token = header.partition(';')[0].partition('=')[2]
                print(bcolors.OKGREEN + "[+] CSRF Token :", csrf_token, "\n" + bcolors.ENDC)
    except Exception as err:
        print(bcolors.FAIL + "[!] Can't get CSRF token , please use -d for debug" + bcolors.ENDC)

        if _debug:
            logger.error(err)

        print(bcolors.FAIL + "[!] Exiting..." + bcolors.ENDC)
        exit(3) 
Example #24
Source File: asf_template.py    From esa_sentinel with MIT License
def get_new_cookie(self):
        # Start by prompting user to input their credentials
        
        # Another Python2/3 workaround
        try:
            new_username = raw_input("Username: ")
        except NameError:
            new_username = input("Username: ")
        new_password = getpass.getpass(prompt="Password (will not be displayed): ")
        
        # Build URS4 Cookie request
        auth_cookie_url = self.asf_urs4['url'] + '?client_id=' + self.asf_urs4['client'] + '&redirect_uri=' + \
                          self.asf_urs4['redir'] + '&response_type=code&state='
        
        try:
            # python2
            user_pass = base64.b64encode(bytes(new_username + ":" + new_password))
        except TypeError:
            # python3
            user_pass = base64.b64encode(bytes(new_username + ":" + new_password, "utf-8"))
            user_pass = user_pass.decode("utf-8")
        
        # Authenticate against URS, grab all the cookies
        self.cookie_jar = MozillaCookieJar()
        opener = build_opener(HTTPCookieProcessor(self.cookie_jar), HTTPHandler(), HTTPSHandler(**self.context))
        request = Request(auth_cookie_url, headers={"Authorization": "Basic {0}".format(user_pass)})
        
        # Watch out for cookie rejection!
        try:
            response = opener.open(request)
        except HTTPError as e:
            if e.code == 401:
                print(" > Username and Password combo was not successful. Please try again.")
                return False
            else:
                # If an error happens here, the user most likely has not confirmed EULA.
                print("\nIMPORTANT: There was an error obtaining a download cookie!")
                print("Your user appears to lack permission to download data from the ASF Datapool.")
                print(
                    "\n\nNew users: you must first log into Vertex and accept the EULA. In addition, your Study Area must be set at Earthdata https://urs.earthdata.nasa.gov")
                exit(-1)
        except URLError as e:
            print("\nIMPORTANT: There was a problem communicating with URS, unable to obtain cookie. ")
            print("Try cookie generation later.")
            exit(-1)
        
        # Did we get a cookie?
        if self.check_cookie_is_logged_in(self.cookie_jar):
            # COOKIE SUCCESS!
            self.cookie_jar.save(self.cookie_jar_path)
            return True
        
        # if we aren't successful generating the cookie, nothing will work. Stop here!
        print("WARNING: Could not generate new cookie! Cannot proceed. Please try Username and Password again.")
        print("Response was {0}.".format(response.getcode()))
        print(
            "\n\nNew users: you must first log into Vertex and accept the EULA. In addition, your Study Area must be set at Earthdata https://urs.earthdata.nasa.gov")
        exit(-1)
    
    # make sure we're logged into URS 
Example #25
Source File: asf_template.py    From esa_sentinel with MIT License
def check_cookie(self):
        
        if self.cookie_jar is None:
            print(" > Cookiejar is bunk: {0}".format(self.cookie_jar))
            return False
        
        # File we know is valid, used to validate cookie
        file_check = 'https://urs.earthdata.nasa.gov/profile'
        
        # Apply custom Redirect Handler
        opener = build_opener(HTTPCookieProcessor(self.cookie_jar), HTTPHandler(), HTTPSHandler(**self.context))
        install_opener(opener)
        
        # Attempt a HEAD request
        request = Request(file_check)
        request.get_method = lambda: 'HEAD'
        try:
            print(" > attempting to download {0}".format(file_check))
            response = urlopen(request, timeout=30)
            resp_code = response.getcode()
            # Make sure we're logged in
            if not self.check_cookie_is_logged_in(self.cookie_jar):
                return False
            
            # Save cookiejar
            self.cookie_jar.save(self.cookie_jar_path)
        
        except HTTPError:
            # If we get this error, again, it likely means the user has not agreed to the current EULA
            print("\nIMPORTANT: ")
            print("Your user appears to lack permissions to download data from the ASF Datapool.")
            print(
                "\n\nNew users: you must first log into Vertex and accept the EULA. In addition, your Study Area must be set at Earthdata https://urs.earthdata.nasa.gov")
            exit(-1)
        
        # These return codes indicate the USER has not been approved to download the data
        if resp_code in (300, 301, 302, 303):
            try:
                redir_url = response.info().getheader('Location')
            except AttributeError:
                redir_url = response.getheader('Location')
            
            # Funky Test env:
            if ("vertex-retired.daac.asf.alaska.edu" in redir_url and "test" in self.asf_urs4['redir']):
                print("Cough, cough. It's dusty in this test env!")
                return True
            
            print("Redirect ({0}) occured, invalid cookie value!".format(resp_code))
            return False
        
        # These are successes!
        if resp_code in (200, 307):
            return True
        
        return False 
Example #26
Source File: request.py    From Atlas with GNU General Public License v3.0
def send(self,url,method='GET',data=None,headers={}):
		agent    = self.kwargs['agent']
		proxy    = self.kwargs['proxy']
		cookie   = self.kwargs['cookie']
		timeout  = self.kwargs['timeout']
		headers_ = self.kwargs['headers'] if self.kwargs['headers'] == headers else headers
		redirect = self.kwargs['allow-redirect']
		# -- process -- # 
		if method:method = method.upper()
		if data is None: data = {}
		# -- disable ssl check -- *
		ctx = ssl.create_default_context()                                                                      
		ctx.check_hostname = False                                                                              
		ctx.verify_mode = ssl.CERT_NONE
		# -- add headers -- #
		headers = {}
		headers['User-Agent'] = agent
		for header in headers_.items():
			headers[header[0]] = header[1]
		# -- //
		if cookie:headers['Cookie'] = cookie
		# -- socket timeout -- #
		if timeout:socket.setdefaulttimeout(timeout)
		# -- handled http and https -- #
		handlers = [urllib2.HTTPHandler(),urllib2.HTTPSHandler(context=ctx)]
		# -- process redirect -- #
		if redirect is False:handlers.append(NoRedirectHandler)
		# -- process proxies -- # 
		if proxy:
			handlers.append(urllib2.ProxyHandler({
				'http' : proxy,
				'https': proxy
				})
			)
		# -- install opener -- #
		opener = urllib2.build_opener(*handlers)
		urllib2.install_opener(opener)
		# -- process request -- #
		if method.lower() == "get":
			if data: url = get_params(url,data)
			req = urllib2.Request(url,headers=headers)
		elif method.lower() == "post":
			req = urllib2.Request(url,data=data,headers=headers)
		# -- urlopen -- #
		try:
			resp = urllib2.urlopen(req)
		except urllib2.HTTPError as e:
			resp = e 
		return Resp(resp) 
Example #27
Source File: client.py    From bugatsinho.github.io with GNU General Public License v3.0
def cfcookie(netloc, ua, timeout):
    try:
        headers = {'User-Agent': ua}

        req = urllib2.Request(netloc, headers=headers)

        try:
            urllib2.urlopen(req, timeout=int(timeout))
        except urllib2.HTTPError as response:
            result = response.read(5242880)

        jschl = re.findall('name="jschl_vc" value="(.+?)"/>', result)[0]

        init = re.findall('setTimeout\(function\(\){\s*.*?.*:(.*?)};', result)[-1]

        builder = re.findall(r"challenge-form\'\);\s*(.*)a.v", result)[0]

        decryptVal = parseJSString(init)

        lines = builder.split(';')

        for line in lines:

            if len(line) > 0 and '=' in line:

                sections = line.split('=')
                line_val = parseJSString(sections[1])
                decryptVal = int(eval(str(decryptVal) + str(sections[0][-1]) + str(line_val)))

        answer = decryptVal + len(urlparse.urlparse(netloc).netloc)

        query = '%s/cdn-cgi/l/chk_jschl?jschl_vc=%s&jschl_answer=%s' % (netloc, jschl, answer)

        if 'type="hidden" name="pass"' in result:
            passval = re.findall('name="pass" value="(.*?)"', result)[0]
            query = '%s/cdn-cgi/l/chk_jschl?pass=%s&jschl_vc=%s&jschl_answer=%s' % (
                netloc, quote_plus(passval), jschl, answer
            )
            time.sleep(5)

        cookies = cookielib.LWPCookieJar()
        handlers = [urllib2.HTTPHandler(), urllib2.HTTPSHandler(), urllib2.HTTPCookieProcessor(cookies)]
        opener = urllib2.build_opener(*handlers)
        urllib2.install_opener(opener)

        try:
            req = urllib2.Request(query, headers=headers)
            urllib2.urlopen(req, timeout=int(timeout))
        except BaseException:
            pass

        cookie = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])

        return cookie
    except BaseException:
        pass