Python requests.packages.urllib3.disable_warnings() Examples

The following are 26 code examples of requests.packages.urllib3.disable_warnings(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module requests.packages.urllib3, or try the search function.
Example #1
Source File: source.py    From armada with Apache License 2.0 7 votes vote down vote up
def download_tarball(tarball_url, verify=False, proxy_server=None):
    '''
    Download a tarball to a temporary file and return its path.

    :param tarball_url: URL of the tarball to fetch.
    :param verify: verify the server's TLS certificate; when False the
        urllib3 InsecureRequestWarning is suppressed.
    :param proxy_server: optional proxy URL applied to http/https/ftp.
    :returns: filesystem path of the downloaded file.
    :raises source_exceptions.TarballDownloadException: on any failure.
    '''
    try:
        if not verify:
            urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
        kwargs = {}
        if proxy_server:
            kwargs['proxies'] = {
                'http': proxy_server,
                'https': proxy_server,
                'ftp': proxy_server
            }
        response = requests.get(tarball_url, verify=verify, **kwargs)

        # NamedTemporaryFile instead of mkstemp: mkstemp returns an OPEN
        # file descriptor that the original code never closed (fd leak).
        # delete=False keeps the file on disk so the path stays valid.
        with tempfile.NamedTemporaryFile(prefix='armada', delete=False) as f:
            f.write(response.content)
            return f.name
    except Exception as e:
        # Chain the original exception so the root cause stays visible.
        raise source_exceptions.TarballDownloadException(tarball_url) from e
Example #2
Source File: source.py    From armada with Apache License 2.0 7 votes vote down vote up
def download_tarball(tarball_url, verify=False):
    '''
    Download a tarball to a temporary file and return its path.

    :param tarball_url: URL of the tarball to fetch.
    :param verify: verify the server's TLS certificate; when False the
        urllib3 InsecureRequestWarning is suppressed.
    :returns: filesystem path of the downloaded file.
    :raises source_exceptions.TarballDownloadException: on any failure.
    '''
    try:
        if not verify:
            urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

        response = requests.get(tarball_url, verify=verify)

        # NamedTemporaryFile instead of mkstemp: mkstemp returns an OPEN
        # file descriptor that the original code never closed (fd leak).
        # delete=False keeps the file on disk so the path stays valid.
        with tempfile.NamedTemporaryFile(prefix='armada', delete=False) as f:
            f.write(response.content)
            return f.name
    except Exception as e:
        # Chain the original exception so the root cause stays visible.
        raise source_exceptions.TarballDownloadException(tarball_url) from e
Example #3
Source File: registry_base.py    From freight_forwarder with MIT License 6 votes vote down vote up
def __init__(self, version, address='https://index.docker.io', **kwargs):
        """
        Create a registry client and prepare its requests session.

        :param version: registry API version string.
        :param address: registry endpoint (scheme + hostname).
        :param kwargs: optional 'ssl_cert_path', 'verify' (bool),
            'authentication' / 'auth' (object with user/passwd attributes).
        """
        # NOTE(review): TLS warnings are silenced unconditionally --
        # presumably because self-signed registry certs are common; confirm.
        urllib3.disable_warnings()
        # Record this instance in the class-level registry keyed by id().
        self._instances[id(self)] = self
        self.scheme       = utils.parse_http_scheme(address)
        self.location     = utils.parse_hostname(address)
        self._api_version = version
        self._tls         = {}

        # Assigning self.tls presumably goes through a property setter that
        # populates self._tls from the cert directory -- defined elsewhere.
        if kwargs.get('ssl_cert_path'):
            self.tls = kwargs['ssl_cert_path']

        # prepare session
        self.session = requests.Session()

        # set up certs. Fall back to the 'verify' kwarg (default True)
        # when no CA path came out of the cert configuration.
        self.session.verify = self.tls.get('ca_path', kwargs.get('verify', True))
        self.session.cert = (self.tls['ssl_cert_path'], self.tls['ssl_key_path']) if self.tls else None
        self.auth = kwargs.get('authentication', kwargs.get('auth', None))
        if self.auth:
            self.session.auth = (self.auth.user, self.auth.passwd)
Example #4
Source File: core.py    From zabbix-exporter with BSD 2-Clause "Simplified" License 6 votes vote down vote up
def __init__(self, base_url, login, password, verify_tls=True, timeout=None, **options):
        """
        Connect to the Zabbix API and cache the hostid -> name mapping.

        :param base_url: Zabbix frontend URL.
        :param login: API user name.
        :param password: API password.
        :param verify_tls: when False, TLS verification is disabled on the
            session and urllib3 warnings are silenced.
        :param timeout: request timeout passed through to pyzabbix.
        :param options: may contain 'metrics', a list of metric configs
            whose 'key' fields are compiled into regex patterns.
        """
        self.options = options
        # Map compiled key-regex -> metric config for later lookups.
        self.key_patterns = {prepare_regex(metric['key']): metric
                             for metric in options.get('metrics', [])}

        self.zapi = pyzabbix.ZabbixAPI(base_url, timeout=timeout)
        if not verify_tls:
            # Local import keeps the dependency on requests' bundled urllib3
            # confined to the insecure path.
            import requests.packages.urllib3 as urllib3
            urllib3.disable_warnings()
            self.zapi.session.verify = verify_tls

        # Response hook: account every API call in the exporter's counters.
        def measure_api_request(r, *args, **kwargs):
            api_requests_total.inc()
            api_bytes_total.inc(len(r.content))
            api_seconds_total.inc(r.elapsed.total_seconds())
        self.zapi.session.hooks = {'response': measure_api_request}

        self.zapi.login(login, password)

        # hostid -> visible host name, fetched once at startup.
        self.host_mapping = {row['hostid']: row['name']
                             for row in self.zapi.host.get(output=['hostid', 'name'])}
Example #5
Source File: container_ship.py    From freight_forwarder with MIT License 5 votes vote down vote up
def __init__(self, address, **kwargs):
        """
        Create a docker client session for the given daemon address.

        :param address: docker daemon URI (https gets TLS configuration,
            any other scheme gets a plain client).
        :param kwargs: for https, optional 'ssl_cert_path' (directory
            containing ca.pem / cert.pem / key.pem) and boolean 'verify'.
        """
        utils.validate_uri(address)
        # TODO: update me with a config file entry need to update to get the version from docker and use that.
        self.API_VERSION = DOCKER_API_VERSION
        self.url         = urlparse(address)

        if self.url.scheme == 'https':
            # TODO: Need to allow for ca to be passed if not disable warnings.
            urllib3.disable_warnings()

            # Resolve SSL_CA_PATH / SSL_CERT_PATH / SSL_KEY_PATH from the
            # cert directory, or None when no path was provided.
            for cert_name_type in ('ca', 'cert', 'key'):
                cert_path = utils.validate_path(os.path.join(kwargs['ssl_cert_path'], "{0}.pem".format(cert_name_type))) \
                    if 'ssl_cert_path' in kwargs and kwargs['ssl_cert_path'] else None
                setattr(self, 'SSL_{0}_PATH'.format(cert_name_type.upper()), cert_path)

            # Verification defaults to True unless an explicit bool was given.
            self.SSL_VERIFY = kwargs['verify'] if 'verify' in kwargs and isinstance(kwargs['verify'], bool) else True

            if not self.SSL_VERIFY:
                self.SSL_CA_PATH = None

            # Client cert pair only when both halves are present.
            client_certs = (self.SSL_CERT_PATH, self.SSL_KEY_PATH) if self.SSL_KEY_PATH and self.SSL_CERT_PATH else None
            tls_config   = docker.tls.TLSConfig(client_cert=client_certs, ca_cert=self.SSL_CA_PATH, verify=self.SSL_VERIFY)

            self._client_session = docker.Client(self.url.geturl(), tls=tls_config, timeout=DOCKER_DEFAULT_TIMEOUT, version=self.API_VERSION)
        else:
            self._client_session = docker.Client(self.url.geturl(), timeout=DOCKER_DEFAULT_TIMEOUT, version=self.API_VERSION)

        # Probe the daemon once; doubles as a connectivity check.
        self._docker_info = self._client_session.version()
        self._injector = None
Example #6
Source File: parse_awvs_xml.py    From SecurityManageFramwork with GNU General Public License v3.0 5 votes vote down vote up
def get_scan_xml(reporturl, scan_id, path):
    """
    Download a scan report and save it as <path>/<scan_id>.xml.

    :param reporturl: URL of the report to download.
    :param scan_id: scan identifier, used as the file name stem.
    :param path: directory the XML file is written into.
    :returns: None on success; the caught exception object on failure
        (kept for backward compatibility with existing callers).
    """
    filename = os.path.join(path, scan_id + '.xml')
    # Suppress certificate warnings -- the request below uses verify=False.
    urllib3.disable_warnings()
    try:
        resp = requests.get(reporturl, timeout=120, verify=False)
        # Context manager guarantees the file handle is closed even when
        # the write fails (the original leaked it on that path).
        with open(filename, 'wb') as fp:
            fp.write(resp.content)
    except Exception as e:
        return e
Example #7
Source File: Nessus.py    From SecurityManageFramwork with GNU General Public License v3.0 5 votes vote down vote up
def connect(scanner_id, method, resource, data=None):
    '''
    Build and send an authenticated request to the Nessus scanner API.

    :param scanner_id: id used to look up the scanner URL and API keys.
    :param method: HTTP verb ('POST', 'PUT', 'DELETE'; anything else -> GET).
    :param resource: API resource path appended to the scanner URL.
    :param data: optional payload, JSON-encoded before sending.
    :returns: raw bytes for download resources; otherwise the decoded JSON
        body, or True when the body is not valid JSON.
    '''
    url, Access_Key, Secret_Key = get_scannerinfo(scanner_id)

    headers = {
               'content-type': 'application/json',
               'X-ApiKeys': 'accessKey = ' + Access_Key + ';secretKey =' + Secret_Key,
               }
    # Identity comparison is the idiomatic None test (was `data != None`).
    if data is not None:
        data = json.dumps(data)
    urllib3.disable_warnings()
    # NOTE(review): `verify` is assumed to be a module-level setting -- confirm.
    if method == 'POST':
        r = requests.post(build_url(url, resource), data=data, headers=headers, verify=verify)
    elif method == 'PUT':
        r = requests.put(build_url(url, resource), data=data, headers=headers, verify=verify)
    elif method == 'DELETE':
        r = requests.delete(build_url(url, resource), data=data, headers=headers, verify=verify)
    else:
        r = requests.get(build_url(url, resource), params=data, headers=headers, verify=verify)

    # Report (but do not abort on) API errors.
    if r.status_code != 200:
        e = r.json()
        print(e)
        #sys.exit()

    if 'download' in resource:
        return r.content
    try:
        return r.json()
    except ValueError:
        # Narrowed from a bare except: only a non-JSON body is expected here.
        return True
Example #8
Source File: AWVS11.py    From SecurityManageFramwork with GNU General Public License v3.0 5 votes vote down vote up
def connect_all(method, resource, scanner_id, data=None):
    '''
    Send one request to the AWVS scanner API.

    Returns the raw response object, or the exception object when the
    request itself fails.
    '''
    url, apikey = get_scannerinfo(scanner_id)

    headers = {
               'content-type': 'application/json',
               'X-Auth': apikey,
               }
    payload = json.dumps(data)
    urllib3.disable_warnings()

    # HTTP verb -> requests function; any unknown verb falls back to GET.
    dispatch = {
        'POST': requests.post,
        'PUT': requests.put,
        'DELETE': requests.delete,
        'PATCH': requests.patch,
    }
    try:
        endpoint = build_url(url, resource)
        sender = dispatch.get(method)
        if sender is None:
            r = requests.get(endpoint, params=payload, headers=headers, verify=verify)
        else:
            r = sender(endpoint, data=payload, headers=headers, verify=verify)
    except Exception as e:
        return e
    return r
Example #9
Source File: AWVS11.py    From SecurityManageFramwork with GNU General Public License v3.0 5 votes vote down vote up
def connect(method, resource, scanner_id, data=None):
    '''
    Send one request to the AWVS scanner API and decode the response.

    Returns False if the request fails, True on HTTP 204, the decoded
    error body on any other non-200 status, raw bytes for download
    resources, and the decoded JSON body otherwise.
    '''
    url, apikey = get_scannerinfo(scanner_id)

    headers = {
               'content-type': 'application/json',
               'X-Auth': apikey,
               }
    body = json.dumps(data)
    urllib3.disable_warnings()

    # HTTP verb -> requests function; any unknown verb falls back to GET.
    dispatch = {
        'POST': requests.post,
        'PUT': requests.put,
        'DELETE': requests.delete,
        'PATCH': requests.patch,
    }
    try:
        endpoint = build_url(url, resource)
        sender = dispatch.get(method)
        if sender is None:
            r = requests.get(endpoint, params=body, headers=headers, verify=verify)
        else:
            r = sender(endpoint, data=body, headers=headers, verify=verify)
    except Exception as e:
        return False

    # Map status codes onto the caller-visible result.
    if r.status_code == 204:
        return True
    if r.status_code != 200:
        return r.json()

    return r.content if 'download' in resource else r.json()
Example #10
Source File: waitforupdates.py    From pyvmomi-community-samples with Apache License 2.0 5 votes vote down vote up
def main():
    """
    Sample Python program for monitoring property changes to objects of
    one or more types to stdout
    """

    args = get_args()

    # Prompt interactively only when no password was given on the CLI.
    if args.password:
        password = args.password
    else:
        password = getpass.getpass(prompt='Enter password for host %s and '
                                   'user %s: ' % (args.host, args.user))

    try:
        # Optionally silence urllib3's certificate warnings.
        if args.disable_ssl_warnings:
            from requests.packages import urllib3
            urllib3.disable_warnings()

        si = SmartConnect(host=args.host, user=args.user, pwd=password,
                          port=int(args.port))

        if not si:
            print >>sys.stderr, "Could not connect to the specified host ' \
                                'using specified username and password"
            raise

        # Always disconnect cleanly, even if monitoring raises below.
        atexit.register(Disconnect, si)

        propspec = parse_propspec(args.propspec)

        print "Monitoring property changes.  Press ^C to exit"
        monitor_property_changes(si, propspec, args.iterations)

    # Python 2 except syntax -- this sample predates Python 3.
    except vmodl.MethodFault, e:
        print >>sys.stderr, "Caught vmodl fault :\n%s" % str(e)
        raise
Example #11
Source File: __init__.py    From plugin.audio.tidal2 with GNU General Public License v3.0 5 votes vote down vote up
def __init__(self, config=None):
        """
        :param config: optional :class:`Config`; a fresh Config() is
            created per instance when omitted.
        """
        # The original signature was `config=Config()`, which builds ONE
        # Config at definition time and shares it across every Session
        # created without an explicit config (mutable default argument).
        self._config = config if config is not None else Config()
        self.session_id = None
        self.user = None
        self.country_code = 'US'   # Enable Trial Mode
        self.client_unique_key = None
        try:
            from requests.packages import urllib3
            urllib3.disable_warnings() # Disable OpenSSL Warnings in URLLIB3
        except Exception:
            # Narrowed from a bare except (which also caught SystemExit /
            # KeyboardInterrupt); warnings simply stay enabled on failure.
            pass
Example #12
Source File: qq_bot.py    From examples-of-web-crawlers with MIT License 5 votes vote down vote up
def get_pay_for_another(self):
        """Fetch payments this account made on behalf of other accounts.

        Reads the skey cookie from the qun.qq.com cookie set and returns
        the 'list' entries of the API's resultinfo payload.
        """
        # Data required by the API: sck is derived from the skey cookie.
        skey = str(self.cookies_merge_dict_in_qun_qq_com['skey'])
        url = 'https://pay.qq.com/cgi-bin/personal/account_msg.cgi?p=0.6796416908412624&cmd=1&sck=' + get_sck(skey) + '&type=100&showitem=2&per=100&pageno=1&r=0.3177912609760205'

        # Request headers imitating a real browser.
        header = {
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36',
            'Accept-Encoding': 'gzip, deflate',
            'Referer': 'https://pay.qq.com/infocenter/infocenter.shtml?asktype=100',
            'Connection': 'keep-alive'
        }

        # Suppress HTTPS certificate warnings (request below uses verify=False).
        urllib3.disable_warnings()
        # Fetch the page via GET.
        html = get(url, cookies=self.cookies_merge_dict_in_qun_qq_com, headers=header, verify=False)

        # Parse the JSON response into a Python object.
        result = loads(html.text)
        # print(result)

        return result['resultinfo']['list']
Example #13
Source File: qq_bot.py    From examples-of-web-crawlers with MIT License 5 votes vote down vote up
def get_quit_of_group(self):
        """Fetch the QQ groups this account quit within the last 30 days.

        Returns the decoded JSON payload from huifu.qq.com.
        """
        # Data required for the request:
        # bkn is derived from the skey cookie via a hashing function.
        bkn = hash33_bkn(self.cookies_merge_dict_in_qun_qq_com['skey'])
        submit_data = {'bkn': str(bkn)}

        # Request headers imitating a real browser.
        header = {
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36',
            'Content-Type': 'text/plain',
            'origin': 'https://huifu.qq.com',
            'referer' : 'https://huifu.qq.com/recovery/index.html?frag=0'
        }

        # Suppress HTTPS certificate warnings (request below uses verify=False).
        urllib3.disable_warnings()
        # Fetch the page via POST.
        html = post('https://huifu.qq.com/cgi-bin/gr_grouplist', data=submit_data, cookies=self.cookies_merge_dict_in_qun_qq_com, headers=header, verify=False)

        # Parse the JSON response into a Python object.
        result = loads(html.text)

        return result
Example #14
Source File: qq_bot.py    From examples-of-web-crawlers with MIT License 5 votes vote down vote up
def get_profile_picture(self, qq_number, size=100):
        """Return the avatar image bytes for the given QQ number.

        size may be 40, 100 or 140; the default is 100.
        """
        # Suppress HTTPS certificate warnings (request below uses verify=False).
        urllib3.disable_warnings()

        # Request headers imitating a real browser.
        request_headers = {
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36',
            'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
            'Referer': 'http://find.qq.com/'
        }

        # Fetch the avatar via GET and hand back the raw image bytes.
        avatar_url = 'http://q1.qlogo.cn/g?b=qq&nk={0}&s={1}'.format(qq_number, size)
        response = get(avatar_url, headers=request_headers, verify=False)
        return response.content
Example #15
Source File: url_request.py    From examples-of-web-crawlers with MIT License 5 votes vote down vote up
def get_html(url, submit_cookies):
    """GET *url* with the given cookies and return the response object."""
    # Request headers imitating a real browser session.
    request_headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36',
        'Referer': 'http://ui.ptlogin2.qq.com/cgi-bin/login?appid=549000912&s_url=http://qun.qq.com/member.html'
    }
    # Suppress HTTPS certificate warnings (request below uses verify=False).
    urllib3.disable_warnings()

    # Fetch the page via GET.
    return get(url, cookies=submit_cookies, headers=request_headers, verify=False)


# Access a web page via POST
Example #16
Source File: sdk.py    From darwin-sdk with Apache License 2.0 5 votes vote down vote up
def disable_ssl_cert_check(self):
        """Turn off TLS certificate verification on this session and
        silence the InsecureRequestWarning it would otherwise trigger."""
        self.s.verify = False
        urllib3.disable_warnings(InsecureRequestWarning)
Example #17
Source File: client.py    From k8s with Apache License 2.0 5 votes vote down vote up
def init_session(cls):
        """Configure the shared requests session from module config:
        bearer token, client certificate and TLS verification."""
        # Set the Authorization header at most once, and only if a token exists.
        if "Authorization" not in cls._session.headers and config.api_token:
            cls._session.headers.update({"Authorization": "Bearer {}".format(config.api_token)})
        # Attach a client certificate only if not already set.
        if cls._session.cert is None and config.cert:
            cls._session.cert = config.cert
        cls._session.verify = config.verify_ssl
        if not config.verify_ssl:
            # Local import confines the urllib3 dependency to the insecure
            # path; silence the per-request warnings verification-off causes.
            import requests.packages.urllib3 as urllib3
            urllib3.disable_warnings()
Example #18
Source File: qq_bot.py    From examples-of-web-crawlers with MIT License 4 votes vote down vote up
def get_qb(self):
        """Return this account's Q-coin (QB) balance as a float."""
        # Data required for the request.
        qq_number = str(self.qq_number)
        skey = str(self.cookies_merge_dict_in_qun_qq_com['skey'])
        url = 'https://api.unipay.qq.com/v1/r/1450000186/wechat_query?cmd=4&pf=vip_m-pay_html5-html5&pfkey=pfkey&from_h5=1&from_https=1&openid=' + qq_number + '&openkey=' + skey + '&session_id=uin&session_type=skey'

        # Request headers imitating a real browser.
        header = {
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36',
            'Accept-Encoding': 'gzip, deflate',
            'Referer': 'https://my.pay.qq.com/account/index.shtml',
            'Origin': 'https://my.pay.qq.com',
            'Connection': 'close'
        }

        # Suppress HTTPS certificate warnings (request below uses verify=False).
        urllib3.disable_warnings()
        # Fetch the page via GET.
        html = get(url, cookies=self.cookies_merge_dict_in_qun_qq_com, headers=header, verify=False)

        # Parse the JSON response into a Python object.
        result = loads(html.text)

        # Divide by 10 -- presumably the API reports tenths of a QB; confirm.
        qb_value = float(result['qb_balance']) / 10
        return qb_value
Example #19
Source File: qq_bot.py    From examples-of-web-crawlers with MIT License 4 votes vote down vote up
def who_care_about_me(self):
        """Qzone intimacy ranking: fetch the list of people who care about me.

        Returns the 'items_list' entries from the friend_ship_manager API.
        """
        # bkn is derived from the p_skey cookie via a hashing function.
        bkn = hash33_bkn(self.cookies_merge_dict_in_qzone_qq_com['p_skey'])

        # Obtain the qzonetoken parameter by scraping the profile page.
        urllib3.disable_warnings()
        target_url = 'https://user.qzone.qq.com/' + self.qq_number
        html = get_html(target_url, self.cookies_merge_dict_in_qzone_qq_com)
        qzonetoken = re.findall(r'{ try{return "(.+?)";', html.text)
        qzonetoken = qzonetoken[0]


        # Fetch the "who cares about me" data (do=2).
        target_url = 'https://rc.qzone.qq.com/proxy/domain/r.qzone.qq.com/cgi-bin/tfriend/friend_ship_manager.cgi?uin=' + self.qq_number + '&do=2&rd=0.32313768189269365&fupdate=1&clean=0&g_tk=' + str(bkn) + '&qzonetoken=' + str(qzonetoken) + '&g_tk=' + str(bkn)
        urllib3.disable_warnings()
        html = get_html(target_url, self.cookies_merge_dict_in_qzone_qq_com)

        # Strip the JSONP wrapper "_Callback( ... )" around the payload.
        result_data = (html.text).replace('_Callback(','')
        result_data = result_data[:len(result_data)-2]
        # Parse the remaining JSON into a Python object.
        result_data = loads(result_data)
        result_data = result_data['data']['items_list']

        return result_data
Example #20
Source File: qq_bot.py    From examples-of-web-crawlers with MIT License 4 votes vote down vote up
def i_care_about_who(self):
        """Qzone intimacy ranking: fetch the list of people I care about.

        Returns the 'items_list' entries from the friend_ship_manager API.
        """
        # bkn is derived from the p_skey cookie via a hashing function.
        bkn = hash33_bkn(self.cookies_merge_dict_in_qzone_qq_com['p_skey'])

        # Obtain the qzonetoken parameter by scraping the profile page.
        urllib3.disable_warnings()
        target_url = 'https://user.qzone.qq.com/' + self.qq_number
        html = get_html(target_url, self.cookies_merge_dict_in_qzone_qq_com)
        qzonetoken = re.findall(r'{ try{return "(.+?)";', html.text)
        qzonetoken = qzonetoken[0]


        # Fetch the "who I care about" data (do=1).
        target_url = 'https://rc.qzone.qq.com/proxy/domain/r.qzone.qq.com/cgi-bin/tfriend/friend_ship_manager.cgi?uin=' + self.qq_number + '&do=1&rd=0.9680629025032721&fupdate=1&clean=1&g_tk=' + str(bkn) + '&qzonetoken=' + str(qzonetoken) + '&g_tk=' + str(bkn)
        urllib3.disable_warnings()
        html = get_html(target_url, self.cookies_merge_dict_in_qzone_qq_com)

        # Strip the JSONP wrapper "_Callback( ... )" around the payload.
        result_data = (html.text).replace('_Callback(','')
        result_data = result_data[:len(result_data)-2]
        # Parse the remaining JSON into a Python object.
        result_data = loads(result_data)
        result_data = result_data['data']['items_list']

        return result_data
Example #21
Source File: Nessus.py    From SecurityManageFramwork-SeMF with GNU General Public License v3.0 4 votes vote down vote up
def connect(scanner_id, method, resource, data=None):
    '''
    Build and send an authenticated request to the Nessus scanner API.

    :param scanner_id: id used to look up the scanner URL and API keys.
    :param method: HTTP verb ('POST', 'PUT', 'DELETE'; anything else -> GET).
    :param resource: API resource path appended to the scanner URL.
    :param data: optional payload, JSON-encoded before sending.
    :returns: raw bytes for download resources; otherwise the decoded JSON
        body, or True when the body is not valid JSON.
    '''
    url, Access_Key, Secret_Key = get_scannerinfo(scanner_id)

    headers = {
               'content-type': 'application/json',
               'X-ApiKeys': 'accessKey = ' + Access_Key + ';secretKey =' + Secret_Key,
               }
    # Identity comparison is the idiomatic None test (was `data != None`).
    if data is not None:
        data = json.dumps(data)
    urllib3.disable_warnings()
    # NOTE(review): `verify` is assumed to be a module-level setting -- confirm.
    if method == 'POST':
        r = requests.post(build_url(url, resource), data=data, headers=headers, verify=verify)
    elif method == 'PUT':
        r = requests.put(build_url(url, resource), data=data, headers=headers, verify=verify)
    elif method == 'DELETE':
        r = requests.delete(build_url(url, resource), data=data, headers=headers, verify=verify)
    else:
        r = requests.get(build_url(url, resource), params=data, headers=headers, verify=verify)

    # Report (but do not abort on) API errors.
    if r.status_code != 200:
        e = r.json()
        print(e)
        #sys.exit()

    if 'download' in resource:
        return r.content
    try:
        return r.json()
    except ValueError:
        # Narrowed from a bare except: only a non-JSON body is expected here.
        return True
Example #22
Source File: qq_bot.py    From examples-of-web-crawlers with MIT License 4 votes vote down vote up
def is_vip_svip(self):
        """Check whether the logged-in QQ account is VIP and/or SVIP.

        Returns a dict {'isSvip': ..., 'isVip': ...} from the vip.qq.com API.
        """
        # Data required for the request:
        # bkn is derived from the skey cookie via a hashing function.
        bkn = hash33_bkn(self.cookies_merge_dict_in_qun_qq_com['skey'])
        qq_number = str(self.qq_number)
        skey = str(self.cookies_merge_dict_in_qun_qq_com['skey'])
        url = 'https://proxy.vip.qq.com/cgi-bin/srfentry.fcgi?bkn=' + str(bkn) + '&ts=&g_tk=' + str(bkn) + '&data={"11053":{"iAppId":1,"iKeyType":1,"sClientIp":"","sSessionKey":"' + skey + '","sUin":"' + qq_number + '"}}'

        # Request headers imitating a real browser.
        header = {
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36',
            'Accept-Encoding': 'gzip, deflate',
            'Referer': 'https://huifu.qq.com/recovery/index.html?frag=1',
            'Origin': 'https://huifu.qq.com',
            'Connection': 'close'
        }

        # Suppress HTTPS certificate warnings (request below uses verify=False).
        urllib3.disable_warnings()
        # Fetch via GET (NOTE(review): comment said POST but the code uses GET).
        html = get(url, cookies=self.cookies_merge_dict_in_qun_qq_com, headers=header, verify=False)

        # Parse the JSON response and pull out the two membership flags.
        result = loads(html.text)
        isSvip = result['11053']['data']['isSvip']
        isVip = result['11053']['data']['isVip']
        return {'isSvip':isSvip, 'isVip':isVip}
Example #23
Source File: linked_clone.py    From pyvmomi-community-samples with Apache License 2.0 4 votes vote down vote up
def main():
    """Clone a template VM as a linked clone onto a chosen host/cluster.

    Python 2 sample: connects to vCenter (certificate checks disabled),
    resolves datacenter, cluster, host and template by name, snapshots
    the template and creates the linked clone.
    """
    args = get_args()

    # Silence urllib3 warnings; TLS verification is disabled below anyway.
    urllib3.disable_warnings()
    si = None
    context = None
    # Build an unverified SSL context where the ssl module supports it.
    if hasattr(ssl, 'SSLContext'):
        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        context.verify_mode = ssl.CERT_NONE
    if context:
        # Python >= 2.7.9
        si = SmartConnect(host=args.host,
                          port=int(args.port),
                          user=args.user,
                          pwd=args.password,
                          sslContext=context)
    else:
        # Python >= 2.7.7
        si = SmartConnect(host=args.host,
                          port=int(args.port),
                          user=args.user,
                          pwd=args.password)
    # Always disconnect at interpreter exit.
    atexit.register(Disconnect, si)
    print "Connected to vCenter Server"

    content = si.RetrieveContent()

    # Resolve each inventory object by name, failing fast when missing.
    datacenter = get_obj(content, [vim.Datacenter], args.datacenter_name)
    if not datacenter:
        raise Exception("Couldn't find the Datacenter with the provided name "
                        "'{}'".format(args.datacenter_name))

    cluster = get_obj(content, [vim.ClusterComputeResource], args.cluster_name,
                      datacenter.hostFolder)

    if not cluster:
        raise Exception("Couldn't find the Cluster with the provided name "
                        "'{}'".format(args.cluster_name))

    # Pick the host object matching the requested host name (may stay None).
    host_obj = None
    for host in cluster.host:
        if host.name == args.host_name:
            host_obj = host
            break

    vm_folder = datacenter.vmFolder

    template = get_obj(content, [vim.VirtualMachine], args.template_name,
                       vm_folder)

    if not template:
        raise Exception("Couldn't find the template with the provided name "
                        "'{}'".format(args.template_name))

    # Snapshot the template, then clone from that snapshot (linked clone).
    location = _get_relocation_spec(host_obj, cluster.resourcePool)
    _take_template_snapshot(si, template)
    _clone_vm(si, template, args.vm_name, vm_folder, location)
Example #24
Source File: qq_bot.py    From examples-of-web-crawlers with MIT License 4 votes vote down vote up
def get_info_in_qq_friend(self,qq_number):

        """Fetch detailed profile information for one QQ friend.

        Returns the first entry of the search API's info_list.
        """

        # Data required for the request:
        # bkn (here passed as 'ldw') is derived from the skey cookie.
        bkn = hash33_bkn(self.cookies_merge_dict_in_qun_qq_com['skey'])
        submit_data = {'keyword':str(qq_number), 'ldw': str(bkn), 'num':'20', 'page':'0', 'sessionid':'0', 'agerg':'0', 'sex':'0', 'firston':'0', 'video':'0', 'country':'1', 'province':'65535', 'city':'0', 'district':'0', 'hcountry':'1', 'hprovince':'0', 'hcity':'0', 'hdistrict':'0', 'online':'0'}

        # Cookies that would need to be submitted (kept for reference):
        # cookies = {'uin':self.cookies_merge_dict_in_qun_qq_com['uin'], 'skey':self.cookies_merge_dict_in_qun_qq_com['skey'], 'ptisp':self.cookies_merge_dict_in_qun_qq_com['ptisp'], 'RK':self.cookies_merge_dict_in_qun_qq_com['RK'], 'ptcz':self.cookies_merge_dict_in_qun_qq_com['ptcz']}

        # Request headers imitating a real browser.
        header = {
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            'Origin': 'http://find.qq.com',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36',
            'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
            'Referer':'http://find.qq.com/',
        }

        # Suppress HTTPS certificate warnings (request below uses verify=False).
        urllib3.disable_warnings()
        # Fetch the page via POST.
        html = post('http://cgi.find.qq.com/qqfind/buddy/search_v3', data=submit_data, cookies=self.cookies_merge_dict_in_qun_qq_com, headers=header, verify=False)

        # Parse the friend info JSON into a Python object.
        friend_info = loads(html.text)
        # print(friend_info)
        return friend_info['result']['buddy']['info_list'][0]
Example #25
Source File: url_request.py    From examples-of-web-crawlers with MIT License 4 votes vote down vote up
def post_html(url, submit_cookies, submit_data):
    """POST *submit_data* to *url* with the given cookies; return the response."""
    # Request headers imitating a real browser session.
    request_headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36',
        'Referer': 'https://qun.qq.com/member.html'
    }
    # Suppress HTTPS certificate warnings (request below uses verify=False).
    urllib3.disable_warnings()

    # Send the form data via POST.
    return post(url, data=submit_data, cookies=submit_cookies, headers=request_headers, verify=False)
Example #26
Source File: companion.py    From EDMarketConnector with GNU General Public License v2.0 4 votes vote down vote up
def __init__(self):
        self.state = Session.STATE_INIT
        self.credentials = None
        self.session = None
        self.auth = None
        self.retrying = False	# Avoid infinite loop when successful auth / unsuccessful query

        # yuck suppress InsecurePlatformWarning under Python < 2.7.9 which lacks SNI support
        if sys.version_info < (2,7,9):
            from requests.packages import urllib3
            urllib3.disable_warnings()

        if getattr(sys, 'frozen', False):
            os.environ['REQUESTS_CA_BUNDLE'] = join(config.respath, 'cacert.pem')