Python: my web crawler notes
# * Part 1: urllib
# from urllib import request, parse
# 1 -- request.urlretrieve: download a URL straight to a local file
# request.urlretrieve('http://www.baidu.com', 'aaa.html')
#
# 2 -- request.urlopen: fetch a page and check its HTTP status code
# reas = request.urlopen('http://www.baidu.com')
# print(reas.getcode())
#
# 3 -- parse.urlencode / parse.parse_qs: encode a dict into a query string and decode it back
# a = parse.urlencode({'我是':1,'你是':2,'它是':3})
# print(a)
# print(parse.parse_qs(a))
#
# 4 -- parse.urlparse / parse.urlsplit: break a URL into its components
# url = 'https://www.baidu.com/s?ie=utf-8&wd=python&tn=78040160_5_pg&ch=3#1'
# result = parse.urlparse(url)  # split the URL into scheme, netloc, path, params, query and fragment
# output:
# ParseResult(scheme='https', netloc='www.baidu.com', path='/s', params='', query='ie=utf-8&wd=python&tn=78040160_5_pg&ch=3', fragment='1')
# scheme: https
# netloc: www.baidu.com
# path: /s
# params:
# query: ie=utf-8&wd=python&tn=78040160_5_pg&ch=3
# fragment: 1
# result = parse.urlsplit(url)  # same idea as urlparse, but the result has no params component
# output:
# SplitResult(scheme='https', netloc='www.baidu.com', path='/s', query='ie=utf-8&wd=python&tn=78040160_5_pg&ch=3', fragment='1')
# scheme: https
# netloc: www.baidu.com
# path: /s
# query: ie=utf-8&wd=python&tn=78040160_5_pg&ch=3
# fragment: 1
# print(result)
# print('scheme:',result.scheme)
# print('netloc:',result.netloc)
# print('path:',result.path)
# #print('params:',result.params)
# print('query:',result.query)
# print('fragment:',result.fragment)
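# A small illustrative sketch (the example.com URL is my own assumption) of where the two
# differ: path parameters after a ';' end up in ParseResult.params, while urlsplit leaves
# them inside path.
# u = 'https://example.com/s;type=web?q=1'
# print(parse.urlparse(u).params)   # 'type=web'
# print(parse.urlsplit(u).path)     # '/s;type=web'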
#
# 5 -- request.Request: POST form data with custom headers (Lagou jobs API)
# url = 'https://www.lagou.com/jobs/positionAjax.json?city=%E4%B8%8A%E6%B5%B7&needAddtionalResult=false'
# headers = {
#     'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3741.400 QQBrowser/10.5.3868.400',
#     # Referer/Origin/Pragma are HTTP headers, not form fields, so they belong here
#     'Referer': 'https://www.lagou.com/jobs/list_java?labelWords=&fromSearch=true&suginput=',
#     'Origin': 'https://www.lagou.com',
#     'Pragma': 'no-cache'
# }
# data = {
#     'first': 'True',
#     'pn': 1,
#     'kd': 'python'
# }
# req = request.Request(url,headers=headers,data=parse.urlencode(data).encode('utf-8'),method='POST')
# resp = request.urlopen(req)
# print(resp.read().decode('utf-8'))
#
# 6 -- ProxyHandler: route requests through an HTTP proxy
# from urllib import request
# url = 'http://httpbin.org/ip'
# handler = request.ProxyHandler({"http":"220.168.52.245:40406"})
# opener = request.build_opener(handler)
# resp = opener.open(url)
# print(resp.read())
#
# 7 -- reuse a logged-in session by copying the browser's Cookie header
# from urllib import request
# dapeng_url = "http://www.renren.com/880151247/profile"
# headers = {
# 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3741.400 QQBrowser/10.5.3868.400',
# "Cookie": "anonymid=k9xjdjfc-hodq0q; depovince=GW; _r01_=1; JSESSIONID=abc1uPtF8-or69JJe9Xhx; ick_login=298bf271-2e27-4267-b945-d2790750f2f4; taihe_bi_sdk_uid=06d641ecbe8305b3032f9fde9bfaaba6;
taihe_bi_sdk_session=25f0035c05eb8993a05734a3068dd860; t=36979ef16e99fcefa14351e8e304f0af1; societyguester=36979ef16e99fcefa14351e8e304f0af1; id=974393911; xnsid=e0b4519; jebecookies=78966f67-81b5-4159-ac53-
9c5694aaf215|||||; ver=7.0; loginfrom=null; jebe_key=0bc3b536-3d43-416e-a443-3550bea33d34%7Ccd3f341f2a9a65627d4ee21bd7991b3e%7C1588902281361%7C1%7C1588902281143; jebe_key=0bc3b536-3d43-416e-a443-
3550bea33d34%7Ccd3f341f2a9a65627d4ee21bd7991b3e%7C1588902281361%7C1%7C1588902281147; wp_fold=0"
# }
# req = request.Request(url=dapeng_url,headers=headers)
# resp = request.urlopen(req)
# a = resp.read().decode('utf-8')
# with open('wopa.html',mode='w',encoding='utf-8') as f:
# f.write(a)
#
# 8 -- CookieJar + HTTPCookieProcessor: log in via POST and reuse the session cookies
# from urllib import request,parse
# from http.cookiejar import CookieJar
#
# cookiejar = CookieJar()
# handler = request.HTTPCookieProcessor(cookiejar)
# opener = request.build_opener(handler)
#
# headers = {
#     # no hard-coded Cookie header here: the CookieJar collects the cookies set by the login response
#     'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3741.400 QQBrowser/10.5.3868.400'
# }
#
# data = {
# 'email':"18337802329",
# 'password':"wang1234567890."
# }
#
# login_url = "http://www.renren.com/PLogin.do"
# req = request.Request(login_url,data=parse.urlencode(data).encode('utf-8'),headers=headers)
# opener.open(req)  # go through the opener so the cookiejar records the login cookies (urlopen would bypass it)
#
# dapeng_url = "http://www.renren.com/880151247/profile"
# resp = opener.open(dapeng_url)
# with open("wopa.html",mode='w',encoding=('utf-8')) as f:
# f.write(resp.read().decode('utf-8'))
#
# 9 -- loading and saving cookie information (see the sketch right below)
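# A minimal sketch of the idea, using http.cookiejar.MozillaCookieJar; the filename
# 'cookies.txt' and the httpbin URL are illustrative assumptions, not from the original notes.
# from urllib import request
# from http.cookiejar import MozillaCookieJar
#
# cookiejar = MozillaCookieJar('cookies.txt')
# handler = request.HTTPCookieProcessor(cookiejar)
# opener = request.build_opener(handler)
# opener.open('http://httpbin.org/cookies/set?name=value')
# # persist the cookies; keep session cookies and already-expired ones too
# cookiejar.save(ignore_discard=True, ignore_expires=True)
#
# # in a later run, load them back before building the opener
# cookiejar = MozillaCookieJar('cookies.txt')
# cookiejar.load(ignore_discard=True, ignore_expires=True)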
#
# * Part 2: requests
# 1 -- requests.get: .text (str, decoded with a guessed encoding) vs .content (raw bytes)
# import requests
# response = requests.get("https://www.baidu.com/")
# print(response.text)                      # str, already decoded with the encoding requests guessed
# print(type(response.text))
# print(response.content.decode('utf-8'))   # raw bytes, decoded explicitly as UTF-8
# print(type(response.content))
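# A small follow-up sketch (assumption: same page as above) showing that the encoding
# behind .text can be inspected and overridden:
# import requests
# resp = requests.get("https://www.baidu.com/")
# print(resp.encoding)      # encoding requests inferred from the response headers
# resp.encoding = 'utf-8'   # override it so resp.text decodes the body as UTF-8
# print(resp.text[:100])    # now decoded with UTF-8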