微信公众号搜"智元新知"关注
微信扫一扫可直接关注哦!

python --简单的网页图片爬取

import requests
import re
from bs4 import BeautifulSoup
import pymssql  
import time
import threading

# Module-level SQL Server connection and cursor, shared by every worker thread.
# NOTE(review): pymssql connections/cursors are not thread-safe; several
# myThread workers writing through this one cursor may interleave — confirm.
# NOTE: '\T' happens not to be a recognized escape, so '192.168.1.167\Test'
# survives intact, but a raw string r'192.168.1.167\Test' would be safer.
conn=pymssql.connect(host='192.168.1.167\Test',user='sa',password='123',database='db',timeout=30,login_timeout=5)
cur=conn.cursor()

class myThread (threading.Thread):
    """Worker thread that scrapes and stores the image list for one page id.

    Parameters
    ----------
    id : int
        Page id forwarded to ``getimagesList`` when the thread runs.
    """
    def __init__(self,id):
        threading.Thread.__init__(self)
        self.id=id  # remember the page id for run()
    def run(self):
        # BUG FIX: the original called getimagesList(id), which resolved to
        # the *builtin* id() function, not the value passed to __init__.
        getimagesList(self.id)


def getimagesList(id):
    """Fetch one listing page, extract matching <img> tags, and insert
    (id, title, url) rows into the `pic` table via the module-level cursor.

    Parameters
    ----------
    id : int
        Page/record id stored alongside every scraped image.

    Any failure is caught and logged with a timestamp (best-effort scrape).
    """
    # Watermark/resize suffix appended by the image CDN; only images whose
    # src carries it are kept, and it is stripped before the URL is stored.
    suffix = ("!/fw/264/quality/91/unsharp/true/compress/true/canvas/"
              "264x458a0a0/watermark/url/bG9nby53YXRlci52MTAucG5n/"
              "repeat/true/align/center")
    try:
        # BUG FIX: requests needs an explicit scheme — a bare
        # "www.xxxx.html" raises requests.exceptions.MissingSchema.
        # (The placeholder host from the original is kept as-is.)
        res = requests.get("http://www.xxxx.html")
        res.encoding = 'gbk'  # target site serves GBK-encoded HTML
        soup = BeautifulSoup(res.text, "html.parser")
        for image in soup.find_all('img', style="width: auto;"):
            name = image.get('alt')
            url = image.get('src').replace("//", "")
            # BUG FIX: the original used `return` here, which aborted the
            # whole loop at the first non-matching image; `continue` only
            # skips that one image.
            if suffix not in url:
                continue
            url = url.replace(suffix, "")
            # SECURITY FIX: scraped alt/src text went straight into a
            # string-concatenated INSERT (SQL injection); use pymssql's
            # parameterized placeholders instead.
            cur.execute(
                "insert INTO  pic (id,title,url) values (%s,%s,%s)",
                (id, str(name), str(url)),
            )
            conn.commit()
            print("成功了")
    except Exception as ex:
        # BUG FIX: '%s' is not the strftime directive for seconds (it is
        # non-portable / wrong); '%S' is the zero-padded second.
        print("失败了" + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())))


 

版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 dio@foxmail.com 举报,一经查实,本站将立刻删除。

相关推荐