# 报错地段在 59 行,报错提示为:Unterminated string starting at: line 1 column 1 (char 0)
# 主要想不通,为啥别的链接不报错,每次一到这个链接就报错
import requests
from urllib.parse import urlencode
from requests.exceptions import RequestException
import random
import json
from bs4 import BeautifulSoup
import re
# Pool of desktop-browser User-Agent strings; each request picks one at
# random (see the requests.get calls below) to vary the client fingerprint.
headers_chi = [
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.22 Safari/537.36 SE 2.X MetaSr 1.0',
'Mozilla/5.0 (Windows NT 6.1; rv:49.0) Gecko/20100101 Firefox/49.0',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.2; WOW64; rv:21.0) Gecko/20100101 Firefox/21.0'
]
def shouye_dizhi():
    """Fetch the Toutiao search-result index (JSON) for the gallery query.

    Returns:
        The response body text on HTTP 200, otherwise None (including on
        any request error, which is reported to stdout).
    """
    data = {
        'offset': '0',
        'format': 'json',
        'keyword': '美女',
        'autoload': 'true',
        'count': '20',
        'cur_tab': '3',
        'from': 'gallery'
    }
    # BUG FIX: the URL literal was split across two physical lines
    # ("url = '" + newline + "https://..."), an unterminated string
    # literal — a SyntaxError. Keep the whole literal on one line.
    url = 'https://www.toutiao.com/search_content/?' + urlencode(data)
    try:
        headers = {'User-Agent': random.choice(headers_chi)}
        # timeout so a stalled server cannot hang the crawl forever;
        # a Timeout is a RequestException and lands in the handler below.
        dizhi = requests.get(url, headers=headers, timeout=10)
        if dizhi.status_code == 200:
            return dizhi.text
    except RequestException:
        print('首页加载出错')
        return None
def shouye_xiangqing(html):
    """Yield each item's 'article_url' from the index-page JSON text.

    Args:
        html: JSON text returned by shouye_dizhi(); may be None/empty when
            the fetch failed.

    Yields:
        The 'article_url' value of every entry under the top-level 'data'
        key (None for entries missing the key, as before). Yields nothing
        for absent/invalid input instead of raising.
    """
    # Guard: shouye_dizhi() returns None on failure; json.loads(None)
    # would raise TypeError and kill the whole crawl.
    if not html:
        return
    try:
        data = json.loads(html)
    except json.JSONDecodeError:
        return
    if data and 'data' in data.keys():
        for item in data.get('data'):
            yield item.get('article_url')
def xiangqingye_dizhi(url):
    """Download one gallery detail page.

    Args:
        url: absolute URL of the detail page.

    Returns:
        The page HTML on HTTP 200; None on any other status or on a
        request error (the error case is reported to stdout).
    """
    try:
        request_headers = {'User-Agent': random.choice(headers_chi)}
        response = requests.get(url, headers=request_headers)
        return response.text if response.status_code == 200 else None
    except RequestException:
        print('详情页加载出错')
        return None
def xiangqingye_jiexi(html, url):
    """Extract the gallery title and image URLs embedded in a detail page.

    Args:
        html: detail-page HTML (may be None when the download failed).
        url: the page's own URL, echoed back in the result.

    Returns:
        {'title': ..., 'url': ..., 'items': [image urls]} on success,
        otherwise None.
    """
    if not html:
        return None
    jiexi = BeautifulSoup(html, 'lxml')
    title = jiexi.select('title')[0].get_text()
    print(title)
    # The gallery data is embedded as  JSON.parse("...escaped json...").
    # BUG FIX: the old pattern  JSON.parse\(([\s\S]*?)\)  stopped at the
    # FIRST ')' anywhere, so any ')' character inside the JSON payload
    # truncated the capture mid-string and json.loads then failed with
    # "Unterminated string starting at: line 1 column 1 (char 0)" — which
    # is why only pages whose content contains ')' blew up. Match one
    # complete double-quoted JS string literal instead (backslash escapes
    # allowed), which cannot terminate inside the string.
    zhengze = re.compile(r'JSON\.parse\(("(?:\\.|[^"\\])*")\)')
    jieguo = re.search(zhengze, html)
    # Pages without the embedded gallery produce no match; the old code
    # crashed with AttributeError on jieguo.group(1).
    if jieguo is None:
        return None
    try:
        # First loads: decode the JS string literal -> the inner JSON text.
        # Second loads: decode that JSON text -> a dict.
        data = json.loads(json.loads(jieguo.group(1)))
    except json.JSONDecodeError:
        return None
    if isinstance(data, dict) and 'sub_images' in data.keys():
        sub_images = data.get('sub_images')
        items = [item.get('url') for item in sub_images]
        return {
            'title': title,
            'url': url,
            'items': items
        }
    return None
def main():
    """Crawl the index page, then every gallery detail page, printing each result."""
    html = shouye_dizhi()
    # The index fetch returns None on failure; there is nothing to iterate.
    if html is None:
        return
    for url in shouye_xiangqing(html):
        # Items missing 'article_url' yield None; requests.get(None) fails.
        if not url:
            continue
        html = xiangqingye_dizhi(url)
        # Skip detail pages that failed to download instead of handing
        # None to the parser (BeautifulSoup(None) raises TypeError).
        if html is None:
            continue
        tupian = xiangqingye_jiexi(html, url)
        print(tupian)
if __name__ == "__main__":
    main()