"""Multi-threaded crawler for bilibili bangumi media pages.

Fetches https://www.bilibili.com/bangumi/media/md<N> pages, extracts the
embedded JSON state, and persists ``mediaInfo``/``stat`` Django model rows.
Cancellation is cooperative via the module-level ``isStop`` flag.
"""
import _thread
import json
import os
import random
import time
from concurrent import futures

import django
import requests
from bs4 import BeautifulSoup

from PixivSearch.settings import logger

# Django must be configured before the model import below.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "PixivSearch.settings")
django.setup()

from PixivSearch.model.config import mediaInfo, stat

# Most recently persisted media record; exposed to callers via get_().
current_mediaInfo = mediaInfo(id=0, chn_name='null')

# Cooperative cancellation flag shared between workers, check() and stop_().
isStop = False

# Thread pool created by threadSave(); check() shuts it down on cancellation.
executors = None


def check():
    """Poll the stop flag once per second; shut the pool down when it is set."""
    while True:
        if isStop:
            logger.info('停止多线程爬虫')
            executors.shutdown()
            break
        time.sleep(1)


def save(md):
    """Fetch media page *md* and persist its name and stats.

    Returns immediately if the crawl has been cancelled. Parse/DB failures
    are logged and swallowed so one bad page does not abort the whole crawl.
    """
    if isStop:
        return
    # Randomized delay so we do not hammer the remote host.
    time.sleep(random.randint(1, 3))
    url = "https://www.bilibili.com/bangumi/media/md%d" % md
    # Fix: explicit timeout so a stalled connection cannot hang a worker forever.
    req = requests.get(url, timeout=10)
    # Lazy %-args: formatting is skipped when the INFO level is disabled.
    logger.info("request_url=%s,status_code=%d", url, req.status_code)
    if req.status_code == 200:
        tag = BeautifulSoup(req.text, 'lxml')
        # The page embeds its state in the 4th inline <script> as
        # "window.xxx = { ... };function(...)"; slice the JSON object out
        # between the '=' and the trailing 'function'.
        # NOTE(review): fragile — breaks if the page layout changes.
        script = tag.select("script")[3].text
        json_str = script[script.index("=") + 1:script.index("function") - 2]
        json_obj = json.loads(json_str)
        try:
            stat_info = json_obj['mediaInfo']['stat']
            print(json_obj['mediaInfo']['chn_name'])
            print(stat_info)
            mediaInfo(id=md, chn_name=json_obj['mediaInfo']['chn_name']).save()
            global current_mediaInfo
            current_mediaInfo = mediaInfo.objects.get(pk=md)
            stat(id=md,
                 danmakus=int(stat_info['danmakus']),
                 favorites=stat_info['favorites'],
                 views=stat_info['views']).save()
        except Exception:
            # Fix: was "except BaseException", which also trapped
            # KeyboardInterrupt/SystemExit; logger.exception keeps the
            # original message and records the full traceback.
            logger.exception("发生异常")


def get_():
    """Return the media record saved most recently (crawl progress probe)."""
    return current_mediaInfo


def threadSave(start, end):
    """Crawl media ids in [start, end) using a 32-thread pool."""
    global executors, isStop
    try:
        executors = futures.ThreadPoolExecutor(32)
        isStop = False
        # Exiting the with-block waits for every submitted job to finish.
        # Fix: feed the range straight to map() instead of building a list
        # in a loop with a variable that shadowed the builtin `id`.
        with executors as executor:
            executor.map(save, range(start, end))
        logger.info('结束爬虫')
    except Exception as e:
        # Fix: narrowed from BaseException so Ctrl-C still interrupts.
        logger.error(e)


def stop_():
    """Request cancellation: workers and check() observe the flag and stop."""
    global isStop
    isStop = True