```python
import scrapy
import json
from scrapy.http import Response
from Medical.items import MedicalItem
from tqdm import tqdm
'''
The spider itself: POSTs to the listing endpoint, then follows each record's detail page.
'''
class MedicalSpider(scrapy.Spider):
name = "medical"
allowed_domains = ["beian.cfdi.org.cn"]
# start_urls = ["https://beian.cfdi.org.cn/CTMDS/pub/PUB010100.do?method=handle05&_dt=20231101162330"]
# 重写第一次请求处理函数
def start_requests(self):
start_url = 'https://www.baidu.com/CTMDS/pub/PUB010100.do?method=handle05&_dt=20231101162330'
# 发送post请求
data = {
'pageSize': '1353',
'curPage': '1',
}
yield scrapy.FormRequest(url=start_url, formdata=data, callback=self.parse)
    def parse(self, response):
        # Decode the JSON payload
        jsonRes = json.loads(response.body)
        # Check the success flag in the response
        status = jsonRes['success']
        if status:
            # The list of registered institutions
            dataList = jsonRes['data']
            # Request each institution's detail page, passing the listing row along in meta
            for row in tqdm(dataList, desc='crawl progress'):
                urlDetail = f"https://beian.cfdi.org.cn/CTMDS/pub/PUB010100.do?method=handle04&compId={row['companyId']}"
                yield scrapy.Request(url=urlDetail, callback=self.parseDetail, meta={'row': row})
    def parseDetail(self, response: Response):
        # Create a MedicalItem instance
        item = MedicalItem()
        # Recover the listing row passed along via meta
        row = response.meta['row']
        item['companyId'] = row['companyId']
        item['linkTel'] = row['linkTel']
        item['recordNo'] = row['recordNo']
        item['areaName'] = row['areaName']
        item['linkMan'] = row['linkMan']
        item['address'] = row['address']
        item['compName'] = row['compName']
        item['recordStatus'] = row['recordStatus']
        item['cancelRecordTime'] = row.get('cancelRecordTime', '')
        # Registration details from the detail page
        divTextList = response.xpath("//div[@class='col-md-8 textlabel']/text()").extract()
        # Strip surrounding whitespace
        divTextList = [text.strip() for text in divTextList]
        compLevel = ''
        if len(divTextList) > 2:
            compLevel = divTextList[2]
        recordTime = ''
        if len(divTextList) > 6:
            recordTime = divTextList[6]
        item['compLevel'] = compLevel
        item['recordTime'] = recordTime
        # Addresses of the institution's other sites
        divListOther = response.xpath("//div[@class='col-sm-8 textlabel']/text()").extract()
        divTextListOther = [text.strip() for text in divListOther]
        item['otherOrgAdd'] = ','.join(divTextListOther)
        # Registered specialties and principal-investigator rows
        trList = response.xpath("//table[@class='table table-striped']/tbody/tr")
        tdTextList = [tr.xpath("./td/text()").extract() for tr in trList]
        item['tdTextList'] = tdTextList
        # Hand the item to the pipeline
        yield item
```
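
The spider and the pipeline both import `MedicalItem` from `Medical.items`, which the post does not show. Here is a minimal sketch of what `Medical/items.py` would need to declare; the field names are taken from the assignments above, but the class body itself is an assumption:

```python
# Medical/items.py -- minimal sketch; the field names come from the spider's
# assignments, the rest of the file is assumed.
import scrapy


class MedicalItem(scrapy.Item):
    companyId = scrapy.Field()
    linkTel = scrapy.Field()
    recordNo = scrapy.Field()
    areaName = scrapy.Field()
    linkMan = scrapy.Field()
    address = scrapy.Field()
    compName = scrapy.Field()
    recordStatus = scrapy.Field()
    cancelRecordTime = scrapy.Field()
    compLevel = scrapy.Field()
    recordTime = scrapy.Field()
    otherOrgAdd = scrapy.Field()
    tdTextList = scrapy.Field()  # list of [specialty, investigator, title] rows
```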
```python
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html

# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
import pymysql

from Medical.items import MedicalItem


class MedicalPipeline:
    # Runs once when the spider opens
    def open_spider(self, spider):
        # Connect to MySQL
        self.db = pymysql.connect(
            host='localhost',
            port=3306,
            user='root',
            password='logicfeng',
            database='test2'
        )
        # Create a cursor object
        self.cursor = self.db.cursor()
    def process_item(self, item, spider):
        companyId = item['companyId']
        linkTel = item['linkTel']
        recordNo = item['recordNo']
        areaName = item['areaName']
        linkMan = item['linkMan']
        address = item['address']
        compName = item['compName']
        recordStatus = item['recordStatus']
        cancelRecordTime = item.get('cancelRecordTime', '')
        compLevel = item.get('compLevel', '')
        recordTime = item.get('recordTime', '')
        otherOrgAdd = item.get('otherOrgAdd', '')
        tdTextList = item['tdTextList']
        # Insert the registration record; parameterized SQL avoids the quoting
        # and injection problems of building the statement with f-strings
        sql = ("insert into medical_register"
               "(company_id,area_name,record_no,comp_name,address,link_man,link_tel,"
               "record_status,comp_level,record_time,cancel_record_time,other_org_add) "
               "values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)")
        self.cursor.execute(sql, (companyId, areaName, recordNo, compName,
                                  address, linkMan, linkTel, recordStatus,
                                  compLevel, recordTime, cancelRecordTime,
                                  otherOrgAdd))
        self.db.commit()
        # One child row per registered specialty / principal investigator
        sqlSub = ("insert into medical_register_sub"
                  "(company_id,professional_name,principal_investigator,job_title) "
                  "values(%s,%s,%s,%s)")
        for tdText in tdTextList:
            # Prepend the foreign key to the three cell values
            tdText.insert(0, companyId)
            self.cursor.execute(sqlSub, tdText)
            self.db.commit()
        return item
    def close_spider(self, spider):
        self.cursor.close()
        self.db.close()
        print("Database connection closed!")
```