Как добавить multiprocessing в парсер?

import grequests
from bs4 import BeautifulSoup
from openpyxl import Workbook
import time
from multiprocessing import Pool
import multiprocessing
start_time = time.time()  # wall-clock start; main() prints the elapsed time at the end

# Workbook setup: drop the default sheet and create one for the parsed rows.
wb = Workbook()
wb.remove(wb.active)
ws = wb.create_sheet('Родирование')
ws.title = 'Товары'
ws.append(['Артикул', 'Родировине'])

# Session cookies captured from a logged-in browser session.
# NOTE(review): hard-coded session ids will expire — refresh before running.
cookies = {
'ASP.NET_SessionId': 'i1gip0fre5uzl4iqlkubv1cp',
'SLG_G_WPT_TO': 'ru',
'SLG_GWPT_Show_Hide_tmp': '1',
'SLG_wptGlobTipTmp': '1',
'ICusrcartgd': 'be6d8ad2-c52e-49b8-83b2-f384a9feaa60',
'IWusrsesckgd': 'jojhbQMjYWEdV9ohRKijJKalgxKEvPEPzVqoH/F2376n50ziaNRcMA==',
}

# Browser-like headers so the AJAX endpoint accepts the requests.
headers = {
'authority': 'catalog.aquamarine.kz',
'sec-ch-ua': '" Not A;Brand";v="99", "Chromium";v="99", "Opera GX";v="85"',
'accept': 'application/json, text/javascript, */*; q=0.01',
'content-type': 'application/x-www-form-urlencoded; charset=UTF-8',
'x-requested-with': 'XMLHttpRequest',
'sec-ch-ua-mobile': '?0',
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.84 Safari/537.36 OPR/85.0.4341.79',
'sec-ch-ua-platform': '"Windows"',
'origin': 'https://catalog.aquamarine.kz',
'sec-fetch-site': 'same-origin',
'sec-fetch-mode': 'cors',
'sec-fetch-dest': 'empty',
'referer': 'https://catalog.aquamarine.kz/catalog/index.aspx',
'accept-language': 'ru-RU,ru;q=0.9,en-US;q=0.8,en;q=0.7',
# Requests sorts cookies= alphabetically
# 'cookie': 'ASP.NET_SessionId=i1gip0fre5uzl4iqlkubv1cp; SLG_G_WPT_TO=ru; SLG_GWPT_Show_Hide_tmp=1; SLG_wptGlobTipTmp=1; SLG_wptGlobTipTmp=1; ICusrcartgd=be6d8ad2-c52e-49b8-83b2-f384a9feaa60; IWusrsesckgd=jojhbQMjYWEdV9ohRKijJKalgxKEvPEPzVqoH/F2376n50ziaNRcMA==',
}


# POST body for the listing endpoint (empty search query).
data = {
'msearch': '',
}
page = 0  # NOTE(review): appears unused — getUrls() uses its own local `page`; confirm before deleting

def get_page(cookies, headers, data):  # Concurrent requests to the listing pages
    """POST all listing-page URLs concurrently and return the responses.

    Args:
        cookies: session cookies dict attached to every request.
        headers: HTTP headers dict attached to every request.
        data: form-encoded POST body (the catalog search parameters).

    Returns:
        List of responses from grequests.map; failed requests come back
        as None (the exception handler only logs them).
    """
    def exception_handler(request, exception):
        # Best-effort logging; the failed slot is returned as None by map().
        print("Request failed")

    # Lazy generator of pending requests; grequests.map drives them with
    # up to 40 concurrent connections.
    # (Removed an unused `cur = time.time()` local from the original.)
    pending = (grequests.post(url, cookies=cookies, data=data, headers=headers)
               for url in getUrls())
    return grequests.map(pending, exception_handler=exception_handler, size=40)

def getUrls():  # Build the list of listing-page URLs
    """Return all product-listing URLs to scrape.

    Pages 1..211 of the avid=5 catalog filter, followed by
    pages 1..140 of the avid=6 filter.
    """
    urls = []
    # First catalog filter (avid=5): 211 pages.
    for page in range(1, 212):
        urls.append(f'https://catalog.aquamarine.kz/catalog/products.ashx?rnd=8817290&q=&spec=&mip=317&map=7777%20777&mippg=161&mappg=5466%20222&miw=0.14&maw=137.74&miq=1&maq=241&miprcs=999999.999&maprcs=0&page={page}&sort=art-down&view=2&spc=1,&avid=5,&brid=7')
    # Second catalog filter (avid=6): 140 more pages.
    for page in range(1, 141):
        urls.append(f'https://catalog.aquamarine.kz/catalog/products.ashx?rnd=79684694&q=&spec=&mip=317&map=7777%20777&mippg=161&mappg=5466%20222&miw=0.14&maw=137.74&miq=1&maq=241&miprcs=999999.999&maprcs=0&page={page}&sort=art-down&view=2&spc=1,&avid=6,&brid=7')
    return urls
def get_all_links(i):
    """Extract the absolute product-page links from one listing response.

    Args:
        i: an HTTP response object whose .text holds the listing HTML.

    Returns:
        List of absolute product URLs found in the 'products' container.
    """
    # The payload arrives with escaping backslashes; replace them with
    # spaces before handing the markup to the parser.
    markup = i.text.replace('\\', ' ')
    soup = BeautifulSoup(markup, 'lxml')
    contain = soup.find('div', class_='products')
    return [
        'https://catalog.aquamarine.kz' + item.find('a').get('href')
        for item in contain.find_all('div', class_='item wide')
    ]

def get_data(data, headers, cookies, page1=1):
    """Scrape every listing page, follow each product link, and append
    (article, sizes) rows to the module-level worksheet ``ws``.

    Fixes over the original:
    - the outer loop variable ``i`` was shadowed by an inner ``for i in range(...)``;
    - ``soup.find_all('div', class_='center ssize')`` was re-run on every
      inner-loop iteration instead of once per product page;
    - bare ``except:`` clauses were narrowed;
    - ``None`` entries from grequests.map (failed requests) are skipped
      instead of aborting the whole listing page.

    Args:
        data: POST body for the listing endpoint.
        headers: HTTP headers for all requests.
        cookies: session cookies for all requests.
        page1: starting page number, used only for progress output.
    """
    for listing in get_page(cookies, headers, data):
        print(f'Страница: {page1}')
        try:
            pending = (grequests.get(url, cookies=cookies, headers=headers)
                       for url in get_all_links(listing))
            responses = grequests.map(pending)
            for data_page in responses:
                if data_page is None:
                    # Failed product request — skip it, keep the rest of the page.
                    continue
                soup = BeautifulSoup(data_page.text, 'lxml')
                articul = soup.find('td', text='Артикул').find_next_sibling('td').text
                try:
                    # Hoisted: query the size blocks once per product page.
                    rodirovs = soup.find_all('div', class_='center ssize')
                    # NOTE(review): `% 5` keeps at most 4 size blocks; this looks
                    # intentional (layout repeats every 5 blocks?) — confirm.
                    count = len(rodirovs) % 5
                    data_rodirov = []
                    for rodirov in rodirovs[:count]:
                        spans = rodirov.find_all('span')
                        data_rodirov.append(spans[0].text.strip() + spans[1].text.strip())
                    ws.append([articul, ','.join(data_rodirov)])
                except (AttributeError, IndexError):
                    # Product page without the expected size table — skip it.
                    pass
        except Exception:
            print(f'Страница: {page1} не загружена')
        page1 += 1
        

def main():
    """Run the scrape, save the workbook, and print the elapsed seconds."""
    get_data(data, headers, cookies)
    wb.save('Товары.xlsx')
    elapsed = time.time() - start_time
    print(elapsed)


if __name__ == '__main__':
    main()

Мне нужно как-то добавить в скрипт multiprocessing. Я пытался, но у меня не получалось.


Ответы (0 шт):