# -*- coding: utf-8 -*-
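# pool2th.py: reads article numbers from an XLS file, fetches the Ozon
# search page for each one in a thread pool, caches the raw HTML locally,
# then parses descriptions and prices back out of the cached pages.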
import sqlite3
import urllib
import urllib3
from bs4 import BeautifulSoup
import re
from tqdm import tqdm
from concurrent import futures
import collections
import xls_reader

# Read a file and return its contents
def read_file(filename):
    with open(filename) as input_file:
        text = input_file.read()
    return text
# Scrape the search page for one article and cache the HTML on disk
def parse_www(uri):
    art = urllib.quote_plus(uri[0].encode('cp1251'))
    url = url_parse.format(art)
    r = http.request('GET', url)
    data = r.data.decode('cp1251').encode('utf8')
    # quote_plus output is ASCII-safe, so it can go straight into the path
    with open(u"/home/alex/spider/html/{}.html".format(art), "w") as text_file:
        text_file.write(data)
    return
# Parse the cached HTML and collect the description and price
def parse_html(uri):
    art = urllib.quote_plus(uri[0].encode('cp1251'))
    text = read_file(u"/home/alex/spider/html/{}.html".format(art))
    soup = BeautifulSoup(text, "html.parser")
    results = soup.find_all('div', {'class': 'eItemProperties_text'})
    price_div = soup.find('div', {'class': 'bSaleColumn'})
    price = None
    if price_div is not None:
        price = price_div.find("span", {"itemprop": "price"})
    description = ''
    price_str = ''
    for res in results:
        description = description + res.text
    if len(description) > 0 and price is not None:
        price_str = price_str + price.text
    # drop the spaces used as thousands separators in the price
    re_w = re.compile(' ')
    price_str = re_w.sub('', price_str)
    sqls.append((uri[1], u"{}".format(description), price_str))
# Task queue: submit every item to the pool and wait for completion
def task_queue(task, iterator, pool):
    counter = collections.Counter()
    with pool as executor:
        to_do_map = {}
        for uri in sorted(iterator):
            future = executor.submit(task, uri)
            to_do_map[future] = uri
        done_iter = futures.as_completed(to_do_map)
        # tqdm wraps the completion iterator to show a progress bar
        done_iter = tqdm(done_iter, total=len(iterator))
        for future in done_iter:
            counter['status'] += 1
    return counter
# URL template for the Ozon search page
url_parse = u"http://www.ozon.ru/?context=search&text={}"
# Database connection (inserts are currently disabled below)
connection = sqlite3.connect('db.sqlite')
cursor = connection.cursor()
sqls = []
data_from_xls = xls_reader.get_arts_from_xls()
# Unique values from the input array
arts_unique = data_from_xls
# Thread pool: one worker per 20 articles, but never fewer than one
# (a bare len(arts_unique)/20 raises ValueError when it comes out to 0)
executor = futures.ThreadPoolExecutor(max_workers=max(1, len(arts_unique) // 20))
# Connection pool manager for the site
http = urllib3.PoolManager(10)
results_www = task_queue(parse_www, arts_unique, executor)
# task_queue shuts the pool down via its "with" block, so build a fresh one
executor = futures.ThreadPoolExecutor(max_workers=max(1, len(arts_unique) // 20))
results_html = task_queue(parse_html, arts_unique, executor)
xls_reader.put_stat_toxls(sqls)
#cursor.executemany("INSERT INTO html (art, value, price) VALUES (?, ?, ?)", sqls)
#connection.commit()
#connection.close()
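# If the disabled insert above is re-enabled, the target table must exist
# first. A minimal sketch of the schema the INSERT implies (hypothetical --
# the actual db.sqlite layout is not shown in this file):
#cursor.execute("CREATE TABLE IF NOT EXISTS html (art TEXT, value TEXT, price TEXT)")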