-
Notifications
You must be signed in to change notification settings - Fork 3
/
crawling_tools.py
99 lines (77 loc) · 3.38 KB
/
crawling_tools.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
import argparse
import calendar
import datetime
import os

from dateutil.relativedelta import relativedelta

from query_crawler import crawl
def crawl_query_by_unit(query, save_dir, begin, end, mode, days=None):
    """Crawl NAVER results for `query` in chunks, walking backward from `end` to `begin`.

    Each chunk is saved as an .xlsx file under ``save_dir/query/``; chunks whose
    output file already exists are skipped, so an interrupted run can be resumed.

    Parameters
    ----------
    query : str
        Search term passed through to the crawler.
    save_dir : str
        Root output directory; results land in ``save_dir/query/``.
    begin, end : datetime.datetime
        Inclusive crawling window.
    mode : str
        Chunking unit: 'weekly', 'monthly' or 'interval'.
    days : int, optional
        Chunk length in days; only used when mode == 'interval'.

    Raises
    ------
    ValueError
        If `mode` is not one of the supported units (previously this surfaced
        later as an opaque ``TypeError: '>=' not supported`` on None).
    """
    if mode not in ('weekly', 'monthly', 'interval'):
        raise ValueError("mode must be 'weekly', 'monthly' or 'interval', got %r" % mode)

    print('\n====== DATA INFO ======')
    print('time:', datetime.datetime.now())
    print('name:', query)
    print('begin:', begin)
    print('end:', end)

    # The output directory is loop-invariant; create it once up front.
    os.makedirs(os.path.join(save_dir, query), exist_ok=True)

    # Align the first chunk end to the unit boundary (e.g. nearest Sunday for weekly).
    partial_end = initialize_partial_end(end, mode)
    while partial_end >= begin:
        partial_begin = update_partial_begin(partial_end, mode, days)
        partial_begin_str = partial_begin.strftime('%Y.%m.%d')
        partial_end_str = partial_end.strftime('%Y.%m.%d')
        print('\nstart crawling: %s from %s to %s' % (query, partial_begin_str, partial_end_str))
        save_as = os.path.join(save_dir, query, query + '_' + partial_begin_str + '-' + partial_end_str + '.xlsx')
        if os.path.exists(save_as):
            # Already saved by a previous run — resumable crawling.
            print('\talready crawled. go to next step')
        else:
            crawl(query=query,
                  save_as=save_as,
                  begin=partial_begin_str,
                  end=partial_end_str)
        # Step backward to the previous chunk.
        partial_end = update_partial_end(partial_end, mode, days)
def initialize_partial_end(end, mode):
    """Return the end date of the first (latest) crawling chunk for `end`.

    weekly   -> the Sunday of the week containing `end` (on or after `end`)
    monthly  -> the last day of `end`'s month
    interval -> `end` itself

    Raises ValueError for an unknown mode (the original fell through and
    returned None, which crashed the caller later with an opaque TypeError).
    """
    if mode == 'weekly':
        # datetime.weekday(): Monday == 0 ... Sunday == 6, so this lands on Sunday.
        return end - datetime.timedelta(days=end.weekday()) + datetime.timedelta(days=6)
    if mode == 'monthly':
        # calendar.monthrange -> (weekday of day 1, number of days in month);
        # stdlib replacement for the previous dateutil.relativedelta arithmetic.
        return end.replace(day=calendar.monthrange(end.year, end.month)[1])
    if mode == 'interval':
        return end
    raise ValueError("mode must be 'weekly', 'monthly' or 'interval', got %r" % mode)
def update_partial_begin(partial_end, mode, days=None):
    """Return the begin date of the chunk that ends at `partial_end`.

    weekly   -> the Monday of that week (partial_end is expected to be a Sunday)
    monthly  -> the first day of partial_end's month
    interval -> `days - 1` days before partial_end (an inclusive span of `days` days)

    Raises ValueError for an unknown mode, or for 'interval' without `days`
    (previously both failed with confusing TypeErrors on None).
    """
    if mode == 'weekly':
        return partial_end - datetime.timedelta(days=6)
    if mode == 'monthly':
        return partial_end.replace(day=1)
    if mode == 'interval':
        if days is None:
            raise ValueError("days is required when mode == 'interval'")
        return partial_end - datetime.timedelta(days=days - 1)
    raise ValueError("mode must be 'weekly', 'monthly' or 'interval', got %r" % mode)
def update_partial_end(partial_end, mode, days=None):
    """Step backward by one chunk: return the end date of the previous chunk.

    weekly   -> the previous Sunday (7 days back)
    monthly  -> the last day of the previous month
    interval -> `days` days back

    Raises ValueError for an unknown mode, or for 'interval' without `days`
    (previously both failed with confusing TypeErrors on None).
    """
    if mode == 'weekly':
        return partial_end - datetime.timedelta(days=7)
    if mode == 'monthly':
        # Day 1 of this month minus one day == last day of the previous month.
        return partial_end.replace(day=1) - datetime.timedelta(days=1)
    if mode == 'interval':
        if days is None:
            raise ValueError("days is required when mode == 'interval'")
        return partial_end - datetime.timedelta(days=days)
    raise ValueError("mode must be 'weekly', 'monthly' or 'interval', got %r" % mode)
def get_arguments():
    """Parse command-line options for the crawler.

    Returns an argparse.Namespace with: query, begin, end, save_dir, mode, days.
    """
    parser = argparse.ArgumentParser(
        description='Crawl NAVER search results in weekly/monthly/interval chunks.')
    parser.add_argument('--query', type=str, required=True, help='query to search on NAVER')
    parser.add_argument('--begin', type=str, required=True, help='crawling begin point (%%Y.%%m.%%d format)')
    parser.add_argument('--end', type=str, required=True, help='crawling end point (%%Y.%%m.%%d format)')
    parser.add_argument('--save_dir', type=str, default='test/', help='save directory')
    # Restrict --mode to the units the chunking helpers understand, so bad
    # input fails fast with a usage message instead of deep inside the loop.
    parser.add_argument('--mode', type=str, required=True,
                        choices=['weekly', 'monthly', 'interval'],
                        help='chunking unit')
    parser.add_argument('--days', type=int, default=7,
                        help='chunk length in days (only used with --mode interval)')
    return parser.parse_args()
if __name__ == '__main__':
    # Script entry point: parse CLI options and kick off the chunked crawl.
    args = get_arguments()
    date_format = '%Y.%m.%d'
    crawl_query_by_unit(query=args.query,
                        save_dir=args.save_dir,
                        begin=datetime.datetime.strptime(args.begin, date_format),
                        end=datetime.datetime.strptime(args.end, date_format),
                        mode=args.mode,
                        days=args.days)