# build_local_zipnum.py (forked from ikreymer/webarchive-indexing)
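"""Build a ZipNum sharded CDX cluster on the local machine.

Runs ZipNumClusterJob (an mrjob job) over one or more CDX inputs, then
writes the 'cluster.summary' and 'cluster.loc' files that describe the
resulting gzipped shards.
"""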
import glob
import logging
import os
import sys

from argparse import ArgumentParser

from mrjob.job import MRJob

from zipnumclusterjob import ZipNumClusterJob


log = logging.getLogger(__name__)

#=============================================================================
def run_job(input_path, output_dir, shards, parallel, lines=None):
    # Assemble mrjob command-line args: suppress raw job output on stdout
    # and write results to output_dir instead.
    args = ['--no-output', '--output-dir', output_dir, '-r']

    # mrjob runners: 'local' uses multiple subprocesses, 'inline' runs
    # everything in this process (serial, but simpler to debug).
    if parallel:
        args.append('local')
    else:
        args.append('inline')

    args.append('--shards=' + str(shards))

    if lines:
        args.append('--numlines=' + str(lines))

    if isinstance(input_path, list):
        args.extend(input_path)
    else:
        args.append(input_path)

    output_dir = os.path.abspath(output_dir)
    if not os.path.isdir(output_dir):
        os.makedirs(output_dir)

    # The job picks up the output dir from this Hadoop-style env var.
    os.environ['mapreduce_output_fileoutputformat_outputdir'] = output_dir

    job = ZipNumClusterJob(args)

    with job.make_runner() as runner:
        runner.run()

def build_summary_and_loc(output_dir):
    # Write summary file: concatenate the sorted reducer 'part-*' outputs,
    # appending a 1-based sequence number to each line.
    full = os.path.join(output_dir, 'part-*')
    inputs = sorted(glob.glob(full))

    summary_file = os.path.join(output_dir, 'cluster.summary')
    print('Building Summary File: ' + summary_file)

    count = 1
    with open(summary_file, 'wb') as fh:
        for filein in inputs:
            with open(filein, 'rb') as partfh:
                for line in partfh:
                    line = line.rstrip()
                    line += b'\t' + str(count).encode('utf-8')
                    fh.write(line + b'\n')
                    count += 1

    # Write loc file: map each 'cdx-*' shard name to its local path.
    full = os.path.join(output_dir, 'cdx-*')
    inputs = sorted(glob.glob(full))

    loc_file = os.path.join(output_dir, 'cluster.loc')
    print('Building Loc File: ' + loc_file)

    with open(loc_file, 'wb') as fh:
        for filename in inputs:
            fh.write((os.path.basename(filename) + '\t' + filename + '\n').encode('utf-8'))

def main():
    parser = ArgumentParser()

    parser.add_argument('output', help='ZipNum Cluster output directory')

    parser.add_argument('inputs', nargs='+',
                        help='CDX input glob, e.g. /cdx/*.cdx.gz')

    parser.add_argument('-s', '--shards', default=10, type=int,
                        help='Number of ZipNum Cluster shards to create')

    parser.add_argument('-l', '--numlines', default=3000, type=int,
                        help='Number of lines per gzip block (default 3000)')

    parser.add_argument('-p', '--parallel', action='store_true',
                        help='Run in parallel (multiple mapper/reducer processes)')

    r = parser.parse_args()

    # Configure mrjob logging; silence the noisy 'mrjob.compat' logger.
    MRJob.set_up_logging(quiet=False,
                         verbose=False,
                         stream=sys.stderr)

    log.setLevel(logging.INFO)

    compat_log = logging.getLogger('mrjob.compat')
    compat_log.setLevel(logging.ERROR)

    run_job(r.inputs, r.output, r.shards, r.parallel, r.numlines)

    build_summary_and_loc(r.output)


if __name__ == "__main__":
    main()
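
# Example invocation (a sketch; the paths below are hypothetical):
#
#   python build_local_zipnum.py ./zipnum ./cdx/*.cdx.gz --shards 5 --parallel
#
# This shards the input CDX lines into gzipped 'cdx-*' files under ./zipnum,
# then writes ./zipnum/cluster.summary and ./zipnum/cluster.loc.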