-
Notifications
You must be signed in to change notification settings - Fork 551
/
tf-31.py
executable file
·91 lines (79 loc) · 2.36 KB
/
tf-31.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
#!/usr/bin/env python
import sys, re, operator, string
#
# Functions for map reduce
#
def partition(data_str, nlines):
    """
    Split data_str into successive chunks of up to nlines lines.

    Yields each chunk as a single newline-joined string; the final
    chunk may contain fewer than nlines lines.
    """
    lines = data_str.split('\n')
    # range (not the Python-2-only xrange) keeps this portable to Python 3;
    # on Python 2 the materialized list is small relative to the input text.
    for i in range(0, len(lines), nlines):
        yield '\n'.join(lines[i:i + nlines])
def split_words(data_str):
    """
    Map step: turn a chunk of text into a list of (word, 1) pairs, so
    [(w1, 1), (w2, 1), ..., (wn, 1)].

    Words are lowercased runs of alphanumeric characters; stop words
    (read from ../stop_words.txt) and single letters are discarded.
    """
    def _scan(str_data):
        # Raw string: '\W' is a regex escape, not a valid string escape
        # (Python 3 warns on, and will eventually reject, the non-raw form).
        pattern = re.compile(r'[\W_]+')
        return pattern.sub(' ', str_data).lower().split()

    def _remove_stop_words(word_list):
        with open('../stop_words.txt') as f:
            stop_words = set(f.read().split(','))
        # Single letters count as noise too.
        stop_words.update(string.ascii_lowercase)
        # Set membership makes the filter O(1) per word instead of an
        # O(len(stop_words)) list scan for every word.
        return [w for w in word_list if w not in stop_words]

    # The actual work of the mapper.
    return [(w, 1) for w in _remove_stop_words(_scan(data_str))]
def regroup(pairs_list):
    """
    Shuffle step: group the mapper outputs by word.

    Takes a list of lists of pairs of the form
    [[(w1, 1), (w2, 1), ..., (wn, 1)],
     [(w1, 1), (w2, 1), ..., (wn, 1)],
     ...]
    and returns a dictionary mapping each unique word to the
    corresponding list of pairs, so
    { w1 : [(w1, 1), (w1, 1)...],
      w2 : [(w2, 1), (w2, 1)...],
      ...}
    """
    mapping = {}
    for pairs in pairs_list:
        for pair in pairs:
            # setdefault replaces the explicit membership check + two
            # branches with a single idiomatic lookup-or-insert.
            mapping.setdefault(pair[0], []).append(pair)
    return mapping
def count_words(mapping):
    """
    Reduce step: takes a pair of the form (word, [(word, 1), (word, 1)...])
    and returns (word, frequency), where frequency is the sum of all
    the reported occurrences.
    """
    # The builtin sum() exists in both Python 2 and 3; the bare builtin
    # reduce() is Python-2-only (it moved to functools in Python 3).
    return (mapping[0], sum(pair[1] for pair in mapping[1]))
#
# Auxiliary functions
#
def read_file(path_to_file):
    """Return the entire contents of the file at path_to_file as one string."""
    with open(path_to_file) as input_file:
        contents = input_file.read()
    return contents
def sort(word_freq):
    """Return the (word, frequency) pairs ordered by descending frequency."""
    return sorted(word_freq, key=lambda entry: entry[1], reverse=True)
#
# The main function
#
# Map the input file in 200-line chunks, shuffle by word, reduce to
# frequencies, and print the top 25.
splits = map(split_words, partition(read_file(sys.argv[1]), 200))
splits_per_word = regroup(splits)
word_freqs = sort(map(count_words, splits_per_word.items()))
for (w, c) in word_freqs[0:25]:
    # A single %-formatted string parses identically under Python 2 and 3
    # (the original Python-2 print statement is a syntax error in Python 3)
    # and reproduces the original "word  -  count" output exactly.
    print('%s  -  %s' % (w, c))