# Simple n-gram (Markov chain) model for character-based text generation.
#
# Only tested with Python 3.6+
#
# Eli Bendersky (http://eli.thegreenplace.net)
# This code is in the public domain
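#
# Usage: python3 markov-model.py <text-file>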
from __future__ import print_function
from collections import defaultdict, Counter
import random
import sys

# This is the length of the "state" the current character is predicted from.
# For Markov chains with memory, this is the "order" of the chain. For n-grams,
# the n is STATE_LEN+1 since it includes the predicted character as well.
STATE_LEN = 4
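# Example: with STATE_LEN = 4, scanning the text "chains" yields the state
# "chai" predicting "n", and then the state "hain" predicting "s".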


def weighted_from_counter(c):
    """Samples a single element from Counter c, weighted by its count."""
    total = sum(c.values())
    idx = random.randrange(total)
    for elem, count in c.most_common():
        idx -= count
        if idx < 0:
            return elem
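

# Since the script targets Python 3.6+, random.choices can perform the same
# weighted draw in one call. Below is a sketch of a drop-in alternative; the
# helper name is ours, not part of the original script.
def weighted_from_counter_choices(c):
    # zip(*c.items()) splits the Counter into parallel tuples of elements
    # and counts; random.choices draws one element weighted by its count.
    population, counts = zip(*c.items())
    return random.choices(population, weights=counts)[0]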


def main():
    filename = sys.argv[1]
    with open(filename, 'r') as f:
        data = f.read()

    # Maps each STATE_LEN-character state to a Counter of the characters
    # that immediately follow it in the training text. Note the upper bound
    # of the range: len(data) - STATE_LEN covers every transition, including
    # the last one (the original len(data) - STATE_LEN - 1 skipped it).
    states = defaultdict(Counter)
    print('Learning model...')
    for i in range(len(data) - STATE_LEN):
        state = data[i:i + STATE_LEN]
        nextc = data[i + STATE_LEN]
        states[state][nextc] += 1
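    # Each entry now maps a state to follower frequencies; e.g. on English
    # text, states['the '] could hold Counter({'s': ..., 'm': ...})
    # (hypothetical entry for illustration; contents depend on the input).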
    print('Model has {0} states'.format(len(states)))

    # Show a small sample of the learned states and their follower counts.
    for i, (state, counter) in enumerate(states.items()):
        if i >= 10:
            break
        print(state, counter)
    print('Sampling...')
    # Start from a random state and repeatedly sample the next character,
    # sliding the state window forward one character at a time.
    state = random.choice(list(states))
    sys.stdout.write(state)
    for _ in range(200):
        counter = states[state]
        if not counter:
            # The current state only ever appeared at the very end of the
            # training text, so it has no recorded followers; stop sampling
            # instead of crashing in weighted_from_counter.
            break
        nextc = weighted_from_counter(counter)
        sys.stdout.write(nextc)
        state = state[1:] + nextc
    print()


if __name__ == '__main__':
    main()