update to 0.0.9: Reduce memory allocation
taishi-i committed Jun 27, 2018
1 parent 2053ec7 commit 29c8b20
Showing 3 changed files with 14 additions and 7 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -48,7 +48,7 @@ print(words.words)
 print(words.postags)
 #=> ['名詞', '助詞', '形状詞', '助動詞', '動詞', '名詞', '助動詞']

-# Nagisa gives you a simple word segmentation method.
+# The nagisa.wakati method is faster than the nagisa.tagging method.
 words = nagisa.wakati(text)
 print(words)
 #=> ['Python', 'で', '簡単', 'に', '使える', 'ツール', 'です']
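The comment change documents that nagisa.wakati performs segmentation only (no POS tags), which is presumably why it is faster than nagisa.tagging. A minimal sketch contrasting the two calls, based on the calls the README shows; the sample sentence is reconstructed from the wakati output above:

    import nagisa

    text = 'Pythonで簡単に使えるツールです'

    # Full analysis: word segmentation plus part-of-speech tags.
    tagged = nagisa.tagging(text)
    print(tagged.words)    # segmented words
    print(tagged.postags)  # POS tags such as 名詞, 助詞, ...

    # Segmentation only: returns a plain list of words and skips the
    # POS-tagging step, which is where the speedup comes from.
    words = nagisa.wakati(text)
    print(words)  # ['Python', 'で', '簡単', 'に', '使える', 'ツール', 'です']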
2 changes: 1 addition & 1 deletion nagisa/model.py
@@ -4,7 +4,7 @@

 import numpy as np
 import dynet_config
-dynet_config.set(random_seed=1234)
+dynet_config.set(mem=32, random_seed=1234)
 import dynet as dy

 class Model(object):
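The mem=32 argument is the "Reduce memory allocation" part of the commit: it asks DyNet to pre-allocate a smaller (32 MB) memory pool, while random_seed=1234 keeps the deterministic initialization the old line already set. A minimal sketch of the pattern the change relies on; in DyNet, dynet_config.set only takes effect if it runs before dynet is imported:

    import dynet_config

    # Must run before "import dynet"; once dynet is loaded, the settings are fixed.
    dynet_config.set(mem=32, random_seed=1234)  # memory pool size in MB, fixed RNG seed

    import dynet as dy  # picks up the 32 MB pool and the seed at import time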
17 changes: 12 additions & 5 deletions setup.py
@@ -6,8 +6,14 @@
 from setuptools import setup
 from setuptools.extension import Extension

-with open('README.md') as f:
-    long_description = f.read()
+
+try:
+    from pypandoc import convert
+    long_description = convert('README.md', 'rst')
+except ImportError:
+    with open('README.md') as f:
+        long_description = f.read()
+

 classifiers = [
     'License :: OSI Approved :: MIT License',
@@ -20,6 +26,7 @@
     'Topic :: Software Development :: Libraries :: Python Modules'
 ]

+
 class defer_cythonize(list):
     def __init__(self, callback):
         self._list, self.callback = None, callback
@@ -52,11 +59,11 @@ def extensions():
     packages=['nagisa'],
     author = 'Taishi Ikeda',
     author_email = 'taishi.ikeda.0323@gmail.com',
-    version = '0.0.8',
-    description = 'Japanese word segmentation/POS tagging tool based on neural networks',
+    version = '0.0.9',
+    description = 'A Japanese tokenizer based on recurrent neural networks',
     long_description = long_description,
     url = 'https://github.com/taishi-i/nagisa',
-    download_url = 'https://github.com/taishi-i/nagisa/archive/0.0.8.tar.gz',
+    download_url = 'https://github.com/taishi-i/nagisa/archive/0.0.9.tar.gz',
     license = 'MIT License',
     platforms = 'Unix',
     setup_requires=['six', 'cython', 'numpy',],
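The new try/except block converts README.md from Markdown to reStructuredText with pypandoc for the PyPI long_description, falling back to the raw Markdown when pypandoc is not installed. A quick local check of the conversion, assuming pypandoc and the pandoc binary are available (this snippet is illustrative, not part of the commit):

    from pypandoc import convert

    # The same call setup.py now makes: Markdown README -> reStructuredText.
    rst = convert('README.md', 'rst')
    print(rst[:300])  # preview the description text uploaded to PyPI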
