Skip to content

Commit

Permalink
Fix minor bugs in search prompts
Browse files Browse the repository at this point in the history
  • Loading branch information
dipu-bd committed Dec 12, 2018
1 parent f350dd8 commit 08fa202
Show file tree
Hide file tree
Showing 5 changed files with 21 additions and 17 deletions.
12 changes: 7 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ Crawls light novels and make html, text, epub and mobi

### Easy download

📦 [lightnovel-crawler v2.5.10 for windows ~ 11MB](https://goo.gl/sc4EZh)
📦 [lightnovel-crawler v2.6.1 for windows ~ 11MB](https://goo.gl/sc4EZh)

### Installation

Expand Down Expand Up @@ -46,18 +46,20 @@ To view list of available options:
```bash
$ lncrawl -h
================================================================
📒 Lightnovel Crawler 🍀 2.5.0
📒 Lightnovel Crawler 🍀 2.6.1
Download lightnovels into html, text, epub, mobi and json
----------------------------------------------------------------
usage: lncrawl [options...]
lightnovel-crawler [options...]

optional arguments:
-h, --help show this help message and exit
-l Set log levels (1 = warn, 2 = info, 3 = debug)
-v, --version show program's version number and exit
-l, --log Set log levels (1 = warn, 2 = info, 3 = debug)
-s NOVEL_PAGE, --source NOVEL_PAGE
Profile page url of the novel
-q QUERY, --query QUERY
Novel query followed by list of source sites.
-f, --force Force replace any existing folder
-b, --byvol Build separate books by volumes
--login USER PASSWD User name/email address and password for login
Expand All @@ -74,14 +76,14 @@ optional arguments:
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
```
> There is a verbose mode for extended logging: `lncrawl -lll`
> To view extra logs, use: `lncrawl -lll`
### Adding new source
- Use the [`_sample.py`](https://github.com/dipu-bd/lightnovel-crawler/blob/master/lightnovel_crawler/_sample.py) as blueprint.
- Add your crawler to [`__init__.py`](https://github.com/dipu-bd/lightnovel-crawler/blob/master/lightnovel_crawler/__init__.py).
## Available websites
## Supported websites
The list of crawlable websites is given below. *Request a new site by [creating a new issue](https://github.com/dipu-bd/lightnovel-crawler/issues)*.
Expand Down
2 changes: 1 addition & 1 deletion README.pip
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,6 @@ List of supported sites are given below.
- https://www.romanticlovebooks.com
- https://webnovel.online

To request new site create an issue here: https://github.com/dipu-bd/site-to-epub/issues.
To request new site create an issue here: https://github.com/dipu-bd/lightnovel-crawler/issues.

For usage instructions visit: https://github.com/dipu-bd/lightnovel-crawler
2 changes: 1 addition & 1 deletion VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
2.6.0
2.6.1
4 changes: 4 additions & 0 deletions lightnovel_crawler/app/program.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,7 @@ def get_crawler_instance(self, choice_list):
instance.home_url = link.strip('/')
results = instance.search_novel(novel)
search_results += results
logger.debug(results)
logger.info('%d results found', len(results))
except Exception as ex:
logger.debug(ex)
Expand All @@ -100,6 +101,9 @@ def get_crawler_instance(self, choice_list):
novel = choose_a_novel(search_results)
# end if

if not novel:
raise Exception('Novel URL was not specified')
# end if
for home_url, crawler in choice_list.items():
if novel.startswith(home_url):
self.crawler = crawler()
Expand Down
18 changes: 8 additions & 10 deletions lightnovel_crawler/app/prompts.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,28 +42,26 @@ def get_crawlers_to_search(links):
'name': 'sites',
'message': 'Where to search?',
'choices': [ { 'name': x } for x in links ],
'validate': lambda ans: True if len(ans) > 0 \
else 'You must choose at least one site.'
}
])
return answer['sites'] if len(answer['sites']) else links
# end def

def choose_a_novel(search_results):
if len(search_results) == 1:
if len(search_results) == 0:
return ''
elif len(search_results) == 1:
return search_results[0][1]
# end if
answer = prompt([
{
'type': 'list' if Icons.hasSupport else 'rawlist',
'type': 'list',
'name': 'novel_url',
'message': 'Where to search?',
'choices': [
{ 'name': '%s (%s)' % x }
{ 'name': '%s (%s)' % (x[0], x[1]) }
for x in sorted(search_results)
],
'validate': lambda ans: True if len(ans) > 0 \
else 'You must choose at least one site.'
}
])
selected = answer['novel_url']
Expand Down Expand Up @@ -152,7 +150,7 @@ def download_selection(chapter_count, volume_count):

answer = prompt([
{
'type': 'list' if Icons.hasSupport else 'rawlist',
'type': 'list',
'name': 'choice',
'message': 'Which chapters to download?',
'choices': choices,
Expand All @@ -164,7 +162,7 @@ def download_selection(chapter_count, volume_count):


def range_using_urls(crawler):
start_url, stop_url = get_args().page
start_url, stop_url = get_args().page or (None, None)

if not (start_url and stop_url):
def validator(val):
Expand Down Expand Up @@ -201,7 +199,7 @@ def validator(val):


def range_using_index(chapter_count):
start, stop = get_args().range
start, stop = get_args().range or (None, None)

if not (start and stop):
def validator(val):
Expand Down

0 comments on commit 08fa202

Please sign in to comment.