Merge pull request #4438 from seregaxvm/master
edit zsh completion
kmike committed Mar 20, 2020
2 parents 6c74795 + 9ab4532 commit c26308d
Showing 1 changed file with 38 additions and 38 deletions: extras/scrapy_zsh_completion
@@ -14,40 +14,40 @@ _scrapy() {
 ;;
 args)
 case $words[1] in
-bench)
+(bench)
 _scrapy_glb_opts
 ;;
-fetch)
+(fetch)
 local options=(
 '--headers[print response HTTP headers instead of body]'
 '--no-redirect[do not handle HTTP 3xx status codes and print response as-is]'
-'--spider[use this spider]:spider:_scrapy_spiders'
+'--spider=[use this spider]:spider:_scrapy_spiders'
 '1::URL:_httpie_urls'
 )
 _scrapy_glb_opts $options
 ;;
-genspider)
+(genspider)
 local options=(
-{-l,--list}'[List available templates]'
-{-e,--edit}'[Edit spider after creating it]'
+{'(--list)-l','(-l)--list'}'[List available templates]'
+{'(--edit)-e','(-e)--edit'}'[Edit spider after creating it]'
 '--force[If the spider already exists, overwrite it with the template]'
-{-d,--dump=}'[Dump template to standard output]:template:(basic crawl csvfeed xmlfeed)'
-{-t,--template=}'[Uses a custom template]:template:(basic crawl csvfeed xmlfeed)'
+{'(--dump)-d','(-d)--dump='}'[Dump template to standard output]:template:(basic crawl csvfeed xmlfeed)'
+{'(--template)-t','(-t)--template='}'[Uses a custom template]:template:(basic crawl csvfeed xmlfeed)'
 '1:name:(NAME)'
 '2:domain:_httpie_urls'
 )
 _scrapy_glb_opts $options
 ;;
-runspider)
+(runspider)
 local options=(
-{-o,--output}'[dump scraped items into FILE (use - for stdout)]:file:_files'
-{-t,--output-format}'[format to use for dumping items with -o]:format:(FORMAT)'
+{'(--output)-o','(-o)--output='}'[dump scraped items into FILE (use - for stdout)]:file:_files'
+{'(--output-format)-t','(-t)--output-format='}'[format to use for dumping items with -o]:format:(FORMAT)'
 '*-a[set spider argument (may be repeated)]:value pair:(NAME=VALUE)'
 '1:spider file:_files -g \*.py'
 )
 _scrapy_glb_opts $options
 ;;
-settings)
+(settings)
 local options=(
 '--get=[print raw setting value]:option:(SETTING)'
 '--getbool=[print setting value, interpreted as a boolean]:option:(SETTING)'
@@ -57,77 +57,77 @@ _scrapy() {
 )
 _scrapy_glb_opts $options
 ;;
-shell)
+(shell)
 local options=(
 '-c[evaluate the code in the shell, print the result and exit]:code:(CODE)'
 '--no-redirect[do not handle HTTP 3xx status codes and print response as-is]'
-'--spider[use this spider]:spider:_scrapy_spiders'
+'--spider=[use this spider]:spider:_scrapy_spiders'
 '::file:_files -g \*.html'
 '::URL:_httpie_urls'
 )
 _scrapy_glb_opts $options
 ;;
-startproject)
+(startproject)
 local options=(
 '1:name:(NAME)'
 '2:dir:_dir_list'
 )
 _scrapy_glb_opts $options
 ;;
-version)
+(version)
 local options=(
-{-v,--verbose}'[also display twisted/python/platform info (useful for bug reports)]'
+{'(--verbose)-v','(-v)--verbose'}'[also display twisted/python/platform info (useful for bug reports)]'
 )
 _scrapy_glb_opts $options
 ;;
-view)
+(view)
 local options=(
 '--no-redirect[do not handle HTTP 3xx status codes and print response as-is]'
-'--spider[use this spider]:spider:_scrapy_spiders'
+'--spider=[use this spider]:spider:_scrapy_spiders'
 '1:URL:_httpie_urls'
 )
 _scrapy_glb_opts $options
 ;;
-check)
+(check)
 local options=(
-'(- 1 *)'{-l,--list}'[only list contracts, without checking them]'
-{-v,--verbose}'[print contract tests for all spiders]'
+{'(--list)-l','(-l)--list'}'[only list contracts, without checking them]'
+{'(--verbose)-v','(-v)--verbose'}'[print contract tests for all spiders]'
 '1:spider:_scrapy_spiders'
 )
 _scrapy_glb_opts $options
 ;;
-crawl)
+(crawl)
 local options=(
-{-o,--output}'[dump scraped items into FILE (use - for stdout)]:file:_files'
-{-t,--output-format}'[format to use for dumping items with -o]:format:(FORMAT)'
+{'(--output)-o','(-o)--output='}'[dump scraped items into FILE (use - for stdout)]:file:_files'
+{'(--output-format)-t','(-t)--output-format='}'[format to use for dumping items with -o]:format:(FORMAT)'
 '*-a[set spider argument (may be repeated)]:value pair:(NAME=VALUE)'
 '1:spider:_scrapy_spiders'
 )
 _scrapy_glb_opts $options
 ;;
-edit)
+(edit)
 local options=(
-'1:spider:_scrapy_spiders'
+'1:spider:_scrapy_spiders'
 )
 _scrapy_glb_opts $options
 ;;
-list)
+(list)
 _scrapy_glb_opts
 ;;
-parse)
+(parse)
 local options=(
 '*-a[set spider argument (may be repeated)]:value pair:(NAME=VALUE)'
-'--spider[use this spider without looking for one]:spider:_scrapy_spiders'
+'--spider=[use this spider without looking for one]:spider:_scrapy_spiders'
 '--pipelines[process items through pipelines]'
 "--nolinks[don't show links to follow (extracted requests)]"
 "--noitems[don't show scraped items]"
 '--nocolour[avoid using pygments to colorize the output]'
-{-r,--rules}'[use CrawlSpider rules to discover the callback]'
-{-c,--callback=}'[use this callback for parsing, instead looking for a callback]:callback:(CALLBACK)'
-{-m,--meta=}'[inject extra meta into the Request, it must be a valid raw json string]:meta:(META)'
+{'(--rules)-r','(-r)--rules'}'[use CrawlSpider rules to discover the callback]'
+{'(--callback)-c','(-c)--callback'}'[use this callback for parsing, instead looking for a callback]:callback:(CALLBACK)'
+{'(--meta)-m','(-m)--meta='}'[inject extra meta into the Request, it must be a valid raw json string]:meta:(META)'
 '--cbkwargs=[inject extra callback kwargs into the Request, it must be a valid raw json string]:arguments:(CBKWARGS)'
-{-d,--depth=}'[maximum depth for parsing requests (default: 1)]:depth:(DEPTH)'
-{-v,--verbose}'[print each depth level one by one]'
+{'(--depth)-d','(-d)--depth='}'[maximum depth for parsing requests (default: 1)]:depth:(DEPTH)'
+{'(--verbose)-v','(-v)--verbose'}'[print each depth level one by one]'
 '1:URL:_httpie_urls'
 )
 _scrapy_glb_opts $options
@@ -162,7 +162,7 @@ _scrapy_cmds() {
 if [[ $(scrapy -h | grep -s "no active project") == "" ]]; then
 commands=(${commands[@]} ${project_commands[@]})
 fi
-_describe -t common-commands 'common commands' commands
+_describe -t common-commands 'common commands' commands && ret=0
 }
 
 _scrapy_glb_opts() {
@@ -172,13 +172,13 @@ _scrapy_glb_opts() {
 '(--nolog)--logfile=[log file. if omitted stderr will be used]:file:_files'
 '--pidfile=[write process ID to FILE]:file:_files'
 '--profile=[write python cProfile stats to FILE]:file:_files'
-'(--nolog)'{-L,--loglevel=}'[log level (default: INFO)]:log level:(DEBUG INFO WARN ERROR)'
+{'(--loglevel --nolog)-L','(-L --nolog)--loglevel='}'[log level (default: INFO)]:log level:(DEBUG INFO WARN ERROR)'
 '(-L --loglevel --logfile)--nolog[disable logging completely]'
 '--pdb[enable pdb on failure]'
 '*'{-s,--set=}'[set/override setting (may be repeated)]:value pair:(NAME=VALUE)'
 )
 options=(${options[@]} "$@")
-_arguments $options
+_arguments -A "-*" $options && ret=0
 }
 
 _httpie_urls() {
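The option specs this commit introduces follow two stock zsh _arguments conventions: a brace expansion that emits one spec per option form, each prefixed with a parenthesised exclusion list so that once -l is on the line zsh no longer offers --list (and vice versa), and a trailing '=' on options that take a value. A minimal sketch of the same pattern, assuming only stock zsh — the mytool completer and its options are hypothetical, not part of Scrapy:

    #compdef mytool
    # Hypothetical completer sketching the spec style used above.
    _mytool() {
        local ret=1
        local options=(
            # Brace expansion yields two specs: '(--list)-l[...]' and '(-l)--list[...]'.
            # The parenthesised list names options that are no longer offered
            # once the spec's own option is already on the command line.
            {'(--list)-l','(-l)--list'}'[list available templates]'
            # A trailing '=' marks an option taking a value, so zsh also
            # completes the '--template=value' form.
            {'(--template)-t','(-t)--template='}'[use a custom template]:template:(basic crawl)'
            '1:name:(NAME)'
        )
        # -A "-*": stop offering options after the first non-option word;
        # _arguments returns 0 when it added matches, hence the ret idiom.
        _arguments -A "-*" $options && ret=0
        return ret
    }

    _mytool "$@"

The same two _arguments changes appear in _scrapy_glb_opts above: -A "-*", and && ret=0 to propagate success back to the dispatching completion function.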
