Fix a bunch of long lines.

test/lint.sh now has a language-independent function to detect them,
including in ASDL files.

Also:
- Minor updates to the quick reference.
Andy Chu committed Feb 4, 2018
1 parent 2192156 commit f09c2d6a7342b9d2cfe26ec548931479c49268f2
@@ -198,6 +198,9 @@ update-src-versions() {
_sed-ext \
"s/oil-[0-9]+.[0-9]+.[a-z0-9]+/oil-$OIL_VERSION/g" INSTALL.txt
_sed-ext \
"s;/release/[0-9]+.[0-9]+.[a-z0-9]+/;/release/$OIL_VERSION/;g" doc/osh-quick-ref-toc.txt
}
"$@"
@@ -17,7 +17,8 @@
# 2. lower-case or upper-case topic
# 3. Optional: A SINGLE space, then punctuation
TOPIC_RE = re.compile(r'\b(X[ ])?\@?([a-z_\-]+|[A-Z0-9_]+)([ ]\S+)?', re.VERBOSE)
TOPIC_RE = re.compile(
r'\b(X[ ])?\@?([a-z_\-]+|[A-Z0-9_]+)([ ]\S+)?', re.VERBOSE)
# Sections have alphabetical characters, spaces, and '/' for I/O. They are
# turned into anchors.
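As a rough illustration of how that pattern behaves (a minimal sketch; the inputs and the use of match() here are assumptions, since the real driver code is not shown in this hunk):

    import re

    # Same pattern as in the hunk above: group 1 is the optional 'X ' marker for
    # unimplemented topics, group 2 is the topic name, group 3 is an optional
    # single space plus trailing punctuation.
    TOPIC_RE = re.compile(
        r'\b(X[ ])?\@?([a-z_\-]+|[A-Z0-9_]+)([ ]\S+)?', re.VERBOSE)

    for text in ['X strftime()', 'osh-vs-oil']:  # hypothetical inputs
        m = TOPIC_RE.match(text)
        print(m.groups())  # (marker or None, topic, punctuation or None)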
@@ -24,7 +24,8 @@ def testFlagsAndOptions(self):
s.Option('u', 'nounset')
s.Option(None, 'pipefail')
argv = ['-c', 'echo hi', '-e', '-o', 'nounset', 'foo', '--help'] # don't parse args afterward
# don't parse args afterward
argv = ['-c', 'echo hi', '-e', '-o', 'nounset', 'foo', '--help']
arg, i = s.Parse(argv)
print(arg, argv[i:])
@@ -505,8 +505,9 @@ def _GetCompletionType(w_parser, c_parser, ev, status_out):
pass
# TODO: Need to show buf... Need a multiline display for debugging?
status_out.Write(1, 'prev_token %s cur_token %s cur_word %s', prev_token, cur_token,
cur_word)
status_out.Write(1,
'prev_token %s cur_token %s cur_word %s',
prev_token, cur_token, cur_word)
status_out.Write(2, 'comp_state %s error %s', comp_state, c_parser.Error())
# This one can be multiple lines
status_out.Write(3, 'node: %s %s', repr(node) if node else '<Parse Error>',
@@ -543,8 +543,8 @@ def Eval(self, node):
s1 = self._EvalCompoundWord(node.left)
# Whether to glob escape
do_fnmatch = op_id in (
Id.BoolBinary_GlobEqual, Id.BoolBinary_GlobDEqual, Id.BoolBinary_GlobNEqual)
do_fnmatch = op_id in (Id.BoolBinary_GlobEqual, Id.BoolBinary_GlobDEqual,
Id.BoolBinary_GlobNEqual)
s2 = self._EvalCompoundWord(node.right, do_fnmatch=do_fnmatch)
# Now dispatch on arg type
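For context, the GlobEqual/GlobDEqual/GlobNEqual cases ultimately behave roughly like fnmatch over the two evaluated strings, which is why the right-hand side must not be glob-escaped. A minimal sketch of that behavior (illustrative only, not the real evaluator):

    from fnmatch import fnmatchcase

    # do_fnmatch=True keeps the right-hand side usable as a pattern like this.
    print(fnmatchcase('foo.py', '*.py'))   # True  -> [[ foo.py == *.py ]]
    print(fnmatchcase('foo.py', 'bar.*'))  # False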
@@ -185,7 +185,8 @@ def _AddKinds(spec):
spec.AddKind('Ignored', ['LineCont', 'Space', 'Comment'])
# Id.WS_Space is for lex_mode_e.OUTER; Id.Ignored_Space is for lex_mode_e.ARITH
# Id.WS_Space is for lex_mode_e.OUTER; Id.Ignored_Space is for
# lex_mode_e.ARITH
spec.AddKind('WS', ['Space'])
spec.AddKind('Lit', [
@@ -500,7 +501,8 @@ def _SetupTestBuiltin(id_spec, unary_lookup, binary_lookup, other_lookup):
_AddKinds(ID_SPEC)
_AddBoolKinds(ID_SPEC) # must come second
_SetupTestBuiltin(ID_SPEC, TEST_UNARY_LOOKUP, TEST_BINARY_LOOKUP, TEST_OTHER_LOOKUP)
_SetupTestBuiltin(ID_SPEC, TEST_UNARY_LOOKUP, TEST_BINARY_LOOKUP,
TEST_OTHER_LOOKUP)
# Debug
@@ -35,15 +35,17 @@ def GenCppCode(kind_names, id_names, f, id_labels=None, kind_labels=None):
Emit('', f)
Emit('enum class Kind : uint8_t {', f)
if kind_labels:
Emit(', '.join(['%s=%s' % (k, kind_labels[k]) for k in kind_names]) + ',', f, 1)
s = ', '.join(['%s=%s' % (k, kind_labels[k]) for k in kind_names]) + ','
Emit(s, f, 1)
else:
Emit(', '.join(kind_names), f, 1)
Emit('};\n', f)
Emit('enum class Id : uint8_t {', f)
for names_in_kind in id_names:
if id_labels:
Emit(', '.join(['%s=%s' % (i, id_labels[i]) for i in names_in_kind]) + ',', f, 1)
s = ', '.join(['%s=%s' % (i, id_labels[i]) for i in names_in_kind]) + ','
Emit(s, f, 1)
else:
Emit(', '.join(names_in_kind) + ',', f, 1)
Emit('', f)
@@ -64,7 +66,9 @@ def GenCppCode(kind_names, id_names, f, id_labels=None, kind_labels=None):
if id_labels:
for id_name in names_in_kind:
kind_name = id_name.split('_')[0]
test = 'if (LookupKind(Id::%s) != Kind::%s) return 1;' % (id_name, kind_name)
test = (
'if (LookupKind(Id::%s) != Kind::%s) return 1;' %
(id_name, kind_name))
Emit(test, f, 1)
else:
pass
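The surrounding function writes C++ enum class declarations; a stripped-down sketch of the same joining logic (assuming a trivial Emit helper that writes one indented line, which is not shown in this hunk):

    import sys

    def Emit(s, f, depth=0):
        # Assumed helper: write one line at the given indent depth.
        f.write('  ' * depth + s + '\n')

    kind_names = ['Lit', 'WS']          # hypothetical inputs
    kind_labels = {'Lit': 1, 'WS': 2}   # optional explicit enum values

    Emit('enum class Kind : uint8_t {', sys.stdout)
    line = ', '.join(['%s=%s' % (k, kind_labels[k]) for k in kind_names]) + ','
    Emit(line, sys.stdout, 1)
    Emit('};\n', sys.stdout)
    # Writes roughly:
    #   enum class Kind : uint8_t {
    #     Lit=1, WS=2,
    #   };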
@@ -243,7 +243,8 @@ def Split(self, s, allow_escape):
CH_DE_WHITE, CH_DE_GRAY, CH_BLACK, CH_BACKSLASH = range(4)
# Nodes are states
ST_INVALID, ST_START, ST_DE_WHITE1, ST_DE_GRAY, ST_DE_WHITE2, ST_BLACK, ST_BACKSLASH = range(7)
(ST_INVALID, ST_START, ST_DE_WHITE1, ST_DE_GRAY, ST_DE_WHITE2,
ST_BLACK, ST_BACKSLASH) = range(7)
# Actions control what spans to emit.
EMIT_PART, EMIT_DE, EMIT_EMPTY, EMIT_ESCAPE, NO_EMIT = range(5)
@@ -347,8 +348,8 @@ def Split(self, s, allow_escape):
raise AssertionError(
'Invalid transition from %r with %r' % (state, ch))
#from core.util import log
#log('i %d c %r ch %s state %s new_state %s action %s', i, c, ch, state, new_state, action)
#log('i %d c %r ch %s state %s new_state %s action %s',
# i, c, ch, state, new_state, action)
if action == EMIT_PART:
spans.append((span_e.Black, i))
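The splitter here is a table-driven state machine: each (current state, character class) pair maps to a new state plus an action that may emit a span. A minimal sketch of that shape (the transition entries below are placeholders; the real table lives in the file being patched):

    # Mirrors the constants above.
    CH_DE_WHITE, CH_DE_GRAY, CH_BLACK, CH_BACKSLASH = range(4)
    (ST_INVALID, ST_START, ST_DE_WHITE1, ST_DE_GRAY, ST_DE_WHITE2,
     ST_BLACK, ST_BACKSLASH) = range(7)
    EMIT_PART, EMIT_DE, EMIT_EMPTY, EMIT_ESCAPE, NO_EMIT = range(5)

    # Placeholder transitions: (state, char class) -> (new state, action).
    TRANSITIONS = {
        (ST_START, CH_BLACK):    (ST_BLACK,     NO_EMIT),
        (ST_BLACK, CH_DE_WHITE): (ST_DE_WHITE1, EMIT_PART),
        (ST_BLACK, CH_BLACK):    (ST_BLACK,     NO_EMIT),
    }

    def step(state, ch):
        try:
            return TRANSITIONS[(state, ch)]
        except KeyError:
            raise AssertionError(
                'Invalid transition from %r with %r' % (state, ch))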
@@ -1,8 +1,9 @@
#!/usr/bin/env python
"""
core/runtime.py -- Parse runtime.asdl and dynamically create classes on this module.
core/runtime.py
Similar to osh/ast_.py.
Parse runtime.asdl and dynamically create classes on this module. Similar to
osh/ast_.py.
"""
import sys
@@ -140,7 +140,8 @@ def __init__(self, mem):
assert shellopts.tag == value_e.Str, shellopts
self._InitOptionsFromEnv(shellopts.s)
# shopt -s / -u. NOTE: bash uses $BASHOPTS rather than $SHELLOPTS for these.
# shopt -s / -u. NOTE: bash uses $BASHOPTS rather than $SHELLOPTS for
# these.
self.nullglob = False
self.failglob = False
@@ -57,7 +57,8 @@ class _WordEvaluator:
def EvalWordToString(self, w, do_fnmatch=False):
# do_fnmatch: for the [[ == ]] semantics which we don't have!
# I think I need another type of node
# Maybe it should be BuiltinEqual and BuiltinDEqual? Parse it into a different tree.
# Maybe it should be BuiltinEqual and BuiltinDEqual? Parse it into a
# different tree.
return runtime.Str(w.s)
@@ -47,7 +47,8 @@ def __init__(self, msg, *args, **kwargs):
raise AssertionError('Invalid keyword args %s' % kwargs)
def __repr__(self):
return '<%s %s %r %r %s>' % (self.msg, self.args, self.token, self.word, self.exit_status)
return '<%s %s %r %r %s>' % (
self.msg, self.args, self.token, self.word, self.exit_status)
def __str__(self):
# The default doesn't work very well?
@@ -316,4 +316,21 @@ Bash has this, but OSH won't implement it.
#### <OSH-Options> Options Only in OSH
##### <ENVIRONMENT-VARIABLES> Environment Variables
##### <SPECIAL-VARIABLES> Special Variables
##### <PLUGINS-AND-HOOKS> Plugins and Hooks
##### <OIL-EXTENSIONS> Oil Extensions
##### <OIL-LIBRARIES> Oil Libraries
#### <Builtin-Procs> Builtin Procs
#### <Builtin-Funcs> Builtin Funcs
### <strftime> strftime()
Useful for logging callbacks. NOTE: bash has this with the obscure
printf '%(...)' syntax.
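For comparison, the equivalent formatting in Python (the Oil strftime() builtin above is only proposed, so this is an analogy rather than its implementation):

    import time

    # Roughly what a logging callback would want from strftime().
    print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()))
    # bash >= 4.2 spells the same thing as: printf '%(%Y-%m-%d %H:%M:%S)T\n' -1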
@@ -2,8 +2,8 @@ OSH Quick Reference
- Below is a list of topics, organized into [Sections].
- Features not yet implemented have an X prefix.
- View it on the web at https://www.oilshell.org/TODO
- NOTE: Oil features are all unimplemented!
- Oil features are all unimplemented!
- View it on the web at https://www.oilshell.org/release/0.4.0/doc/osh-quick-ref.html
INTRO
[Overview] overview osh-vs-oil command-vs-expr
@@ -159,7 +159,7 @@ X [xargs] each
OIL LIBRARIES
X [Builtin Procs] log die
X [Builtin Funcs] shEvalArith() shEvalWord()
X [Builtin Funcs] shEvalArith() shEvalWord() strftime()
X [getopts] ?
X [Testing] ?
X [Data Formats] json csv tsv2
@@ -74,7 +74,8 @@ def AbbreviateNodes(obj, node):
node.unnamed_fields.append(MakeTree(part, AbbreviateNodes))
# Only abbreviate 'foo', not $'foo\n'
elif node.node_type == 'SingleQuotedPart' and obj.left.id == Id.Left_SingleQuote:
elif (node.node_type == 'SingleQuotedPart' and
obj.left.id == Id.Left_SingleQuote):
node.abbrev = True
node.node_type = 'SQ'
@@ -262,11 +262,13 @@ def ParseFactor(self):
if not self._Next(): return None
node = self.ParseExpr()
if self.op_id != Id.Op_RParen:
self.AddErrorContext("Expected ), got %s", self.cur_word, word=self.cur_word)
self.AddErrorContext(
'Expected ), got %s', self.cur_word, word=self.cur_word)
return None
if not self._Next(): return None
return node
# TODO: A proper error, e.g. for [[ && ]] or [[ ]]
self.AddErrorContext("Unexpected token: %s" % self.cur_word, word=self.cur_word)
self.AddErrorContext(
'Unexpected token: %s' % self.cur_word, word=self.cur_word)
return None
@@ -354,8 +354,8 @@ def _MakeSimpleCommand(self, prefix_bindings, suffix_words, redirects):
# character of the initial word.
# However, this means we must do tilde detection AFTER brace EXPANSION, not
# just after brace DETECTION like we're doing here.
# The BracedWordTree instances have to be expanded into CompoundWord instances
# for the tilde detection to work.
# The BracedWordTree instances have to be expanded into CompoundWord
# instances for the tilde detection to work.
words2 = braces.BraceDetectAll(suffix_words)
words3 = word.TildeDetectAll(words2)
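A flat-string illustration of that ordering constraint (purely hypothetical helpers; the real code operates on word and word_part ASDL nodes, not strings):

    import re

    def brace_expand(w):
        # '~{alice,bob}/src' -> ['~alice/src', '~bob/src']
        m = re.match(r'(.*)\{([^}]*)\}(.*)', w)
        if not m:
            return [w]
        pre, alts, post = m.groups()
        return [pre + alt + post for alt in alts.split(',')]

    def tilde_detect(w):
        # '~alice/src' -> ('~alice', '/src')
        m = re.match(r'(~[a-z]*)(/.*)?$', w)
        return m.groups() if m else (None, w)

    words = brace_expand('~{alice,bob}/src')
    print([tilde_detect(w) for w in words])
    # Tilde detection on the unexpanded word would only see '~{alice,bob}/src',
    # which is not a valid user name, so neither ~alice nor ~bob is found.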
@@ -583,8 +583,8 @@ def ParseSimpleCommand(self):
return None
if prefix_bindings: # FOO=bar local spam=eggs not allowed
# Use the location of the first value. TODO: Use the whole word before
# splitting.
# Use the location of the first value. TODO: Use the whole word
# before splitting.
_, _, v0, _ = prefix_bindings[0]
self.AddErrorContext(
'Invalid prefix bindings in assignment: %s', prefix_bindings,
@@ -1214,7 +1214,8 @@ def testCommand(self):
err = _assertParseCommandListError(self, r'ENV1=A ENV2=B local foo=bar')
# This needs more context
err = _assertParseCommandListError(self, 'for ((i=1; i<)); do echo $i; done')
err = _assertParseCommandListError(self,
'for ((i=1; i<)); do echo $i; done')
err = _assertParseCommandListError(self,
'for ((i=1; i<5; ++i)) OOPS echo $i; ERR')
@@ -28,7 +28,6 @@ def testLexMode(self):
print lex_mode_e.DQ
CMD = """\
ls /
ls /home/
@@ -38,7 +37,8 @@ class LexerTest(unittest.TestCase):
def assertTokensEqual(self, left, right):
self.assertTrue(
test_lib.TokensEqual(left, right), 'Expected %r, got %r' % (left, right))
test_lib.TokensEqual(left, right),
'Expected %r, got %r' % (left, right))
def testRead(self):
lexer = _InitLexer(CMD)
@@ -89,7 +89,7 @@ module osh
| BracedIntRangePart(int start, int end, braced_step? step)
-- {a..f} or {a..f..2} or {a..f..-2}
| BracedCharRangePart(string start, string end, braced_step? step)
-- extended globs are parsed statically, unlike globs
-- extended globs are parsed statically, unlike globs
| ExtGlobPart(token op, word* arms)
word =
@@ -945,8 +945,8 @@ def _ReadArrayLiteralPart(self):
return ast.ArrayLiteralPart(words3)
def _ReadCompoundWord(self, eof_type=Id.Undefined_Tok, lex_mode=lex_mode_e.OUTER,
empty_ok=True):
def _ReadCompoundWord(self, eof_type=Id.Undefined_Tok,
lex_mode=lex_mode_e.OUTER, empty_ok=True):
"""
Precondition: Looking at the first token of the first word part
Postcondition: Looking at the token after, e.g. space or operator
@@ -1144,8 +1144,8 @@ def _ReadWord(self, lex_mode):
Kind.VSub, Kind.Lit, Kind.Left, Kind.KW, Kind.Assign, Kind.ControlFlow,
Kind.BoolUnary, Kind.BoolBinary, Kind.ExtGlob):
# We're beginning a word. If we see Id.Lit_Pound, change to
# lex_mode_e.COMMENT and read until end of line. (TODO: How to add comments
# to AST?)
# lex_mode_e.COMMENT and read until end of line. (TODO: How to add
# comments to AST?)
# TODO: Can we do the same thing for Tilde here? Enter a state where we
# look for / too.
@@ -1155,7 +1155,8 @@ def _ReadWord(self, lex_mode):
# NOTE: The # could be the last character in the file. It can't be
# Eof_{RParen,Backtick} because #) and #` are comments.
assert self.token_type in (Id.Ignored_Comment, Id.Eof_Real), self.cur_token
assert self.token_type in (Id.Ignored_Comment, Id.Eof_Real), \
self.cur_token
# The next iteration will go into Kind.Ignored and set lex state to
# lex_mode_e.OUTER/etc.
@@ -59,17 +59,30 @@ bin-pep8() {
}
# Language independent
find-tabs() {
find-src() {
# benchmarks/testdata should be excluded
find . '(' -name _tmp \
-o -name _chroot \
-o -name _deps \
-o -name _devbuild \
-o -name testdata \
-o -name $PY27 \
')' \
-a -prune -o \
'(' -name '*.py' -o -name '*.sh' ')' -a -print |
xargs grep -n $'\t'
')' -a -prune \
-o \
'(' -name '*.py' \
-o -name '*.sh' \
-o -name '*.asdl' \
-o -name '*.[ch]' \
')' -a -print
}
find-tabs() {
find-src | xargs grep -n $'\t'
}
find-long-lines() {
# Exclude URLs
find-src | xargs grep -n '^.\{81\}' | grep -v 'http'
}
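A rough Python equivalent of the new find-long-lines check, for readers who want the logic outside of find/xargs (a sketch only; the shell functions above are what the repo actually uses, and the $PY27 exclusion is omitted because its value is defined elsewhere in the script):

    import os

    SKIP_DIRS = {'_tmp', '_chroot', '_deps', '_devbuild', 'testdata'}
    EXTS = ('.py', '.sh', '.asdl', '.c', '.h')

    def find_long_lines(root='.', limit=80):
        for dirpath, dirnames, filenames in os.walk(root):
            dirnames[:] = [d for d in dirnames if d not in SKIP_DIRS]
            for name in filenames:
                if not name.endswith(EXTS):
                    continue
                path = os.path.join(dirpath, name)
                with open(path) as f:
                    for lineno, line in enumerate(f, 1):
                        # Same 80-column limit and URL exclusion as the grep above.
                        if len(line.rstrip('\n')) > limit and 'http' not in line:
                            print('%s:%d: %s' % (path, lineno, line.rstrip()))

    find_long_lines()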
bin-flake8() {
@@ -1084,7 +1084,8 @@ def DoWordPart(self, node, local_symbols, quoted=False):
self.cursor.SkipUntil(spid + 1)
elif op_id == Id.VSub_Star: # $*
self.f.write('$ifsjoin(Argv)') # PEDANTIC: Depends if quoted or unquoted
# PEDANTIC: Depends if quoted or unquoted
self.f.write('$ifsjoin(Argv)')
self.cursor.SkipUntil(spid + 1)
elif op_id == Id.VSub_Hyphen: # $*
