@@ -64,9 +64,16 @@ Builtin has too many arguments -- but this falls under the errexit rule
Although we might want to highlight the extra args.
Error from stat() system call:
Fatal error from system calls:
fork() could fail in theory
Some are not failures:
stat() [[ -f /tmp/foo ]]
cd /ff chdir() # exit code 1
cat <nonexistent # This is just exit code 1
[[ -f /tmp/foo ]]
Redirects:
Redirect to empty filename/descriptor (or array)
@@ -75,6 +82,15 @@ Redirects:
^~~~~~ break only invalid inside loop, etc.
NotImplementedError
- e.g. for var ref ${!a} (see sketches below)
- bash associative arrays? I think we want most of that
- $"" ?
- |& not yet done
- ;;& for case -- although parsing it is all of the work I guess
- some could be parse time errors too though?
Runtime: Stack Too Deep (catch infinite recursion)
Out of memory: should not happen with OSH, but maybe with Oil
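For reference, rough sketches of the constructs in the NotImplementedError list above (bash 4+ semantics; these are illustrations, not test cases from this change):

ref=foo; foo=bar; echo ${!ref}                  # indirect var ref -> bar
declare -A assoc; assoc[k]=v; echo ${assoc[k]}  # associative array -> v
echo $"hello"                                   # locale-translated string
ls /nonexistent |& wc -l                        # |& pipes stdout and stderr together
case x in x) echo one ;;& ?) echo two ;; esac   # ;;& keeps testing later patterns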
@@ -84,3 +100,21 @@ Runtime Parse Errors
The way bash works, 0x$var can be a hex literal, so var=xx makes this invalid.
Hex/octal/decimal all have this problem.
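A quick illustration of that problem in bash (the exact error text is approximate):

var=ff
echo $((0x$var))   # 255 -- the variable's value forms a valid hex literal
var=xx
echo $((0x$var))   # runtime error, roughly "0xxx: value too great for base"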
Parse Time Errors
-----------------
regcomp() errors.
Need to show a stack trace for "source", like Python does. Prototype this.
Also might show which token caused you to be in the arith parse state, like:
$((echo hi))
^~ ^~
Arith Invalid token
@@ -753,6 +753,15 @@ def Options():
def main(argv):
  # First check if bash is polluting the environment. Tests rely on the
  # environment.
  v = os.getenv('RANDOM')
  if v is not None:
    raise AssertionError('got $RANDOM = %s' % v)
  v = os.getenv('PPID')
  if v is not None:
    raise AssertionError('got $PPID = %s' % v)
  o = Options()
  (opts, argv) = o.parse_args(argv)
spec.sh
@@ -158,9 +158,8 @@ bugs() {
  sh-spec tests/bugs.test.sh ${REF_SHELLS[@]} $OSH "$@"
}
# Regress bugs
blog1() {
  sh-spec tests/blog1.test.sh --osh-failures-allowed 6 \
  sh-spec tests/blog1.test.sh --osh-failures-allowed 4 \
    ${REF_SHELLS[@]} $ZSH $OSH "$@"
}
@@ -169,12 +168,12 @@ comments() {
}
word-split() {
  sh-spec tests/word-split.test.sh \
  sh-spec tests/word-split.test.sh --osh-failures-allowed 1 \
    ${REF_SHELLS[@]} $OSH "$@"
}
word-eval() {
  sh-spec tests/word-eval.test.sh --osh-failures-allowed 2 \
  sh-spec tests/word-eval.test.sh \
    ${REF_SHELLS[@]} $OSH "$@"
}
@@ -250,7 +249,7 @@ command-sub() {
}
pipeline() {
  sh-spec tests/pipeline.test.sh --osh-failures-allowed 1 \
  sh-spec tests/pipeline.test.sh --osh-failures-allowed 2 \
    ${REF_SHELLS[@]} $ZSH $OSH "$@"
}
@@ -281,6 +280,11 @@ posix() {
    ${REF_SHELLS[@]} $OSH "$@"
}
special-vars() {
  sh-spec tests/special-vars.test.sh --osh-failures-allowed 8 \
    ${REF_SHELLS[@]} $OSH "$@"
}
# DONE -- pysh is the most conformant!
tilde() {
  sh-spec tests/tilde.test.sh ${REF_SHELLS[@]} $OSH "$@"
@@ -297,14 +301,14 @@ var-op-other() {
}
var-op-strip() {
  sh-spec tests/var-op-strip.test.sh --osh-failures-allowed 4 \
  sh-spec tests/var-op-strip.test.sh --osh-failures-allowed 5 \
    ${REF_SHELLS[@]} $OSH "$@"
}
var-sub() {
  # NOTE: ZSH has interesting behavior, like echo hi > "$@" can write to TWO
  # FILES! But ultimately we don't really care, so I disabled it.
  sh-spec tests/var-sub.test.sh --osh-failures-allowed 4 \
  sh-spec tests/var-sub.test.sh --osh-failures-allowed 2 \
    ${REF_SHELLS[@]} $OSH "$@"
}
@@ -335,7 +339,12 @@ arith-context() {
}
array() {
  sh-spec tests/array.test.sh --osh-failures-allowed 20 \
  sh-spec tests/array.test.sh --osh-failures-allowed 14 \
    $BASH $MKSH $OSH "$@"
}
array-compat() {
  sh-spec tests/array-compat.test.sh --osh-failures-allowed 3 \
    $BASH $MKSH $OSH "$@"
}
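Usage sketch: assuming spec.sh follows the project's usual convention of dispatching "$@" to these functions at the end of the file (that line is outside this hunk), individual suites can be run directly:

./spec.sh blog1          # run one suite, honoring --osh-failures-allowed 4
./spec.sh array-compat   # run the new decay tests against bash, mksh, and osh
# any extra arguments are forwarded to sh-spec via the trailing "$@"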
@@ -0,0 +1,26 @@
#!/bin/bash
#
# Arrays decay upon assignment (without splicing) and in equality comparisons.
# This will not be true in Oil -- arrays will be first class.
### Assignment Causes Array Decay
set -- x y z
#argv "[$@]" # NOT DECAYED here.
var="[$@]"
argv "$var"
# stdout: ['[x y z]']
### User arrays decay
declare -a a b
a=(x y z)
b="${a[@]}" # this collapses to a string
c=("${a[@]}") # this preserves the array
c[1]=YYY # mutate a copy -- doesn't affect the original
argv.py "${a[@]}" "${b[@]}" "${c[@]}"
# stdout: ['x', 'y', 'z', 'x y z', 'x', 'YYY', 'z']
### $a gives first element of array
a=(1 '2 3')
echo $a
# stdout: 1
@@ -5,11 +5,6 @@
### SETUP
a=(1 '2 3')
### $a gives first element of array
a=(1 '2 3')
echo $a
# stdout: 1
### "${a[@]}" and "${a[*]}"
a=(1 '2 3')
argv.py "${a[@]}" "${a[*]}"
@@ -135,15 +130,6 @@ a=(0 "${a[@]}" '4 5')
argv.py "${a[@]}"
# stdout: ['0', '1', '2 3', '4 5']
### Arrays can't be copied directly
declare -a a b
a=(x y z)
b="${a[@]}" # this collapses to a string
c=("${a[@]}") # this preserves the array
c[1]=YYY # mutate a copy -- doesn't affect the original
argv.py "${a[@]}" "${b[@]}" "${c[@]}"
# stdout: ['x', 'y', 'z', 'x y z', 'x', 'YYY', 'z']
### Exporting array doesn't do anything, not even first element
# bash parses, but doesn't execute.
# mksh gives syntax error -- parses differently with 'export'
@@ -250,12 +236,13 @@ argv.py ${empty[@]:-not one} "${empty[@]:-not one}"
# stdout: ['not', 'one', 'not one']
### Single array with :-
# bash does EMPTY ELISION here, unless it's double quoted. Looks like mksh has
# the sane behavior.
# bash does EMPTY ELISION here, unless it's double quoted. mksh has
# more sane behavior. OSH is better.
single=('')
argv.py ${single[@]:-none} "${single[@]:-none}"
# stdout: ['none', '']
# OK mksh stdout: ['none', 'none']
argv.py ${single[@]:-none} x "${single[@]:-none}"
# OK osh stdout: ['x', '']
# OK bash stdout: ['none', 'x', '']
# OK mksh stdout: ['none', 'x', 'none']
### Stripping a whole array unquoted
# Problem: it joins it first.
@@ -301,9 +288,8 @@ echo ${s[@]}
set -- 'a b' 'c'
array1=('x y' 'z')
array2=("$@")
array3="$@" # Without splicing with (), this one is flattened
argv.py "${array1[@]}" "${array2[@]}" "${array3[@]}"
# stdout: ['x y', 'z', 'a b', 'c', 'a b c']
argv.py "${array1[@]}" "${array2[@]}"
# stdout: ['x y', 'z', 'a b', 'c']
### Tilde expansion within array
HOME=/home/bob
@@ -116,10 +116,14 @@ echo _tmp/*.[[:punct:]] _tmp/*.[[:punct\:]]
### Redirect to glob, not evaluated
# This writes to the literal file _tmp/*.F, not _tmp/f.F
rm _tmp/*.F
touch _tmp/f.F
echo foo > _tmp/*.F
cat '_tmp/*.F'
# status: 0
# stdout: foo
# BUG bash status: 1
# BUG bash stdout-json: ""
### Glob after var manipulation
touch _tmp/foo.zzz _tmp/bar.zzz
@@ -2,10 +2,21 @@
#
# Tests for pipelines.
### Basic
### Brace group in pipeline
{ echo one; echo two; } | tac
# stdout-json: "two\none\n"
### For loop in pipeline
for w in one two; do
echo $w
done | tac
# stdout-json: "two\none\n"
### Exit code is last status
expr $0 : '.*/osh$' && exit 99 # Disabled because of spec-runner.sh issue
echo a | egrep '[0-9]+'
# status: 1
### |&
stdout_stderr.py |& cat
# stdout-json: "STDERR\nSTDOUT\n"
@@ -102,7 +102,7 @@ do
echo $x
x=$(($x-1))
done
# stdout-json "3\n2\n1\n"
# stdout-json: "3\n2\n1\n"
### Newlines in compound lists
x=3
@@ -0,0 +1,69 @@
#!/bin/bash
### $?
echo $? # starts out as 0
sh -c 'exit 33'
echo $?
# stdout-json: "0\n33\n"
# status: 0
### $#
set -- 1 2 3 4
echo $#
# stdout: 4
# status: 0
### $-
# dash's behavior seems most sensible here?
$SH -o nounset -c 'echo $-'
# OK bash stdout: huBc
# OK dash stdout: u
# OK mksh stdout: uhc
# status: 0
### $_
# This is bash-specific.
echo hi
echo $_
# stdout-json: "hi\nhi\n"
# N-I dash/mksh stdout-json: "hi\n\n"
### PID $$
# Just test that it has decimal digits
echo $$ | egrep '[0-9]+'
# status: 0
### Background PID $!
# Just test that it has decimal digits
sleep 0.01 &
echo $! | egrep '[0-9]+'
wait
# status: 0
### $PPID
expr $0 : '.*/osh$' && exit 99 # Disabled because of spec-runner.sh issue
echo $PPID | egrep '[0-9]+'
# Disabled because of spec-runner.sh issue: bash sets it for osh
# status: 0
# NOTE: There is also $BASHPID
### $PIPESTATUS
echo hi | sh -c 'cat; exit 33' | wc -l >/dev/null
argv.py "${PIPESTATUS[@]}"
# status: 0
# stdout: ['0', '33', '0']
# N-I dash stdout-json: ""
# N-I dash status: 2
### $PWD
cd /
echo $PWD
# status: 0
# stdout: /
### $RANDOM
expr $0 : '.*/osh$' && exit 99 # Disabled because of spec-runner.sh issue
echo $RANDOM | egrep '[0-9]+'
# status: 0
# N-I dash status: 1
@@ -7,6 +7,16 @@ v=foo
echo ${#v}
# stdout: 3
### Length of undefined variable
echo ${#undef}
# stdout: 0
### Length of undefined variable with nounset
set -o nounset
echo ${#undef}
# status: 1
# OK dash status: 2
### Cannot take length of substring slice
# These are runtime errors, but we could make them parse time errors.
v=abcde
@@ -1,21 +1,42 @@
#!/bin/bash
### Remove smallest suffix
### Remove const suffix
v=abcd
echo ${v%d} ${v%%cd}
# stdout: abc ab
### Remove const prefix
v=abcd
echo ${v#a} ${v##ab}
# stdout: bcd cd
### Remove vectorized const suffix
set -- 1a 2a 3a
argv.py ${@%a}
# stdout: ['1', '2', '3']
# N-I dash stdout: ['1a', '2a', '3']
# N-I mksh stdout-json: ""
### Remove const suffix from undefined
echo ${undef%suffix}
# stdout:
### Remove smallest glob suffix
v=aabbccdd
echo ${v%c*}
# stdout: aabbc
### Remove longest suffix
### Remove longest glob suffix
v=aabbccdd
echo ${v%%c*}
# stdout: aabb
### Remove smallest prefix
### Remove smallest glob prefix
v=aabbccdd
echo ${v#*b}
# stdout: bccdd
### Remove longest prefix
### Remove longest glob prefix
v=aabbccdd
echo ${v##*b}
# stdout: ccdd
@@ -13,41 +13,6 @@ echo ${foo:-$({ which ls; })}
# BUG bash stdout-json: ""
# BUG bash status: 2
### Assigning $@ to var
# dash doesn't like this -- says '2' bad variable name.
# NOTE: bash and mksh support array variables! This is probably the
# difference. Need to test array semantics!
func() {
local v=$@
argv.py $v
}
func 1 2 3
# stdout: ['1', '2', '3']
# BUG dash status: 2
# BUG dash stdout-json: ""
### Assigning "$@" to var
# dash doesn't like this -- says '2 3' bad variable name.
func() {
local v="$@"
argv.py $v
}
func 1 '2 3'
# stdout: ['1', '2', '3']
# BUG dash status: 2
# BUG dash stdout-json: ""
### Assigning "$@" to var, then showing it quoted
# dash doesn't like this -- says '2 3' bad variable name.
func() {
local v="$@"
argv.py "$v"
}
func 1 '2 3'
# stdout: ['1 2 3']
# BUG dash status: 2
# BUG dash stdout-json: ""
### Filename redirect with "$@"
# bash - ambiguous redirect -- yeah I want this error
# - But I want it at PARSE time? So is there a special DollarAtPart?
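The test body itself falls outside this hunk, but as a sketch of the behavior that comment refers to (bash's exact error text varies by version):

set -- foo bar
echo hi > "$@"   # bash: "ambiguous redirect" -- the target expands to two words
                 # (zsh, as noted in spec.sh, can write to both files instead)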
@@ -46,18 +46,6 @@ s1=''
argv.py $s1 - "$s1"
# stdout: ['-', '']
### Word elision with space
s1=' '
argv.py $s1
# stdout: []
### Word elision with non-whitespace IFS
# Treated differently than the default IFS. What is the rule here?
IFS=_
s1='_'
argv.py $s1
# stdout: ['']
### Default values -- more cases
argv ${undef:-hi} ${undef:-'a b'} "${undef:-c d}" "${un:-"e f"}" "${un:-'g h'}"
# stdout: ['hi', 'a b', 'c d', 'e f', "'g h'"]
@@ -73,10 +61,3 @@ touch '_tmp/[bc]ar.mm' # file that looks like a glob pattern
touch _tmp/bar.mm _tmp/car.mm
argv '_tmp/[bc]'*.mm - _tmp/?ar.mm
# stdout: ['_tmp/[bc]ar.mm', '-', '_tmp/bar.mm', '_tmp/car.mm']
### Assignment Causes Array Decay
set -- x y z
#argv "[$@]" # NOT DECAYED here.
var="[$@]"
argv "$var"
# stdout: ['[x y z]']
@@ -55,6 +55,18 @@ func() { argv.py "-$@-"; }
func "a 1" "b 2" "c 3"
# stdout: ['-a 1', 'b 2', 'c 3-']
### Word elision with space
s1=' '
argv.py $s1
# stdout: []
### Word elision with non-whitespace IFS
# Treated differently than the default IFS. What is the rule here?
IFS=_
s1='_'
argv.py $s1
# stdout: ['']
### empty $@ and $* is elided
func() { argv.py 1 $@ $* 2; }
func