Commit

added Action.Split
rsteube committed Jul 26, 2023
1 parent 22fc1c6 commit 63849a3
Showing 8 changed files with 920 additions and 0 deletions.
30 changes: 30 additions & 0 deletions action.go
@@ -9,6 +9,7 @@ import (

"github.com/rsteube/carapace/internal/cache"
"github.com/rsteube/carapace/internal/common"
"github.com/rsteube/carapace/internal/lexer"
pkgcache "github.com/rsteube/carapace/pkg/cache"
"github.com/rsteube/carapace/pkg/style"
)
@@ -294,3 +295,32 @@ func (a Action) UsageF(f func() string) Action {
        return a
    })
}

// Split splits `Context.Value` using a shell lexer.
func (a Action) Split() Action {
    return ActionCallback(func(c Context) Action {
        tokenset, err := lexer.Split(c.Value)
        if err != nil {
            return ActionMessage(err.Error())
        }

        // Complete only the last token; the preceding ones become positional arguments.
        c.Args = tokenset.Tokens[:len(tokenset.Tokens)-1]
        c.Parts = []string{}
        c.Value = tokenset.Tokens[len(tokenset.Tokens)-1]
        invoked := a.Invoke(c)
        for index, value := range invoked.rawValues {
            if !invoked.meta.Nospace.Matches(value.Value) {
                // Requote each value to match the quoting state of the last token.
                switch tokenset.State {
                case lexer.OPEN_DOUBLE:
                    invoked.rawValues[index].Value = fmt.Sprintf(`"%v" `, strings.Replace(value.Value, `"`, `\"`, -1))
                case lexer.OPEN_SINGLE:
                    invoked.rawValues[index].Value = fmt.Sprintf(`'%v' `, strings.Replace(value.Value, `'`, `'"'"'`, -1))
                default:
                    invoked.rawValues[index].Value = strings.Replace(value.Value, ` `, `\ `, -1) + ` `
                }
            }
        }
        invoked.Prefix(tokenset.Prefix) // re-attach the consumed prefix to each completion value
        return invoked.ToA().NoSpace()
    })
}
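
A typical use is completing a flag whose value is itself a command line. A minimal sketch (the `exec` flag name and the completed values are illustrative, not part of this commit):

carapace.Gen(cmd).FlagCompletion(carapace.ActionMap{
    "exec": carapace.ActionCallback(func(c carapace.Context) carapace.Action {
        // After Split(), c.Args holds the already-typed tokens of the
        // embedded command line and c.Value the token being completed.
        return carapace.ActionValues("status", "stash", "stage")
    }).Split(),
})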
9 changes: 9 additions & 0 deletions example/cmd/modifier.go
@@ -33,6 +33,7 @@ func init() {
    modifierCmd.Flags().String("prefix", "", "Prefix()")
    modifierCmd.Flags().String("retain", "", "Retain()")
    modifierCmd.Flags().String("shift", "", "Shift()")
    modifierCmd.Flags().String("split", "", "Split()")
    modifierCmd.Flags().String("style", "", "Style()")
    modifierCmd.Flags().String("stylef", "", "StyleF()")
    modifierCmd.Flags().String("styler", "", "StyleR()")
@@ -123,6 +124,14 @@ func init() {
"shift": carapace.ActionCallback(func(c carapace.Context) carapace.Action {
return carapace.ActionMessage("%#v", c.Args)
}).Shift(1),
"split": carapace.ActionCallback(func(c carapace.Context) carapace.Action {
args := []string{"_carapace", "export", ""}
args = append(args, c.Args...)
args = append(args, c.Value)
return carapace.ActionExecCommand("example", args...)(func(output []byte) carapace.Action {
return carapace.ActionImport(output)
})
}).Split(),
"style": carapace.ActionValues(
"one",
"two",
62 changes: 62 additions & 0 deletions internal/lexer/lexer.go
@@ -0,0 +1,62 @@
package lexer

import (
    "strings"

    "github.com/rsteube/carapace/third_party/github.com/google/shlex"
)

// State describes the quoting state at the end of the lexed input.
type State int

const (
    UNQUOTED State = iota
    OPEN_DOUBLE
    OPEN_SINGLE
)

// Tokenset contains the result of lexing a (possibly incomplete) command line.
type Tokenset struct {
    Tokens []string // lexed tokens, the last one being the word to complete
    Prefix string   // input up to the start of the last token
    State  State    // quoting state of the last token
}

// Split splits s into tokens. An unterminated quote is tolerated by
// retrying with a synthetic closing quote and recording the open state.
func Split(s string) (*Tokenset, error) {
    tokenset, err := split(s)
    if err != nil && err.Error() == "EOF found when expecting closing quote" {
        tokenset, err = split(s + `_"`)
        if err == nil {
            last := tokenset.Tokens[len(tokenset.Tokens)-1]
            tokenset.Tokens[len(tokenset.Tokens)-1] = last[:len(last)-1] // strip sentinel `_`
            tokenset.Prefix = tokenset.Prefix[:len(tokenset.Prefix)-1]   // strip opening quote
            tokenset.State = OPEN_DOUBLE
        }
    }
    if err != nil && err.Error() == "EOF found when expecting closing quote" {
        tokenset, err = split(s + `_'`)
        if err == nil {
            last := tokenset.Tokens[len(tokenset.Tokens)-1]
            tokenset.Tokens[len(tokenset.Tokens)-1] = last[:len(last)-1] // strip sentinel `_`
            tokenset.Prefix = tokenset.Prefix[:len(tokenset.Prefix)-1]   // strip opening quote
            tokenset.State = OPEN_SINGLE
        }
    }
    return tokenset, err
}

func split(s string) (*Tokenset, error) {
    splitted, err := shlex.Split(s)
    if strings.HasSuffix(s, " ") {
        // A trailing space starts a new, still empty token.
        splitted = append(splitted, "")
    }
    if err != nil {
        return nil, err
    }

    if len(splitted) == 0 {
        splitted = []string{""}
    }
    return &Tokenset{
        Tokens: splitted,
        Prefix: s[:strings.LastIndex(s, splitted[len(splitted)-1])],
    }, nil
}
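
A short illustration of the open-quote handling (hypothetical snippet; since the package lives under internal/, it only compiles within the carapace module, e.g. in a test):

tokenset, _ := lexer.Split(`example "a`)
fmt.Printf("%q\n", tokenset.Tokens)              // ["example" "a"]
fmt.Printf("%q\n", tokenset.Prefix)              // "example "
fmt.Println(tokenset.State == lexer.OPEN_DOUBLE) // true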
99 changes: 99 additions & 0 deletions internal/lexer/lexer_test.go
@@ -0,0 +1,99 @@
package lexer

import (
    "encoding/json"
    "testing"

    "github.com/rsteube/carapace/internal/assert"
)

func TestSplit(t *testing.T) {
    _test := func(s string, expected Tokenset) {
        t.Run(s, func(t *testing.T) {
            tokenset, err := Split(s)
            if err != nil {
                t.Error(err.Error())
            }

            expected, _ := json.MarshalIndent(expected, "", " ")
            actual, _ := json.MarshalIndent(tokenset, "", " ")
            assert.Equal(t, string(expected), string(actual))
        })
    }

    _test(``, Tokenset{
        Tokens: []string{""},
    })

    _test(` `, Tokenset{
        Tokens: []string{""},
        Prefix: ` `,
    })

    _test(`"example`, Tokenset{
        Tokens: []string{"example"},
        State:  OPEN_DOUBLE,
    })

    _test(`'example`, Tokenset{
        Tokens: []string{"example"},
        State:  OPEN_SINGLE,
    })

    _test(`example a`, Tokenset{
        Tokens: []string{"example", "a"},
        Prefix: `example `,
    })

_test(`example "a`, Tokenset{
Tokens: []string{"example", "a"},
Prefix: `example `,
State: OPEN_DOUBLE,
})

_test(`example 'a`, Tokenset{
Tokens: []string{"example", "a"},
Prefix: `example `,
State: OPEN_SINGLE,
})

_test(`example action `, Tokenset{
Tokens: []string{"example", "action", ""},
Prefix: `example action `,
})

_test(`example action -`, Tokenset{
Tokens: []string{"example", "action", "-"},
Prefix: `example action `,
})

_test(`example action --`, Tokenset{
Tokens: []string{"example", "action", "--"},
Prefix: `example action `,
})

_test(`example action - `, Tokenset{
Tokens: []string{"example", "action", "-", ""},
Prefix: `example action - `,
})

_test(`example action -- `, Tokenset{
Tokens: []string{"example", "action", "--", ""},
Prefix: `example action -- `,
})

_test(`example "action" -- `, Tokenset{
Tokens: []string{"example", "action", "--", ""},
Prefix: `example "action" -- `,
})

_test(`example 'action' -- `, Tokenset{
Tokens: []string{"example", "action", "--", ""},
Prefix: `example 'action' -- `,
})
}
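
The new test suite can be run in isolation with: go test ./internal/lexer/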
