Skip to content

Commit

Permalink
Add local buffer autocompletion.
Browse files Browse the repository at this point in the history
  • Loading branch information
nsf committed Sep 16, 2012
1 parent 1212cb6 commit 66ff504
Show file tree
Hide file tree
Showing 10 changed files with 318 additions and 3 deletions.
5 changes: 5 additions & 0 deletions README
Expand Up @@ -26,6 +26,7 @@ Global keys:
C-x - Extended mode
M-g - Go to line
C-s - I-search forward mode
M-/ - Local autocompletion mode

Extended mode:
C-c - Quit from godit
Expand Down Expand Up @@ -93,3 +94,7 @@ Redo mode:
I-search forward mode:
Type text - Incrementally search for a word
C-s - Next, please

Local autocompletion mode:
M-/ - Substitute next proposal
Any other key - Dispatch key to default path and quit mode
3 changes: 3 additions & 0 deletions action.go
Expand Up @@ -147,6 +147,9 @@ func (a *action) do(v *view, what action_type) {
}
}
v.dirty = dirty_everything

// any change to the buffer causes words cache invalidation
v.buf.words_cache_valid = false
}

func (a *action) last_line() *line {
Expand Down
62 changes: 62 additions & 0 deletions autocomplete.go
Expand Up @@ -306,6 +306,68 @@ func (ac *autocompl) finalize(view *view) {
view.move_cursor_to(ac.current)
}

//----------------------------------------------------------------------------
// local buffer autocompletion
//----------------------------------------------------------------------------

// local_ac computes completion proposals for the word immediately before
// the cursor, drawing on words from the current view's buffer and the
// word caches of all other open buffers.
//
// It returns the proposals together with the length (in runes) of the
// prefix a substitution should replace; the length is 0 when no word
// precedes the cursor (prefix == nil).
func local_ac(view *view) ([]ac_proposal, int) {
	var dups llrb_tree   // words already proposed (deduplication set)
	var others llrb_tree // matching words gathered from other buffers
	proposals := make([]ac_proposal, 0, 100)
	prefix := view.cursor.word_under_cursor()

	// update word caches
	view.other_buffers(func(buf *buffer) {
		buf.update_words_cache()
	})

	// collect appends proposals in two stages: first the words of the
	// current view (via collect_words), then the cached words of every
	// other buffer, deduplicated through 'dups' and emitted in tree-walk
	// order via 'others'.
	collect := func(ignorecase bool) {
		words := view.collect_words([][]byte(nil), &dups, ignorecase)
		for _, word := range words {
			proposals = append(proposals, ac_proposal{
				display: word,
				content: word,
			})
		}

		// the prefix is lowered only for matching; inserted words keep
		// their original case
		lprefix := prefix
		if ignorecase {
			lprefix = bytes.ToLower(prefix)
		}
		view.other_buffers(func(buf *buffer) {
			buf.words_cache.walk(func(word []byte) {
				lword := word
				if ignorecase {
					lword = bytes.ToLower(word)
				}
				if bytes.HasPrefix(lword, lprefix) {
					// skip words that have already been proposed
					ok := dups.insert_maybe(word)
					if !ok {
						return
					}
					others.insert_maybe(word)
				}
			})
		})
		others.walk(func(word []byte) {
			proposals = append(proposals, ac_proposal{
				display: word,
				content: word,
			})
		})
		others.clear()
	}
	// try case-sensitive matching first; fall back to case-insensitive
	// only when it yields nothing. Note 'dups' persists across both
	// passes, so the fallback cannot re-add first-pass entries.
	collect(false)
	if len(proposals) == 0 {
		collect(true)
	}

	if prefix != nil {
		return proposals, utf8.RuneCount(prefix)
	}
	return proposals, 0
}

//----------------------------------------------------------------------------
// gocode autocompletion
//----------------------------------------------------------------------------
Expand Down
69 changes: 69 additions & 0 deletions autocomplete_mode.go
@@ -0,0 +1,69 @@
package main

import (
"github.com/nsf/termbox-go"
)

// autocomplete_mode is an overlay mode that cycles through local-buffer
// completion proposals for the word before the cursor (bound to M-/).
type autocomplete_mode struct {
	stub_overlay_mode
	godit      *godit          // owning editor instance
	origin     cursor_location // cursor position where completion was initiated
	proposals  []ac_proposal   // candidate completions produced by local_ac
	prefix_len int             // length (in runes) of the typed prefix being replaced
	current    int             // index of the active proposal, -1 when none is applied
}

// init_autocomplete_mode creates the overlay mode for the currently
// active view and immediately applies the first proposal.
func init_autocomplete_mode(godit *godit) *autocomplete_mode {
	v := godit.active.leaf
	m := &autocomplete_mode{
		godit:   godit,
		origin:  v.cursor,
		current: -1,
	}
	m.proposals, m.prefix_len = local_ac(v)
	m.substitute_next()
	return m
}

// substitute_next replaces the word at the origin with the next
// completion proposal. The previous substitution (if any) is undone
// first, so repeated calls cycle through the proposals; once the list
// is exhausted, the index resets to -1 and a status message is shown.
func (a *autocomplete_mode) substitute_next() {
	view := a.godit.active.leaf
	if a.current != -1 {
		// undo previous substitution
		view.undo()
		a.godit.set_status("") // hide undo status message
	}

	a.current++
	if a.current >= len(a.proposals) {
		a.current = -1
		a.godit.set_status("No further expansions found")
		return
	}

	// create a new one; the delete+insert pair is bracketed by
	// finalize_action_group calls, presumably so a single undo reverts
	// the whole substitution — confirm against view's action semantics
	c := view.cursor
	view.finalize_action_group()
	if a.prefix_len != 0 {
		// remove the typed prefix before inserting the proposal
		c.move_one_word_backward()
		wlen := a.origin.boffset - c.boffset
		view.action_delete(c, wlen)
	}
	newword := clone_byte_slice(a.proposals[a.current].content)
	view.action_insert(c, newword)
	view.last_vcommand = vcommand_none
	view.dirty = dirty_everything
	// place the cursor right after the inserted word
	c.boffset += len(newword)
	view.move_cursor_to(c)
	view.finalize_action_group()
}

// on_key handles M-/ by cycling to the next proposal; any other key
// terminates the mode and is dispatched through the default path.
func (a *autocomplete_mode) on_key(ev *termbox.Event) {
	isAltSlash := ev.Mod&termbox.ModAlt != 0 && ev.Ch == '/'
	if !isAltSlash {
		// leave the mode and let godit process the key normally
		g := a.godit
		g.set_overlay_mode(nil)
		g.on_key(ev)
		return
	}
	a.substitute_next()
}
24 changes: 24 additions & 0 deletions buffer.go
Expand Up @@ -66,6 +66,10 @@ type buffer struct {
// buffer name (displayed in the status line), must be unique,
// uniqueness is maintained by godit methods
name string

// cache for local buffer autocompletion
words_cache llrb_tree
words_cache_valid bool
}

func new_empty_buffer() *buffer {
Expand Down Expand Up @@ -244,6 +248,26 @@ func (b *buffer) contents() []byte {
return data
}

// refill_words_cache rebuilds the word cache from scratch by scanning
// every line of the buffer for words.
func (b *buffer) refill_words_cache() {
	b.words_cache.clear()
	for l := b.first_line; l != nil; l = l.next {
		iter_words(l.data, func(word []byte) {
			b.words_cache.insert_maybe(word)
		})
	}
}

// update_words_cache lazily refreshes the word cache: it is a no-op
// while the cache is valid and rebuilds it otherwise.
func (b *buffer) update_words_cache() {
	if !b.words_cache_valid {
		b.refill_words_cache()
		b.words_cache_valid = true
	}
}

//----------------------------------------------------------------------------
// buffer_reader
//----------------------------------------------------------------------------
Expand Down
18 changes: 18 additions & 0 deletions cursor_location.go
Expand Up @@ -167,6 +167,24 @@ func (c *cursor_location) move_end_of_line() {
c.boffset = len(c.line.data)
}

// word_under_cursor returns the bytes of the word that ends at the
// cursor position (the word immediately before the cursor), or nil
// when the cursor is not preceded by a word rune on this line.
func (c *cursor_location) word_under_cursor() []byte {
	end, beg := *c, *c
	r, rlen := beg.rune_before()
	if r == utf8.RuneError {
		// NOTE(review): rune_before yields RuneError presumably at the
		// start of the buffer or on invalid encoding — confirm
		return nil
	}

	// walk backwards over word runes, stopping at the beginning of the
	// line or at the first non-word rune
	for is_word(r) && !beg.bol() {
		beg.boffset -= rlen
		r, rlen = beg.rune_before()
	}

	if beg.boffset == end.boffset {
		// no word runes were consumed
		return nil
	}
	return c.line.data[beg.boffset:end.boffset]
}

// returns true if the move was successful, false if EOF reached.
func (c *cursor_location) move_one_word_forward() bool {
// move cursor forward until the first word rune is met
Expand Down
4 changes: 4 additions & 0 deletions godit.go
Expand Up @@ -302,6 +302,9 @@ func (g *godit) on_alt_key(ev *termbox.Event) bool {
case 'g':
g.set_overlay_mode(init_line_edit_mode(g, g.goto_line_lemp()))
return true
case '/':
g.set_overlay_mode(init_autocomplete_mode(g))
return true
}
return false
}
Expand Down Expand Up @@ -499,6 +502,7 @@ func (g *godit) view_context() view_context {
g.set_status(f, args...)
},
kill_buffer: &g.killbuffer,
buffers: &g.buffers,
}
}

Expand Down
16 changes: 15 additions & 1 deletion llrb_tree.go
Expand Up @@ -43,12 +43,17 @@ func (t *llrb_tree) clear_recursive(n *llrb_node) {
t.free_node(n)
}

func (t *llrb_tree) insert_maybe(value []byte) {
// walk visits every value stored in the tree in-order, invoking cb once
// per value. Walking an empty tree is a no-op.
func (t *llrb_tree) walk(cb func(value []byte)) {
	t.root.walk(cb)
}

// insert_maybe adds value to the tree unless an equal value is already
// present, and reports whether a new node was actually inserted.
func (t *llrb_tree) insert_maybe(value []byte) bool {
	root, inserted := t.root.insert_maybe(value)
	t.root = root
	if inserted {
		t.count++
	}
	return inserted
}

func (t *llrb_tree) insert_maybe_recursive(n *llrb_node, value []byte) (*llrb_node, bool) {
Expand Down Expand Up @@ -95,6 +100,15 @@ type llrb_node struct {
color bool
}

// walk performs an in-order traversal of the subtree rooted at n,
// calling cb for each stored value. A nil receiver is a no-op.
func (n *llrb_node) walk(cb func(value []byte)) {
	if n != nil {
		n.left.walk(cb)
		cb(n.value)
		n.right.walk(cb)
	}
}

func (n *llrb_node) rotate_left() *llrb_node {
x := n.right
n.right = x.left
Expand Down
57 changes: 57 additions & 0 deletions utils.go
Expand Up @@ -8,8 +8,65 @@ import (
"strconv"
"strings"
"unicode"
"unicode/utf8"
)

// iter_words scans data left to right and invokes cb for every maximal
// run of word runes (as classified by is_word). Non-word runes act as
// separators and are never passed to cb.
func iter_words(data []byte, cb func(word []byte)) {
	for len(data) > 0 {
		r, size := utf8.DecodeRune(data)

		// skip leading non-word runes
		for !is_word(r) && len(data) > 0 {
			data = data[size:]
			r, size = utf8.DecodeRune(data)
		}
		if len(data) == 0 {
			return
		}

		// r is a word rune here; advance to the end of the word
		end := 0
		for is_word(r) && end < len(data) {
			end += size
			r, size = utf8.DecodeRune(data[end:])
		}
		cb(data[:end])
		data = data[end:]
	}
}

// iter_words_backward scans data right to left and invokes cb for every
// maximal run of word runes (as classified by is_word), yielding the
// words in reverse document order. Non-word runes act as separators.
func iter_words_backward(data []byte, cb func(word []byte)) {
	for len(data) > 0 {
		r, size := utf8.DecodeLastRune(data)

		// skip trailing non-word runes
		for !is_word(r) && len(data) > 0 {
			data = data[:len(data)-size]
			r, size = utf8.DecodeLastRune(data)
		}
		if len(data) == 0 {
			return
		}

		// r is a word rune here; retreat to the beginning of the word
		beg := len(data)
		for is_word(r) && beg > 0 {
			beg -= size
			r, size = utf8.DecodeLastRune(data[:beg])
		}
		cb(data[beg:])
		data = data[:beg]
	}
}

func readdir_stat(dir string, f *os.File) ([]os.FileInfo, error) {
names, err := f.Readdirnames(-1)
if err != nil {
Expand Down

0 comments on commit 66ff504

Please sign in to comment.