Skip to content

Commit

Permalink
Merge pull request from GHSA-h3m7-rqc4-7h9p
Browse files Browse the repository at this point in the history
fix chunking integer overflow
  • Loading branch information
jzelinskie committed Mar 1, 2024
2 parents 57206a1 + ab7b530 commit ef443c4
Show file tree
Hide file tree
Showing 3 changed files with 159 additions and 7 deletions.
99 changes: 99 additions & 0 deletions internal/dispatch/graph/dispatch_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
package graph

import (
"fmt"
"math"
"testing"

"github.com/authzed/spicedb/internal/dispatch"
"github.com/authzed/spicedb/internal/graph"
core "github.com/authzed/spicedb/pkg/proto/core/v1"
v1 "github.com/authzed/spicedb/pkg/proto/dispatch/v1"
"github.com/authzed/spicedb/pkg/tuple"

"github.com/stretchr/testify/require"
)

// TestDispatchChunking exercises check, lookup-resources, and lookup-subjects
// dispatch against a relation whose subject set is larger than MaxUint16,
// covering the internal chunking paths.
func TestDispatchChunking(t *testing.T) {
	schema := `
definition user {
relation self: user
}
definition res {
relation owner : user
permission view = owner->self
}`

	// One owner relationship per user on a single resource, plus a self edge
	// for every user, so owner->self fans out past the uint16 boundary.
	const total = math.MaxUint16 + 1
	resources := make([]*core.RelationTuple, 0, total)
	enabled := make([]*core.RelationTuple, 0, total)
	for i := 0; i < total; i++ {
		resources = append(resources, tuple.Parse(fmt.Sprintf("res:res1#owner@user:user%d", i)))
		enabled = append(enabled, tuple.Parse(fmt.Sprintf("user:user%d#self@user:user%d", i, i)))
	}

	ctx, dispatcher, revision := newLocalDispatcherWithSchemaAndRels(t, schema, append(enabled, resources...))

	t.Run("check", func(t *testing.T) {
		// A single representative resource tuple is enough to force the
		// dispatcher to walk the oversized subject set.
		first := resources[0]
		checkResult, err := dispatcher.DispatchCheck(ctx, &v1.DispatchCheckRequest{
			ResourceRelation: RR(first.ResourceAndRelation.Namespace, "view"),
			ResourceIds:      []string{first.ResourceAndRelation.ObjectId},
			ResultsSetting:   v1.DispatchCheckRequest_ALLOW_SINGLE_RESULT,
			Subject:          ONR(first.Subject.Namespace, first.Subject.ObjectId, graph.Ellipsis),
			Metadata: &v1.ResolverMeta{
				AtRevision:     revision.String(),
				DepthRemaining: 50,
			},
		})

		require.NoError(t, err)
		require.NotNil(t, checkResult)
		require.NotEmpty(t, checkResult.ResultsByResourceId, "expected membership for resource %s", first.ResourceAndRelation.ObjectId)
		require.Equal(t, v1.ResourceCheckResult_MEMBER, checkResult.ResultsByResourceId[first.ResourceAndRelation.ObjectId].Membership)
	})

	t.Run("lookup-resources", func(t *testing.T) {
		first := resources[0]
		stream := dispatch.NewCollectingDispatchStream[*v1.DispatchLookupResourcesResponse](ctx)
		err := dispatcher.DispatchLookupResources(&v1.DispatchLookupResourcesRequest{
			ObjectRelation: RR(first.ResourceAndRelation.Namespace, "view"),
			Subject:        ONR(first.Subject.Namespace, first.Subject.ObjectId, graph.Ellipsis),
			Metadata: &v1.ResolverMeta{
				AtRevision:     revision.String(),
				DepthRemaining: 50,
			},
			OptionalLimit: veryLargeLimit,
		}, stream)

		require.NoError(t, err)

		// Only the single resource should come back, despite the huge fan-out.
		foundResources, _, _, _ := processResults(stream)
		require.Len(t, foundResources, 1)
	})

	t.Run("lookup-subjects", func(t *testing.T) {
		first := resources[0]
		stream := dispatch.NewCollectingDispatchStream[*v1.DispatchLookupSubjectsResponse](ctx)

		err := dispatcher.DispatchLookupSubjects(&v1.DispatchLookupSubjectsRequest{
			ResourceRelation: RR(first.ResourceAndRelation.Namespace, "view"),
			ResourceIds:      []string{first.ResourceAndRelation.ObjectId},
			SubjectRelation:  RR(first.Subject.Namespace, graph.Ellipsis),
			Metadata: &v1.ResolverMeta{
				AtRevision:     revision.String(),
				DepthRemaining: 50,
			},
		}, stream)

		require.NoError(t, err)

		// Every one of the MaxUint16+1 subjects must be reported for res1.
		res := stream.Results()
		require.Len(t, res, 1)
		require.Len(t, res[0].FoundSubjectsByResourceId, 1)
		require.NotNil(t, res[0].FoundSubjectsByResourceId["res1"])
		require.Len(t, res[0].FoundSubjectsByResourceId["res1"].FoundSubjects, math.MaxUint16+1)
	})
}
12 changes: 7 additions & 5 deletions pkg/genutil/slicez/chunking.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,12 @@ func ForEachChunkUntil[T any](data []T, chunkSize uint16, handler func(items []T
chunkSize = 1
}

dataLength := uint16(len(data))
chunkCount := (dataLength / chunkSize) + 1
for chunkIndex := uint16(0); chunkIndex < chunkCount; chunkIndex++ {
chunkStart := chunkIndex * chunkSize
chunkEnd := (chunkIndex + 1) * chunkSize
dataLength := uint64(len(data))
chunkSize64 := uint64(chunkSize)
chunkCount := (dataLength / chunkSize64) + 1
for chunkIndex := uint64(0); chunkIndex < chunkCount; chunkIndex++ {
chunkStart := chunkIndex * chunkSize64
chunkEnd := (chunkIndex + 1) * chunkSize64
if chunkEnd > dataLength {
chunkEnd = dataLength
}
Expand All @@ -38,5 +39,6 @@ func ForEachChunkUntil[T any](data []T, chunkSize uint16, handler func(items []T
}
}
}

return true, nil
}
55 changes: 53 additions & 2 deletions pkg/genutil/slicez/chunking_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,23 +2,28 @@ package slicez

import (
"fmt"
"math"
"testing"

"github.com/stretchr/testify/require"
)

func TestForEachChunk(t *testing.T) {
t.Parallel()

for _, datasize := range []int{0, 1, 5, 10, 50, 100, 250} {
datasize := datasize
for _, chunksize := range []uint16{1, 2, 3, 5, 10, 50} {
chunksize := chunksize
t.Run(fmt.Sprintf("test-%d-%d", datasize, chunksize), func(t *testing.T) {
data := []int{}
t.Parallel()

data := make([]int, 0, datasize)
for i := 0; i < datasize; i++ {
data = append(data, i)
}

found := []int{}
found := make([]int, 0, datasize)
ForEachChunk(data, chunksize, func(items []int) {
found = append(found, items...)
require.True(t, len(items) <= int(chunksize))
Expand All @@ -29,3 +34,49 @@ func TestForEachChunk(t *testing.T) {
}
}
}

// TestForEachChunkOverflowPanic chunks a slice of exactly MaxUint16 elements,
// the boundary size at which uint16-based chunk arithmetic is stressed, and
// verifies every element is delivered exactly once and in order.
func TestForEachChunkOverflowPanic(t *testing.T) {
	t.Parallel()

	const itemCount = math.MaxUint16
	chunkLen := uint16(50)

	input := make([]int, 0, itemCount)
	for v := 0; v < itemCount; v++ {
		input = append(input, v)
	}

	seen := make([]int, 0, itemCount)
	ForEachChunk(input, chunkLen, func(chunk []int) {
		// Each delivered chunk must be non-empty and bounded by the chunk size.
		require.True(t, len(chunk) > 0)
		require.True(t, len(chunk) <= int(chunkLen))
		seen = append(seen, chunk...)
	})

	// Concatenating the chunks reproduces the input exactly.
	require.Equal(t, input, seen)
}

// TestForEachChunkOverflowIncorrect verifies chunking over slices far larger
// than MaxUint16 — sizes that previously triggered incorrect results due to
// uint16 overflow — visiting every element exactly once and in order.
func TestForEachChunkOverflowIncorrect(t *testing.T) {
	t.Parallel()

	chunkLen := uint16(50)
	sizes := []int{math.MaxUint16 + int(chunkLen), 10_000_000}
	for _, size := range sizes {
		size := size // capture for the parallel subtest (pre-Go 1.22 semantics)
		t.Run(fmt.Sprintf("test-%d-%d", size, chunkLen), func(t *testing.T) {
			t.Parallel()

			input := make([]int, 0, size)
			for v := 0; v < size; v++ {
				input = append(input, v)
			}

			seen := make([]int, 0, size)
			ForEachChunk(input, chunkLen, func(chunk []int) {
				// Each delivered chunk is non-empty and within the chunk size.
				require.True(t, len(chunk) > 0)
				require.True(t, len(chunk) <= int(chunkLen))
				seen = append(seen, chunk...)
			})

			// Concatenating the chunks reproduces the input exactly.
			require.Equal(t, input, seen)
		})
	}
}

0 comments on commit ef443c4

Please sign in to comment.