Skip to content

Commit

Permalink
add more coverage
Browse files Browse the repository at this point in the history
add file transfer agent test

more tests

skip test on aws
  • Loading branch information
sfc-gh-ext-simba-lb committed May 25, 2023
1 parent c502198 commit 8661894
Show file tree
Hide file tree
Showing 6 changed files with 602,984 additions and 5 deletions.
2 changes: 1 addition & 1 deletion azure_storage_client_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -334,7 +334,7 @@ func TestUploadFileWithAzureUploadNeedsRetry(t *testing.T) {
uploadMeta := fileMetadata{
name: "data1.txt.gz",
stageLocationType: "AZURE",
noSleepingTime: true,
noSleepingTime: false,
parallel: initialParallel,
client: azureCli,
sha256Digest: "123456789abcdef",
Expand Down
3 changes: 3 additions & 0 deletions converter_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,7 @@ func TestSnowflakeTypeToGo(t *testing.T) {
{in: arrayType, scale: 0, out: reflect.TypeOf("")},
{in: binaryType, scale: 0, out: reflect.TypeOf([]byte{})},
{in: booleanType, scale: 0, out: reflect.TypeOf(true)},
{in: sliceType, scale: 0, out: reflect.TypeOf("")},
}
for _, test := range testcases {
a := snowflakeTypeToGo(test.in, test.scale)
Expand Down Expand Up @@ -238,8 +239,10 @@ type tcArrayToString struct {
func TestArrayToString(t *testing.T) {
testcases := []tcArrayToString{
{in: driver.NamedValue{Value: &intArray{1, 2}}, typ: fixedType, out: []string{"1", "2"}},
{in: driver.NamedValue{Value: &int32Array{1, 2}}, typ: fixedType, out: []string{"1", "2"}},
{in: driver.NamedValue{Value: &int64Array{3, 4, 5}}, typ: fixedType, out: []string{"3", "4", "5"}},
{in: driver.NamedValue{Value: &float64Array{6.7}}, typ: realType, out: []string{"6.7"}},
{in: driver.NamedValue{Value: &float32Array{1.5}}, typ: realType, out: []string{"1.5"}},
{in: driver.NamedValue{Value: &boolArray{true, false}}, typ: booleanType, out: []string{"true", "false"}},
{in: driver.NamedValue{Value: &stringArray{"foo", "bar", "baz"}}, typ: textType, out: []string{"foo", "bar", "baz"}},
}
Expand Down
54 changes: 54 additions & 0 deletions put_get_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -610,3 +610,57 @@ func TestPutGetGcsDownscopedCredential(t *testing.T) {
}
})
}

// TestPutGetLargeFile uploads test_data/largefile.txt to the user stage with
// PUT, verifies it is listed there, then downloads it with GET and checks the
// reported transfer status. Skipped on GitHub Actions runners that are not
// backed by AWS, since the large-file path is only exercised against S3 there.
func TestPutGetLargeFile(t *testing.T) {
	if runningOnGithubAction() && !runningOnAWS() {
		t.Skip("skipping non aws environment")
	}
	sourceDir, err := os.Getwd()
	if err != nil {
		// Fatal: without the source dir the PUT statement cannot be built.
		t.Fatal(err)
	}

	// t.TempDir is removed automatically when the test finishes.
	// (The previous os.MkdirTemp directory was leaked and a failure was
	// reported with t.Error, letting the test continue with an empty path.)
	tmpDir := t.TempDir()

	runTests(t, dsn, func(dbt *DBTest) {
		dbt.mustExec("rm @~/test_put_largefile")
		putQuery := fmt.Sprintf("put file://%v/test_data/largefile.txt @%v", sourceDir, "~/test_put_largefile")
		// Escape backslashes so Windows paths survive SQL parsing.
		sqlText := strings.ReplaceAll(putQuery, "\\", "\\\\")
		dbt.mustExec(sqlText)
		defer dbt.mustExec("rm @~/test_put_largefile")

		rows := dbt.mustQuery("ls @~/test_put_largefile")
		var file, s1, s2, s3 string
		if rows.Next() {
			if err := rows.Scan(&file, &s1, &s2, &s3); err != nil {
				t.Fatal(err)
			}
		}
		// Close the first result set before reusing the variable below;
		// it was previously left open.
		rows.Close()

		if !strings.Contains(file, "largefile.txt.gz") {
			t.Fatalf("should contain file. got: %v", file)
		}

		getQuery := fmt.Sprintf("get @%v 'file://%v'", "~/test_put_largefile", tmpDir)
		sqlText = strings.ReplaceAll(getQuery, "\\", "\\\\")
		var s0 string
		rows = dbt.mustQuery(sqlText)
		defer rows.Close()
		for rows.Next() {
			if err = rows.Scan(&s0, &s1, &s2, &s3); err != nil {
				t.Error(err)
			}
			// GET result columns: file name, size, status, message.
			if !strings.HasPrefix(s0, "largefile") {
				t.Error("a file was not downloaded by GET")
			}
			if s2 != "DOWNLOADED" {
				t.Error("did not return DOWNLOADED status")
			}
			if s3 != "" {
				t.Errorf("returned %v", s3)
			}
		}
	})
}
99 changes: 99 additions & 0 deletions s3_storage_client_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
package gosnowflake

import (
"bytes"
"context"
"errors"
"io"
Expand Down Expand Up @@ -571,3 +572,101 @@ func TestGetHeaderClientCastFail(t *testing.T) {
t.Fatal("should have failed")
}
}

// TestS3UploadRetryWithHeaderNotFound verifies that when the S3 upload
// succeeds but the subsequent HEAD check keeps reporting the object as not
// found, uploadOneFileWithRetry exhausts its retries and leaves the file
// metadata in errStatus rather than returning an error.
func TestS3UploadRetryWithHeaderNotFound(t *testing.T) {
	info := execResponseStageInfo{
		Location:     "sfc-customer-stage/rwyi-testacco/users/9220/",
		LocationType: "S3",
	}
	initialParallel := int64(100)
	dir, err := os.Getwd()
	if err != nil {
		// Fatal: dir is needed to build the source file path below.
		t.Fatal(err)
	}

	s3Cli, err := new(snowflakeS3Client).createClient(&info, false)
	if err != nil {
		// Fatal: continuing with a nil client would panic later.
		t.Fatal(err)
	}
	uploadMeta := fileMetadata{
		name:              "data1.txt.gz",
		stageLocationType: "S3",
		// NOTE(review): false means the retry loop performs real backoff
		// sleeps — confirm this is intended; it slows the test down.
		noSleepingTime: false,
		parallel:       initialParallel,
		client:         s3Cli,
		sha256Digest:   "123456789abcdef",
		stageInfo:      &info,
		dstFileName:    "data1.txt.gz",
		srcFileName:    path.Join(dir, "/test_data/put_get_1.txt"),
		overwrite:      true,
		options: &SnowflakeFileTransferOptions{
			MultiPartThreshold: dataSizeThreshold,
		},
		// Upload always "succeeds"...
		mockUploader: mockUploadObjectAPI(func(ctx context.Context, params *s3.PutObjectInput, optFns ...func(*manager.Uploader)) (*manager.UploadOutput, error) {
			return &manager.UploadOutput{
				Location: "https://sfc-customer-stage/rwyi-testacco/users/9220/data1.txt.gz",
			}, nil
		}),
		// ...but the HEAD verification always reports the object missing,
		// which is what drives the retry path under test.
		mockHeader: mockHeaderAPI(func(ctx context.Context, params *s3.HeadObjectInput, optFns ...func(*s3.Options)) (*s3.HeadObjectOutput, error) {
			return nil, &smithy.GenericAPIError{
				Code: notFound,
			}
		}),
	}

	uploadMeta.realSrcFileName = uploadMeta.srcFileName
	fi, err := os.Stat(uploadMeta.srcFileName)
	if err != nil {
		// Fatal: fi.Size() below would panic on a nil FileInfo.
		t.Fatal(err)
	}
	uploadMeta.uploadSize = fi.Size()

	err = new(remoteStorageUtil).uploadOneFileWithRetry(&uploadMeta)
	if err != nil {
		t.Error(err)
	}

	if uploadMeta.resStatus != errStatus {
		t.Fatalf("expected %v result status, got: %v", errStatus, uploadMeta.resStatus)
	}
}

// TestS3UploadStreamFailed verifies that uploadOneFile surfaces an error when
// the underlying S3 uploader fails while uploading from an in-memory stream
// (srcStream) rather than from a file on disk.
func TestS3UploadStreamFailed(t *testing.T) {
	info := execResponseStageInfo{
		Location:     "sfc-customer-stage/rwyi-testacco/users/9220/",
		LocationType: "S3",
	}
	initialParallel := int64(100)
	// Small in-memory payload ("ABC") used as the upload stream.
	src := []byte{65, 66, 67}

	s3Cli, err := new(snowflakeS3Client).createClient(&info, false)
	if err != nil {
		// Fatal: continuing with a nil client would panic below.
		t.Fatal(err)
	}

	uploadMeta := fileMetadata{
		name:              "data1.txt.gz",
		stageLocationType: "S3",
		noSleepingTime:    true,
		parallel:          initialParallel,
		client:            s3Cli,
		sha256Digest:      "123456789abcdef",
		stageInfo:         &info,
		dstFileName:       "data1.txt.gz",
		srcStream:         bytes.NewBuffer(src),
		overwrite:         true,
		options: &SnowflakeFileTransferOptions{
			MultiPartThreshold: dataSizeThreshold,
		},
		// The mocked uploader always fails, which is the behavior under test.
		mockUploader: mockUploadObjectAPI(func(ctx context.Context, params *s3.PutObjectInput, optFns ...func(*manager.Uploader)) (*manager.UploadOutput, error) {
			return nil, errors.New("unexpected error uploading file")
		}),
	}

	uploadMeta.realSrcStream = uploadMeta.srcStream

	err = new(remoteStorageUtil).uploadOneFile(&uploadMeta)
	if err == nil {
		t.Fatal("should have failed")
	}
}
Loading

0 comments on commit 8661894

Please sign in to comment.