Skip to content

Commit

Permalink
add more coverage
Browse files Browse the repository at this point in the history
add file transfer agent test

fix test

try setting covflags

fix make file
  • Loading branch information
sfc-gh-ext-simba-lb committed May 26, 2023
1 parent 1d4e0f3 commit 33103bd
Show file tree
Hide file tree
Showing 12 changed files with 602,969 additions and 8 deletions.
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
NAME:=gosnowflake
VERSION:=$(shell git describe --tags --abbrev=0)
REVISION:=$(shell git rev-parse --short HEAD)
COVFLAGS:=
COVFLAGS:=-coverprofile=coverage.txt -covermode=atomic

## Run fmt, lint and test
all: fmt lint cov
Expand Down
3 changes: 3 additions & 0 deletions azure_storage_client.go
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@ func (util *snowflakeAzureClient) getFileHeader(meta *fileMetadata, filename str
}
var blobClient azureAPI
blobClient = containerClient.NewBlockBlobClient(path)
// for testing only
if meta.mockAzureClient != nil {
blobClient = meta.mockAzureClient
}
Expand Down Expand Up @@ -188,6 +189,7 @@ func (util *snowflakeAzureClient) uploadFile(
}
var blobClient azureAPI
blobClient = containerClient.NewBlockBlobClient(path)
// for testing only
if meta.mockAzureClient != nil {
blobClient = meta.mockAzureClient
}
Expand Down Expand Up @@ -267,6 +269,7 @@ func (util *snowflakeAzureClient) nativeDownloadFile(
}
var blobClient azureAPI
blobClient = containerClient.NewBlockBlobClient(path)
// for testing only
if meta.mockAzureClient != nil {
blobClient = meta.mockAzureClient
}
Expand Down
2 changes: 1 addition & 1 deletion azure_storage_client_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -334,7 +334,7 @@ func TestUploadFileWithAzureUploadNeedsRetry(t *testing.T) {
uploadMeta := fileMetadata{
name: "data1.txt.gz",
stageLocationType: "AZURE",
noSleepingTime: true,
noSleepingTime: false,
parallel: initialParallel,
client: azureCli,
sha256Digest: "123456789abcdef",
Expand Down
2 changes: 1 addition & 1 deletion ci/scripts/test_component.sh
Original file line number Diff line number Diff line change
Expand Up @@ -12,4 +12,4 @@ if [[ -n "$GITHUB_WORKFLOW" ]]; then
fi
env | grep SNOWFLAKE | grep -v PASS | sort
cd $TOPDIR
go test -timeout 30m -race -covermode=atomic $COVFLAGS -v .
go test -timeout 30m -race $COVFLAGS -v .
2 changes: 1 addition & 1 deletion ci/test.bat
Original file line number Diff line number Diff line change
Expand Up @@ -42,4 +42,4 @@ echo [INFO] Database: %SNOWFLAKE_TEST_DATABASE%
echo [INFO] Warehouse: %SNOWFLAKE_TEST_WAREHOUSE%
echo [INFO] Role: %SNOWFLAKE_TEST_ROLE%

go test --timeout 30m --tags=sfdebug -race -covermode=atomic -v .
go test --timeout 30m --tags=sfdebug -race -coverprofile=coverage.txt -covermode=atomic -v .
3 changes: 3 additions & 0 deletions converter_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,7 @@ func TestSnowflakeTypeToGo(t *testing.T) {
{in: arrayType, scale: 0, out: reflect.TypeOf("")},
{in: binaryType, scale: 0, out: reflect.TypeOf([]byte{})},
{in: booleanType, scale: 0, out: reflect.TypeOf(true)},
{in: sliceType, scale: 0, out: reflect.TypeOf("")},
}
for _, test := range testcases {
a := snowflakeTypeToGo(test.in, test.scale)
Expand Down Expand Up @@ -238,8 +239,10 @@ type tcArrayToString struct {
func TestArrayToString(t *testing.T) {
testcases := []tcArrayToString{
{in: driver.NamedValue{Value: &intArray{1, 2}}, typ: fixedType, out: []string{"1", "2"}},
{in: driver.NamedValue{Value: &int32Array{1, 2}}, typ: fixedType, out: []string{"1", "2"}},
{in: driver.NamedValue{Value: &int64Array{3, 4, 5}}, typ: fixedType, out: []string{"3", "4", "5"}},
{in: driver.NamedValue{Value: &float64Array{6.7}}, typ: realType, out: []string{"6.7"}},
{in: driver.NamedValue{Value: &float32Array{1.5}}, typ: realType, out: []string{"1.5"}},
{in: driver.NamedValue{Value: &boolArray{true, false}}, typ: booleanType, out: []string{"true", "false"}},
{in: driver.NamedValue{Value: &stringArray{"foo", "bar", "baz"}}, typ: textType, out: []string{"foo", "bar", "baz"}},
}
Expand Down
3 changes: 3 additions & 0 deletions gcs_storage_client.go
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,7 @@ func (util *snowflakeGcsClient) getFileHeader(meta *fileMetadata, filename strin
}
var client gcsAPI
client = &http.Client{}
// for testing only
if meta.mockGcsClient != nil {
client = meta.mockGcsClient
}
Expand Down Expand Up @@ -210,6 +211,7 @@ func (util *snowflakeGcsClient) uploadFile(
}
var client gcsAPI
client = &http.Client{}
// for testing only
if meta.mockGcsClient != nil {
client = meta.mockGcsClient
}
Expand Down Expand Up @@ -284,6 +286,7 @@ func (util *snowflakeGcsClient) nativeDownloadFile(
}
var client gcsAPI
client = &http.Client{}
// for testing only
if meta.mockGcsClient != nil {
client = meta.mockGcsClient
}
Expand Down
27 changes: 27 additions & 0 deletions put_get_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -610,3 +610,30 @@ func TestPutGetGcsDownscopedCredential(t *testing.T) {
}
})
}

// TestPutLargeFile uploads test_data/largefile.txt to the user stage via PUT
// and verifies that the compressed file shows up in the stage listing.
func TestPutLargeFile(t *testing.T) {
	sourceDir, err := os.Getwd()
	if err != nil {
		t.Fatal(err)
	}

	runTests(t, dsn, func(dbt *DBTest) {
		// remove any leftover from a previous run so the listing is clean
		dbt.mustExec("rm @~/test_put_largefile")
		putQuery := fmt.Sprintf("put file://%v/test_data/largefile.txt @%v", sourceDir, "~/test_put_largefile")
		// escape backslashes so Windows paths survive SQL string parsing
		sqlText := strings.ReplaceAll(putQuery, "\\", "\\\\")
		dbt.mustExec(sqlText)
		defer dbt.mustExec("rm @~/test_put_largefile")

		rows := dbt.mustQuery("ls @~/test_put_largefile")
		defer rows.Close() // release the result set; previously leaked
		var file, s1, s2, s3 string
		if rows.Next() {
			if err := rows.Scan(&file, &s1, &s2, &s3); err != nil {
				t.Fatal(err)
			}
		}
		// surface iteration errors that Next() swallowed
		if err := rows.Err(); err != nil {
			t.Fatal(err)
		}

		if !strings.Contains(file, "largefile.txt.gz") {
			t.Fatalf("should contain file. got: %v", file)
		}
	})
}
3 changes: 3 additions & 0 deletions s3_storage_client.go
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@ func (util *snowflakeS3Client) getFileHeader(meta *fileMetadata, filename string
if !ok {
return nil, fmt.Errorf("could not parse client to s3.Client")
}
// for testing only
if meta.mockHeader != nil {
s3Cli = meta.mockHeader
}
Expand Down Expand Up @@ -148,6 +149,7 @@ func (util *snowflakeS3Client) uploadFile(
u.Concurrency = maxConcurrency
u.PartSize = int64Max(multiPartThreshold, manager.DefaultUploadPartSize)
})
// for testing only
if meta.mockUploader != nil {
uploader = meta.mockUploader
}
Expand Down Expand Up @@ -224,6 +226,7 @@ func (util *snowflakeS3Client) nativeDownloadFile(
downloader = manager.NewDownloader(client, func(u *manager.Downloader) {
u.Concurrency = int(maxConcurrency)
})
// for testing only
if meta.mockDownloader != nil {
downloader = meta.mockDownloader
}
Expand Down
99 changes: 99 additions & 0 deletions s3_storage_client_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
package gosnowflake

import (
"bytes"
"context"
"errors"
"io"
Expand Down Expand Up @@ -571,3 +572,101 @@ func TestGetHeaderClientCastFail(t *testing.T) {
t.Fatal("should have failed")
}
}

// TestS3UploadRetryWithHeaderNotFound exercises the upload-with-retry path
// when the mocked HEAD request keeps reporting the object as not found:
// the upload itself "succeeds" but the missing header must leave the
// metadata in errStatus.
func TestS3UploadRetryWithHeaderNotFound(t *testing.T) {
	info := execResponseStageInfo{
		Location:     "sfc-customer-stage/rwyi-testacco/users/9220/",
		LocationType: "S3",
	}
	initialParallel := int64(100)
	dir, err := os.Getwd()
	if err != nil {
		// setup failure: stop here instead of continuing with a bogus path
		t.Fatal(err)
	}

	s3Cli, err := new(snowflakeS3Client).createClient(&info, false)
	if err != nil {
		// a nil client would panic later; fail fast
		t.Fatal(err)
	}
	uploadMeta := fileMetadata{
		name:              "data1.txt.gz",
		stageLocationType: "S3",
		noSleepingTime:    false,
		parallel:          initialParallel,
		client:            s3Cli,
		sha256Digest:      "123456789abcdef",
		stageInfo:         &info,
		dstFileName:       "data1.txt.gz",
		srcFileName:       path.Join(dir, "/test_data/put_get_1.txt"),
		overwrite:         true,
		options: &SnowflakeFileTransferOptions{
			MultiPartThreshold: dataSizeThreshold,
		},
		// mock upload always reports success
		mockUploader: mockUploadObjectAPI(func(ctx context.Context, params *s3.PutObjectInput, optFns ...func(*manager.Uploader)) (*manager.UploadOutput, error) {
			return &manager.UploadOutput{
				Location: "https://sfc-customer-stage/rwyi-testacco/users/9220/data1.txt.gz",
			}, nil
		}),
		// mock HEAD always reports the object missing, forcing the retry path
		mockHeader: mockHeaderAPI(func(ctx context.Context, params *s3.HeadObjectInput, optFns ...func(*s3.Options)) (*s3.HeadObjectOutput, error) {
			return nil, &smithy.GenericAPIError{
				Code: notFound,
			}
		}),
	}

	uploadMeta.realSrcFileName = uploadMeta.srcFileName
	fi, err := os.Stat(uploadMeta.srcFileName)
	if err != nil {
		// without a valid size the upload call below is meaningless
		t.Fatal(err)
	}
	uploadMeta.uploadSize = fi.Size()

	err = new(remoteStorageUtil).uploadOneFileWithRetry(&uploadMeta)
	if err != nil {
		t.Error(err)
	}

	if uploadMeta.resStatus != errStatus {
		t.Fatalf("expected %v result status, got: %v", errStatus, uploadMeta.resStatus)
	}
}

// TestS3UploadStreamFailed verifies that uploadOneFile propagates the error
// when a stream-sourced upload fails in the (mocked) S3 uploader.
func TestS3UploadStreamFailed(t *testing.T) {
	info := execResponseStageInfo{
		Location:     "sfc-customer-stage/rwyi-testacco/users/9220/",
		LocationType: "S3",
	}
	initialParallel := int64(100)
	src := []byte{65, 66, 67} // "ABC" payload for the source stream

	s3Cli, err := new(snowflakeS3Client).createClient(&info, false)
	if err != nil {
		// a nil client would panic below; fail fast instead of t.Error
		t.Fatal(err)
	}

	uploadMeta := fileMetadata{
		name:              "data1.txt.gz",
		stageLocationType: "S3",
		noSleepingTime:    true,
		parallel:          initialParallel,
		client:            s3Cli,
		sha256Digest:      "123456789abcdef",
		stageInfo:         &info,
		dstFileName:       "data1.txt.gz",
		srcStream:         bytes.NewBuffer(src),
		overwrite:         true,
		options: &SnowflakeFileTransferOptions{
			MultiPartThreshold: dataSizeThreshold,
		},
		// mock upload always fails so the error path is exercised
		mockUploader: mockUploadObjectAPI(func(ctx context.Context, params *s3.PutObjectInput, optFns ...func(*manager.Uploader)) (*manager.UploadOutput, error) {
			return nil, errors.New("unexpected error uploading file")
		}),
	}

	uploadMeta.realSrcStream = uploadMeta.srcStream

	err = new(remoteStorageUtil).uploadOneFile(&uploadMeta)
	if err == nil {
		t.Fatal("should have failed")
	}
}
Loading

0 comments on commit 33103bd

Please sign in to comment.