[FABG-785] Supported node chaincode (#40)
[FABG-785] Supported node chaincode

Support for install, instantiate and upgrade node chaincode

Signed-off-by: yakumioto <yaku.mioto@gmail.com>
yakumioto authored and alikic committed Jan 6, 2020
1 parent 700785a commit 8f3d32c
Showing 22 changed files with 1,117 additions and 4 deletions.
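The new nodepackager produces a *resource.CCPackage that plugs into the SDK's existing lifecycle calls. Below is a minimal usage sketch, not part of this commit: the helper name deployNodeCC, the chaincode name/version, and the resmgmt client wiring are illustrative assumptions, and error handling and the endorsement policy are kept to a minimum.

package example

import (
	"github.com/hyperledger/fabric-sdk-go/pkg/client/resmgmt"
	"github.com/hyperledger/fabric-sdk-go/pkg/fab/ccpackager/nodepackager"
)

// deployNodeCC (hypothetical helper) packages a node.js chaincode directory,
// installs it on the peers in context, and instantiates it on a channel.
func deployNodeCC(rc *resmgmt.Client, channelID, ccPath string) error {
	// Package the chaincode sources and META-INF metadata into a tar.gz.
	ccPkg, err := nodepackager.NewCCPackage(ccPath)
	if err != nil {
		return err
	}

	// Install the package on the peers resolved from the client's context.
	installReq := resmgmt.InstallCCRequest{
		Name:    "examplecc",
		Path:    ccPath,
		Version: "v0",
		Package: ccPkg,
	}
	if _, err := rc.InstallCC(installReq); err != nil {
		return err
	}

	// Instantiate on the channel; UpgradeCC follows the same request shape.
	// An endorsement policy would normally be supplied via the Policy field,
	// and depending on the SDK version a chaincode language/type field may
	// also need to be set for node chaincode.
	instantiateReq := resmgmt.InstantiateCCRequest{
		Name:    "examplecc",
		Path:    ccPath,
		Version: "v0",
		Args:    [][]byte{[]byte("init")},
	}
	_, err = rc.InstantiateCC(channelID, instantiateReq)
	return err
}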
209 changes: 209 additions & 0 deletions pkg/fab/ccpackager/nodepackager/packager.go
@@ -0,0 +1,209 @@
/*
Copyright Mioto Yaku All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
*/

package nodepackager

import (
"archive/tar"
"bytes"
"compress/gzip"
"fmt"
"io"
"os"
"path/filepath"
"strings"
"time"

pb "github.com/hyperledger/fabric-protos-go/peer"
"github.com/hyperledger/fabric-sdk-go/pkg/common/logging"
"github.com/hyperledger/fabric-sdk-go/pkg/fab/resource"
"github.com/pkg/errors"
)

// Descriptor describes a single file to be packaged: the tar entry name (name)
// and the fully-qualified path of the file on disk (fqp).
type Descriptor struct {
	name string
	fqp  string
}

var keep = []string{".js", ".yaml", ".yml", ".json"}

var logger = logging.NewLogger("fabsdk/fab")

// NewCCPackage creates a new node chaincode package
func NewCCPackage(chaincodePath string) (*resource.CCPackage, error) {

	if chaincodePath == "" {
		return nil, errors.New("chaincode path must be provided")
	}

	logger.Debugf("projDir variable=%s", chaincodePath)

	// We generate the tar in two phases: first grab a list of descriptors,
	// and then pack them into an archive. While the two phases aren't
	// strictly necessary yet, they pave the way for the future where we
	// will need to assemble sources from multiple packages.
	descriptors, err := findSource(chaincodePath)
	if err != nil {
		return nil, err
	}
	tarBytes, err := generateTarGz(descriptors)
	if err != nil {
		return nil, err
	}

	ccPkg := &resource.CCPackage{Type: pb.ChaincodeSpec_NODE, Code: tarBytes}

	return ccPkg, nil
}

// -------------------------------------------------------------------------
// findSource(filePath)
// -------------------------------------------------------------------------
// Given an input 'filePath', recursively walk the filesystem for any regular
// files that fit the criteria for node chaincode source or metadata
// (ISREG + (*.(js|yaml|yml|json))). As a convenience, we also formulate a
// tar-friendly "name" for each file based on its position relative to
// 'filePath'.
// -------------------------------------------------------------------------
func findSource(filePath string) ([]*Descriptor, error) {
	var descriptors []*Descriptor

	folder := filePath
	// trim trailing slash if it exists
	if folder[len(folder)-1] == '/' {
		folder = folder[:len(folder)-1]
	}

	if abs := filepath.IsAbs(folder); !abs {
		var err error
		folder, err = filepath.Rel("", folder)
		if err != nil {
			return nil, err
		}
	}

	err := filepath.Walk(folder,
		func(path string, fileInfo os.FileInfo, err error) error {
			if err != nil {
				return err
			}

			if fileInfo.Mode().IsRegular() && isSource(path) {
				if strings.Contains(path, "/META-INF/") {
					relPath := path[strings.Index(path, "/META-INF/")+1:]
					descriptors = append(descriptors, &Descriptor{name: relPath, fqp: path})
					return nil
				}

				// file is not metadata, include in src
				relPath := filepath.Join("src", path[len(folder)+1:])
				descriptors = append(descriptors, &Descriptor{name: relPath, fqp: path})
			}

			return nil
		})

	return descriptors, err
}

// -------------------------------------------------------------------------
// isSource(path)
// -------------------------------------------------------------------------
// predicate function for determining whether a given path should be
// considered valid source code, based entirely on the extension. It is
// assumed that other checks for file type have already been performed.
// -------------------------------------------------------------------------
func isSource(filePath string) bool {
	var extension = filepath.Ext(filePath)
	for _, v := range keep {
		if v == extension {
			return true
		}
	}
	return false
}

// -------------------------------------------------------------------------
// generateTarGz(descriptors)
// -------------------------------------------------------------------------
// creates a .tar.gz stream from the provided descriptor entries
// -------------------------------------------------------------------------
func generateTarGz(descriptors []*Descriptor) ([]byte, error) {
	// set up the gzip writer
	var codePackage bytes.Buffer
	gw := gzip.NewWriter(&codePackage)
	tw := tar.NewWriter(gw)
	for _, v := range descriptors {
		logger.Debugf("generateTarGz for %s", v.fqp)
		err := packEntry(tw, gw, v)
		if err != nil {
			err1 := closeStream(tw, gw)
			if err1 != nil {
				return nil, errors.Wrap(err, fmt.Sprintf("packEntry failed and close error %s", err1))
			}
			return nil, errors.Wrap(err, "packEntry failed")
		}
	}
	err := closeStream(tw, gw)
	if err != nil {
		return nil, errors.Wrap(err, "closeStream failed")
	}
	return codePackage.Bytes(), nil

}

func closeStream(tw io.Closer, gw io.Closer) error {
	err := tw.Close()
	if err != nil {
		return err
	}
	err = gw.Close()
	return err
}

func packEntry(tw *tar.Writer, gw *gzip.Writer, descriptor *Descriptor) error {
	file, err := os.Open(descriptor.fqp)
	if err != nil {
		return err
	}
	defer func() {
		err := file.Close()
		if err != nil {
			logger.Warnf("error file close %s", err)
		}
	}()

	if stat, err := file.Stat(); err == nil {

		// now let's create the header as needed for this file within the tarball
		header := new(tar.Header)
		header.Name = descriptor.name
		header.Size = stat.Size()
		header.Mode = int64(stat.Mode())
		// Use a deterministic "zero-time" for all date fields
		header.ModTime = time.Time{}
		header.AccessTime = time.Time{}
		header.ChangeTime = time.Time{}
		// write the header to the tarball archive
		if err := tw.WriteHeader(header); err != nil {
			return err
		}

		// copy the file data to the tarball

		if _, err := io.Copy(tw, file); err != nil {
			return err
		}
		if err := tw.Flush(); err != nil {
			return err
		}
		if err := gw.Flush(); err != nil {
			return err
		}

	}
	return nil
}
106 changes: 106 additions & 0 deletions pkg/fab/ccpackager/nodepackager/packager_test.go
@@ -0,0 +1,106 @@
/*
Copyright Mioto Yaku All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
*/

package nodepackager

import (
"archive/tar"
"bytes"
"compress/gzip"
"io"
"os"
"path/filepath"
"strings"
"testing"

"github.com/stretchr/testify/assert"
)

// Test node chaincode packaging
func TestNewCCPackage(t *testing.T) {
	pwd, err := os.Getwd()
	assert.Nil(t, err, "error from os.Getwd %s", err)

	ccPackage, err := NewCCPackage(filepath.Join(pwd, "testdata"))
	assert.Nil(t, err, "error from Create %s", err)

	r := bytes.NewReader(ccPackage.Code)

	gzf, err := gzip.NewReader(r)
	assert.Nil(t, err, "error from gzip.NewReader %s", err)

	tarReader := tar.NewReader(gzf)
	i := 0
	var exampleccExist, eventMetaInfExists, examplecc1MetaInfExists, fooMetaInfoExists, metaInfFooExists bool
	for {
		header, err := tarReader.Next()

		if err == io.EOF {
			break
		}

		assert.Nil(t, err, "error from tarReader.Next() %s", err)

		exampleccExist = exampleccExist || header.Name == "src/example_cc/chaincode_example02.js"
		eventMetaInfExists = eventMetaInfExists || header.Name == "META-INF/sample-json/event.json"
		examplecc1MetaInfExists = examplecc1MetaInfExists || header.Name == "META-INF/example1.json"
		fooMetaInfoExists = fooMetaInfoExists || strings.HasPrefix(header.Name, "foo-META-INF")
		metaInfFooExists = metaInfFooExists || strings.HasPrefix(header.Name, "META-INF-foo")

		i++
	}

	assert.True(t, exampleccExist, "src/example_cc/chaincode_example02.js does not exist in tar file")
	assert.True(t, eventMetaInfExists, "META-INF/sample-json/event.json does not exist in tar file")
	assert.True(t, examplecc1MetaInfExists, "META-INF/example1.json does not exist in tar file")
	assert.False(t, fooMetaInfoExists, "invalid root directory found")
	assert.False(t, metaInfFooExists, "invalid root directory found")
}

// Test packaging with an empty chaincode path
func TestEmptyCreate(t *testing.T) {

	_, err := NewCCPackage("")
	if err == nil {
		t.Fatal("packaging with an empty chaincode path must return an error")
	}
}

// Test bad package path for chaincode packaging
func TestBadPackagePathGoLangCC(t *testing.T) {
	_, err := NewCCPackage("github.com")
	if err == nil {
		t.Fatal("error expected from NewCCPackage with a bad package path")
	}
}

// Test that isSource returns false when the keep list is empty
func TestIsSourcePath(t *testing.T) {
	keep = []string{}
	isSrcVal := isSource(filepath.Join(".."))

	if isSrcVal {
		t.Fatalf("error expected when calling isSource %v", isSrcVal)
	}

	// reset keep to the node chaincode extensions
	keep = []string{".js", ".yaml", ".yml", ".json"}
}

// Test packEntry and generateTarGz with empty file Descriptor
func TestEmptyPackEntry(t *testing.T) {
emptyDescriptor := &Descriptor{"NewFile", ""}
err := packEntry(nil, nil, emptyDescriptor)
if err == nil {
t.Fatal("packEntry call with empty descriptor info must throw an error")
}

_, err = generateTarGz([]*Descriptor{emptyDescriptor})
if err == nil {
t.Fatal("generateTarGz call with empty descriptor info must throw an error")
}

}
@@ -0,0 +1,6 @@
{
  "sample": {
    "title": "sample-json",
    "data": "sample text"
  }
}
