price api wip (#407)
* price api wip

* fix example

* update example

* finish pricing

* update readme
asim committed Mar 23, 2022
1 parent a35cccd commit f662df8
Showing 15 changed files with 1,965 additions and 0 deletions.
2 changes: 2 additions & 0 deletions price/.gitignore
@@ -0,0 +1,2 @@

price
3 changes: 3 additions & 0 deletions price/Dockerfile
@@ -0,0 +1,3 @@
FROM alpine
ADD price /price
ENTRYPOINT [ "/price" ]
27 changes: 27 additions & 0 deletions price/Makefile
@@ -0,0 +1,27 @@

GOPATH:=$(shell go env GOPATH)
.PHONY: init
init:
go install github.com/golang/protobuf/protoc-gen-go@latest
go install github.com/micro/micro/v3/cmd/protoc-gen-micro@latest
go install github.com/micro/micro/v3/cmd/protoc-gen-openapi@latest

.PHONY: api
api:
protoc --openapi_out=. --proto_path=. proto/price.proto

.PHONY: proto
proto:
protoc --proto_path=. --micro_out=. --go_out=:. proto/price.proto

.PHONY: build
build:
go build -o price *.go

.PHONY: test
test:
go test -v ./... -cover

.PHONY: docker
docker:
docker build . -t price:latest
7 changes: 7 additions & 0 deletions price/README.md
@@ -0,0 +1,7 @@
Global prices index

# Price Service

Get the price of quite literally anything. Commodity prices are updated every 10-15 minutes. Add your own prices
for anything not already indexed.
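
To make the shape of the data concrete, here is a minimal sketch (not part of this commit) of fetching a single quote through the crawler added below. The access key, symbol and currency are illustrative placeholders, and because the crawler writes to the micro store this only works inside a running micro environment:

package main

import (
	"fmt"
	"log"

	"github.com/micro/services/price/crawler"
)

func main() {
	// access key, symbol and currency are illustrative placeholders
	c := crawler.New("YOUR_COMMODITIES_API_KEY")

	// fetch, cache and return the latest Brent crude price in US dollars
	val, err := c.Get("BRENTOIL", "USD")
	if err != nil {
		log.Fatalf("failed to get price: %v", err)
	}

	// val carries Name, Symbol, Price, Currency and an RFC3339 Timestamp
	fmt.Printf("%s (%s): %f %s at %s\n", val.Name, val.Symbol, val.Price, val.Currency, val.Timestamp)
}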

221 changes: 221 additions & 0 deletions price/crawler/crawler.go
@@ -0,0 +1,221 @@
package crawler

import (
"fmt"
"net/url"
"path"
"strings"
"time"

"github.com/micro/micro/v3/service/logger"
"github.com/micro/micro/v3/service/store"
"github.com/micro/services/pkg/api"
pb "github.com/micro/services/price/proto"
)

// Crawler pulls commodity prices from commodities-api.com and caches them in the micro store
type Crawler struct {
Key string
}

type Response struct {
Data Data `json:"data"`
}

type Data struct {
Success bool `json:"success"`
Timestamp int64 `json:"timestamp"`
Date string `json:"date"`
Base string `json:"base"`
Rates map[string]float64 `json:"rates"`
Unit string `json:"unit,omitempty"`
}

// GetPrices fetches the latest rates for every indexed symbol against the given base currency
// and writes both the latest and historic records to the store
func (c *Crawler) GetPrices(base string) {
uri := "https://www.commodities-api.com/api/latest"
vals := url.Values{}
vals.Set("access_key", c.Key)
vals.Set("base", base)

var symbols []string

for symbol := range Index {
symbols = append(symbols, symbol)
}

vals.Set("symbols", strings.Join(symbols, ","))

q := vals.Encode()
uri += "?" + q

var rsp Response

if err := api.Get(uri, &rsp); err != nil {
logger.Errorf("Failed to get symbols: %v", err)
return
}

for symbol, value := range rsp.Data.Rates {
name := Index[symbol]

val := &pb.Value{
Name: name,
Price: float64(1) / value,
Symbol: symbol,
Currency: rsp.Data.Base,
Timestamp: time.Unix(rsp.Data.Timestamp, 0).Format(time.RFC3339Nano),
}

for _, suffix := range []string{"latest", fmt.Sprintf("%d", rsp.Data.Timestamp)} {
// key: price/<symbol>/<currency>/{latest|<timestamp>}
key := path.Join(
"price",
strings.ToLower(symbol),
strings.ToLower(rsp.Data.Base),
suffix,
)

rec := store.NewRecord(key, val)

// save the record
if err := store.Write(rec); err != nil {
logger.Error("Failed to write symbol: %v error: %v", key, err)
}
}

// index the item for the future
key := path.Join(
"index",
strings.ToLower(symbol),
strings.ToLower(rsp.Data.Base),
)

if err := store.Write(store.NewRecord(key, &pb.Index{
Name: val.Name,
Symbol: val.Symbol,
Currency: val.Currency,
})); err != nil {
logger.Error("Failed to write index: %v error: %v", key, err)
}
}
}

// Run loads the symbol index, fetches prices immediately, then refreshes them every 10 minutes
func (c *Crawler) Run() {
t := time.NewTicker(time.Minute * 10)
defer t.Stop()

// build the index
var index map[string]interface{}

vals := url.Values{}
vals.Set("access_key", c.Key)
q := vals.Encode()
uri := "https://www.commodities-api.com/api/symbols?" + q

if err := api.Get(uri, &index); err != nil {
logger.Errorf("Failed to get index symbols: %v", err)
}

// update our built-in index
for k, v := range index {
Index[k] = v.(string)
}

// base currencies we have indexed prices for
bases := make(map[string]bool)

// load the indexed items
recs, err := store.List(store.ListPrefix("index/"))
if err != nil {
logger.Errorf("Failed to read index: %v", err)
}

// pull out the base currencies they were indexed against
for _, rec := range recs {
// keys look like index/<symbol>/<currency>
parts := strings.Split(rec, "/")
bases[strings.ToUpper(parts[2])] = true
}

// default to USD if nothing has been indexed yet
if len(bases) == 0 {
bases["USD"] = true
}

// get prices immediately on startup
for base := range bases {
c.GetPrices(base)
}

// refresh prices on every tick
for range t.C {
for base := range bases {
c.GetPrices(base)
}
}
}

// Get fetches, stores and returns the latest price for a single symbol in the given currency
func (c *Crawler) Get(symbol, currency string) (*pb.Value, error) {
uri := "https://www.commodities-api.com/api/latest"
vals := url.Values{}
vals.Set("access_key", c.Key)
vals.Set("base", currency)
vals.Set("symbols", symbol)

q := vals.Encode()
uri += "?" + q

var rsp Response

if err := api.Get(uri, &rsp); err != nil {
logger.Errorf("Failed to get symbols for %v:%v: %v", symbol, currency, err)
return nil, err
}

val := &pb.Value{
Name: Index[symbol],
Price: float64(1) / rsp.Data.Rates[symbol],
Symbol: symbol,
Currency: rsp.Data.Base,
Timestamp: time.Unix(rsp.Data.Timestamp, 0).Format(time.RFC3339Nano),
}

// write historic record and latest
for _, suffix := range []string{"latest", fmt.Sprintf("%d", rsp.Data.Timestamp)} {
key := path.Join(
"price",
strings.ToLower(symbol),
strings.ToLower(rsp.Data.Base),
suffix,
)

rec := store.NewRecord(key, val)

// save the record
if err := store.Write(rec); err != nil {
logger.Error("Failed to write symbol: %v error: %v", symbol, err)
}
}

// index the item for the future
key := path.Join(
"index",
strings.ToLower(symbol),
strings.ToLower(rsp.Data.Base),
)

if err := store.Write(store.NewRecord(key, &pb.Index{
Name: val.Name,
Symbol: val.Symbol,
Currency: val.Currency,
})); err != nil {
logger.Error("Failed to write index: %v error: %v", key, err)
}

// return value
return val, nil
}

// New returns a Crawler using the given commodities-api.com access key
func New(key string) *Crawler {
return &Crawler{key}
}
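
The service's main.go is among the 15 changed files but not shown in this excerpt. As a rough sketch of how the crawler might be wired up, assuming the access key arrives via an environment variable (variable name assumed, not taken from this commit):

package main

import (
	"os"

	"github.com/micro/micro/v3/service"
	"github.com/micro/micro/v3/service/logger"
	"github.com/micro/services/price/crawler"
)

func main() {
	srv := service.New(
		service.Name("price"),
	)

	// COMMODITIES_API_KEY is an assumed variable name, not taken from this diff
	c := crawler.New(os.Getenv("COMMODITIES_API_KEY"))

	// load the symbol index, then refresh prices every 10 minutes in the background
	go c.Run()

	// handler registration omitted; the price handler lives elsewhere in this commit
	if err := srv.Run(); err != nil {
		logger.Fatal(err)
	}
}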
