package directus
import (
"bytes"
"encoding/json"
"fmt"
"github.com/pkg/errors"
"io/ioutil"
"net/http"
"time"
)
// Page is a single item of the Directus "pages" collection, decoded
// from the API's JSON representation.
type Page struct {
	// dir links back to the owning client; excluded from JSON output.
	dir *Directus `json:"-"`
	Id int64 `json:"id,omitempty"`
	Name string `json:"name,omitempty"`
	Slug string `json:"slug,omitempty"`
	Content string `json:"content,omitempty"`
}
// PageResult mirrors the Directus API response envelope for a pages
// query: either Data is filled, or Errors describes the failure.
type PageResult struct {
	Data []*Page `json:"data,omitempty"`
	// Errors is populated by the API on failure; inspected in loadPages.
	Errors []*Error `json:"errors"`
}
// GetId returns the numeric identifier of the page.
func (n *Page) GetId() int64 {
	return n.Id
}

// GetName returns the display name of the page.
func (n *Page) GetName() string {
	return n.Name
}
// GetPages returns all pages, refreshing the internal cache first if it
// is empty or stale. The returned slice is shared with the cache and
// must not be modified by the caller.
func (d *Directus) GetPages() ([]*Page, error) {
	if err := d.loadPages(); err != nil {
		// Message fixed: it previously said "cannot load news",
		// a copy-paste leftover from the news loader.
		return nil, errors.Wrap(err, "cannot load pages")
	}
	return d.pages, nil
}
// GetPageByName returns the first page whose Name equals name,
// refreshing the page cache via GetPages when necessary. It returns an
// error if the cache cannot be loaded or no such page exists.
func (d *Directus) GetPageByName(name string) (*Page, error) {
	pages, err := d.GetPages()
	if err != nil {
		// Message fixed: previously "cannot load news" (copy-paste).
		return nil, errors.Wrap(err, "cannot load pages")
	}
	for _, p := range pages {
		if p.GetName() == name {
			return p, nil
		}
	}
	// errors.Errorf replaces the redundant errors.New(fmt.Sprintf(...)).
	return nil, errors.Errorf("page %s not found", name)
}
// loadPages fetches /items/pages from the Directus API and fills the
// d.pages cache. It is a no-op while the cache exists and is younger
// than d.cacheTime. Guarded by d.mutex for concurrent use.
func (d *Directus) loadPages() error {
	d.mutex.Lock()
	defer d.mutex.Unlock()
	if d.pages != nil && !time.Now().Add(-d.cacheTime).After(d.lastAccess) {
		// Cache present and still fresh — nothing to do.
		return nil
	}
	if d.pages != nil {
		d.clearCache()
	}
	urlStr := fmt.Sprintf("%s/items/pages", d.baseurl)
	req, err := http.NewRequest("GET", urlStr, bytes.NewReader(nil))
	if err != nil {
		return errors.Wrapf(err, "cannot create request %s", urlStr)
	}
	req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", d.token))
	// Bound the request: the original used Timeout: 0, so a hung
	// server could block callers (and the held mutex) forever.
	client := http.Client{
		Transport: http.DefaultTransport,
		Timeout:   30 * time.Second,
	}
	resp, err := client.Do(req)
	if err != nil {
		return errors.Wrapf(err, "error executing %s", urlStr)
	}
	// The body was never closed before — that leaks the underlying
	// connection and prevents keep-alive reuse.
	defer resp.Body.Close()
	data, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return errors.Wrapf(err, "cannot read result of %s", urlStr)
	}
	result := PageResult{}
	if err := json.Unmarshal(data, &result); err != nil {
		// Fixed copy-paste: the original cleared d.news here, but
		// this function maintains the pages cache.
		d.pages = nil
		return errors.Wrapf(err, "cannot parse result: %s", string(data))
	}
	if len(result.Errors) > 0 {
		d.pages = nil
		// Renamed the local previously called "error" (it shadowed
		// the predeclared identifier) and dropped the redundant
		// fmt.Sprintf("%s", ...) wrapper.
		return errors.New(result.Errors[0].Message)
	}
	d.pages = make([]*Page, 0, len(result.Data))
	for _, p := range result.Data {
		p.dir = d
		d.pages = append(d.pages, p)
	}
	d.lastAccess = time.Now()
	return nil
}