# feeder

feeder is an RSS, Atom, and JSON feed generator that aggregates multiple RSS and Atom feeds and any entries you want.

## Getting started

### Install

```sh
go get -u github.com/p1ass/feeder
```

### Examples

import "github.com/p1ass/feeder"

func crawl(){
	rss1 := feeder.NewRSSCrawler("https://example.com/rss1")
	rss2 := feeder.NewRSSCrawler("https://example.com/rss2")

	items, err := feeder.Crawl(rss1, rss2)

	feed := &feeder.Feed{
		Title:       "My feeds",
		Link:        &feeder.Link{Href: "https://example.com/feed"},
		Description: "My feeds.",
		Author:      &feeder.Author{
			Name: "p1ass",
			Email: "p1ass@example.com"},
		Created:     time.Now(),
		Items:       items,
	}

	json, err := feed.ToJSON() // json is a `string`
	rss, err := feed.ToRSS() // rss is a `string`
	atom, err := feed.ToAtom() // atom is a `string`
}
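
Since ToRSS, ToAtom, and ToJSON all return plain strings, a generated feed can be served directly over HTTP. Here is a minimal, illustrative sketch; the `/feed` path, the port, and the crawled URL are placeholders, not part of the library:

```go
package main

import (
	"fmt"
	"log"
	"net/http"
	"time"

	"github.com/p1ass/feeder"
)

func main() {
	http.HandleFunc("/feed", func(w http.ResponseWriter, r *http.Request) {
		// Re-crawl the source feed on each request.
		items, err := feeder.Crawl(feeder.NewRSSCrawler("https://example.com/rss1"))
		if err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}

		feed := &feeder.Feed{
			Title:   "My feeds",
			Link:    &feeder.Link{Href: "https://example.com/feed"},
			Created: time.Now(),
			Items:   items,
		}

		rss, err := feed.ToRSS()
		if err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}

		w.Header().Set("Content-Type", "application/rss+xml; charset=utf-8")
		fmt.Fprint(w, rss)
	})

	log.Fatal(http.ListenAndServe(":8080", nil))
}
```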

## Advanced usage

### Implement your own Crawler

You can create a custom crawler by implementing the feeder.Crawler interface:

```go
type Crawler interface {
	Crawl() ([]*Item, error)
}
```

This is an example crawler for the Qiita API (`GET /api/v2/users/:user_id/items`).

Reference: Qiita API v2 documentation - Qiita:Developer

```go
import (
	"encoding/json"
	"net/http"
	"time"

	"github.com/p1ass/feeder"
	"github.com/pkg/errors"
)

type qiitaResponse struct {
	CreatedAt *time.Time `json:"created_at"`
	Title     string     `json:"title"`
	URL       string     `json:"url"`
	Body      string     `json:"body"`
	ID        string     `json:"id"`
	User      *qiitaUser `json:"user"`
}

type qiitaUser struct {
	ID string `json:"id"`
}

type qiitaCrawler struct {
	URL string
}

// Crawl satisfies the feeder.Crawler interface.
func (crawler *qiitaCrawler) Crawl() ([]*feeder.Item, error) {
	resp, err := http.Get(crawler.URL)
	if err != nil {
		return nil, errors.Wrap(err, "failed to get response from qiita")
	}
	defer resp.Body.Close()

	var qiita []*qiitaResponse
	if err := json.NewDecoder(resp.Body).Decode(&qiita); err != nil {
		return nil, errors.Wrap(err, "failed to decode response body")
	}

	items := []*feeder.Item{}
	for _, q := range qiita {
		items = append(items, convertQiitaToItem(q))
	}
	return items, nil
}

func convertQiitaToItem(q *qiitaResponse) *feeder.Item {
	i := &feeder.Item{
		Title:       q.Title,
		Link:        &feeder.Link{Href: q.URL},
		Created:     q.CreatedAt,
		Id:          q.ID,
		Description: q.Body,
	}

	if q.User != nil {
		i.Author = &feeder.Author{
			Name: q.User.ID,
		}
	}
	return i
}
```
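
The custom crawler can then be mixed with the built-in ones, since feeder.Crawl accepts anything that satisfies feeder.Crawler. A small sketch; the Qiita user p1ass and the RSS URL are illustrative:

```go
func crawlQiita() ([]*feeder.Item, error) {
	// Endpoint from the Qiita API docs: GET /api/v2/users/:user_id/items
	qiita := &qiitaCrawler{URL: "https://qiita.com/api/v2/users/p1ass/items"}
	rss := feeder.NewRSSCrawler("https://example.com/rss")

	// Built-in and custom crawlers can be crawled together.
	return feeder.Crawl(qiita, rss)
}
```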