Skip to content

Commit

Permalink
Support episodes cleanup #44
Browse files Browse the repository at this point in the history
  • Loading branch information
mxpv committed Mar 8, 2020
1 parent e0290af commit 35dc604
Show file tree
Hide file tree
Showing 4 changed files with 73 additions and 0 deletions.
1 change: 1 addition & 0 deletions README.md
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ vimeo = "{VIMEO_API_TOKEN}"
# max_height = "720" # Optional maximal height of video, example: 720, 1080, 1440, 2160, ... # max_height = "720" # Optional maximal height of video, example: 720, 1080, 1440, 2160, ...
# cron_schedule = "@every 12h" # Optional cron expression format. If set then overwrite 'update_period'. See details below # cron_schedule = "@every 12h" # Optional cron expression format. If set then overwrite 'update_period'. See details below
# filters = { title = "regex for title here" } # Optional Golang regexp format. If set, then only download episodes with matching titles. # filters = { title = "regex for title here" } # Optional Golang regexp format. If set, then only download episodes with matching titles.
# clean = { keep_last = 10 } # Keep last 10 episodes (order desc by PubDate)
``` ```


Episode files will be kept at: `/path/to/data/directory/ID1`, and the feed will be accessible from: `http://localhost/ID1.xml`
Expand Down
61 changes: 61 additions & 0 deletions cmd/podsync/updater.go
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -7,8 +7,10 @@ import (
"io" "io"
"os" "os"
"regexp" "regexp"
"sort"
"time" "time"


"github.com/hashicorp/go-multierror"
"github.com/pkg/errors" "github.com/pkg/errors"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"


Expand Down Expand Up @@ -61,6 +63,10 @@ func (u *Updater) Update(ctx context.Context, feedConfig *config.Feed) error {
return err return err
} }


if err := u.cleanup(ctx, feedConfig); err != nil {
log.WithError(err).Error("cleanup failed")
}

elapsed := time.Since(started) elapsed := time.Since(started)
nextUpdate := time.Now().Add(feedConfig.UpdatePeriod.Duration) nextUpdate := time.Now().Add(feedConfig.UpdatePeriod.Duration)
log.Infof("successfully updated feed in %s, next update at %s", elapsed, nextUpdate.Format(time.Kitchen)) log.Infof("successfully updated feed in %s, next update at %s", elapsed, nextUpdate.Format(time.Kitchen))
Expand Down Expand Up @@ -241,3 +247,58 @@ func (u *Updater) buildXML(ctx context.Context, feedConfig *config.Feed) error {


return nil return nil
} }

// cleanup enforces the feed's cleanup policy: episodes are ordered by
// publication date (newest first) and everything past Clean.KeepLast is
// deleted from the file system and marked as cleaned in the database.
// Per-episode failures are accumulated into a multierror so one bad
// episode does not stop the rest; the combined error (or nil) is returned.
func (u *Updater) cleanup(ctx context.Context, feedConfig *config.Feed) error {
	var (
		feedID = feedConfig.ID
		logger = log.WithField("feed_id", feedID)
		count  = feedConfig.Clean.KeepLast
		list   []*model.Episode
		result *multierror.Error
	)

	// A non-positive KeepLast means the policy is unset/disabled.
	if count < 1 {
		logger.Info("nothing to clean")
		return nil
	}

	logger.WithField("count", count).Info("running cleaner")

	// Collect deletion candidates, skipping episodes that errored or
	// were already cleaned on a previous run.
	if err := u.db.WalkEpisodes(ctx, feedID, func(episode *model.Episode) error {
		switch episode.Status {
		case model.EpisodeError, model.EpisodeCleaned:
			// Skip
		default:
			list = append(list, episode)
		}
		return nil
	}); err != nil {
		return err
	}

	// Keeping at least as many as we have: nothing to delete, and no
	// need to sort.
	if count >= len(list) {
		return nil
	}

	// Newest first, so list[count:] holds the episodes to remove.
	sort.Slice(list, func(i, j int) bool {
		return list[i].PubDate.After(list[j].PubDate)
	})

	for _, episode := range list[count:] {
		logger.WithField("episode_id", episode.ID).Infof("deleting %q", episode.Title)

		if err := u.fs.Delete(ctx, feedID, feed.EpisodeName(feedConfig, episode)); err != nil {
			result = multierror.Append(result, errors.Wrapf(err, "failed to delete episode: %s", episode.ID))
			continue
		}

		// Mark as cleaned so subsequent runs (and feed rebuilds) skip it.
		// The callback parameter is renamed to avoid shadowing the loop
		// variable `episode`.
		if err := u.db.UpdateEpisode(feedID, episode.ID, func(ep *model.Episode) error {
			ep.Status = model.EpisodeCleaned
			return nil
		}); err != nil {
			result = multierror.Append(result, errors.Wrapf(err, "failed to set state for cleaned episode: %s", episode.ID))
			continue
		}
	}

	return result.ErrorOrNil()
}
7 changes: 7 additions & 0 deletions pkg/config/config.go
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -38,6 +38,8 @@ type Feed struct {
CoverArt string `toml:"cover_art"` CoverArt string `toml:"cover_art"`
// Only download episodes that match this regexp (defaults to matching anything) // Only download episodes that match this regexp (defaults to matching anything)
Filters Filters `toml:"filters"` Filters Filters `toml:"filters"`
// Clean is a cleanup policy to use for this feed
Clean Cleanup `toml:"clean"`
} }


type Tokens struct { type Tokens struct {
Expand All @@ -64,6 +66,11 @@ type Database struct {
Dir string `toml:"dir"` Dir string `toml:"dir"`
} }


// Cleanup is a per-feed retention policy controlling how many downloaded
// episodes are kept on disk.
type Cleanup struct {
	// KeepLast defines how many episodes to keep
	KeepLast int `toml:"keep_last"`
}

type Config struct { type Config struct {
// Server is the web server configuration // Server is the web server configuration
Server Server `toml:"server"` Server Server `toml:"server"`
Expand Down
4 changes: 4 additions & 0 deletions pkg/config/config_test.go
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -32,6 +32,8 @@ dir = "/home/user/db/"
update_period = "5h" update_period = "5h"
format = "audio" format = "audio"
quality = "low" quality = "low"
filters = { title = "regex for title here" }
clean = { keep_last = 10 }
` `


f, err := ioutil.TempFile("", "") f, err := ioutil.TempFile("", "")
Expand Down Expand Up @@ -62,6 +64,8 @@ dir = "/home/user/db/"
assert.EqualValues(t, Duration{5 * time.Hour}, feed.UpdatePeriod) assert.EqualValues(t, Duration{5 * time.Hour}, feed.UpdatePeriod)
assert.EqualValues(t, "audio", feed.Format) assert.EqualValues(t, "audio", feed.Format)
assert.EqualValues(t, "low", feed.Quality) assert.EqualValues(t, "low", feed.Quality)
assert.EqualValues(t, "regex for title here", feed.Filters.Title)
assert.EqualValues(t, 10, feed.Clean.KeepLast)
} }


func TestApplyDefaults(t *testing.T) { func TestApplyDefaults(t *testing.T) {
Expand Down

0 comments on commit 35dc604

Please sign in to comment.