/
main.go
171 lines (138 loc) · 4.63 KB
/
main.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
package monitor
import (
	"database/sql"
	"encoding/xml"
	"fmt"
	"io/ioutil"
	"net/http"
	"os"
	"path/filepath"
	"regexp"
	"strconv"
	"strings"
	"time"

	"github.com/go-co-op/gocron"
)
// orderby is the column used to sort channel listings (see channelCheck).
var orderby = "id"
// DB is the shared database handle; presumably opened elsewhere (the code
// elsewhere refers to sqlite) — TODO confirm where it is initialized.
var DB *sql.DB
// ConfigFile is the path to the JSON configuration; resolved in MonitorStart.
var ConfigFile = ""
// ConfigDir is not referenced in this chunk — presumably set/read elsewhere.
var ConfigDir = ""
// main starts the monitor with the default (empty) config path.
// NOTE(review): this file declares `package monitor`, so this main is not a
// real entry point unless the package name changes — confirm intent.
func main() {
	MonitorStart(ConfigFile)
	// BUG FIX: MonitorStart only spawns background goroutines; without
	// blocking here the process would exit immediately and neither
	// scheduler would ever run.
	select {}
}
// MonitorStart resolves the config file location and launches the channel
// and queue schedulers as background goroutines. It returns immediately;
// the caller is responsible for keeping the process alive.
//
// configfile: explicit config path; when empty, the default
// ~/.config/ytmon/ytmon.json is used.
func MonitorStart(configfile string) {
	if configfile == "" {
		homedir, err := os.UserHomeDir()
		checkErr(err)
		// BUG FIX: the original concatenated homedir + ".config/..."
		// without a separator, yielding e.g. "/home/user.config/ytmon/...".
		ConfigFile = filepath.Join(homedir, ".config", "ytmon", "ytmon.json")
	} else {
		ConfigFile = configfile
	}
	go executeChannelMonitor()
	go executeQueueMonitor()
}
// queueCheck scans the database for not-yet-downloaded videos, creates a
// crawljob for each, and afterwards marks every queued video as handled.
func queueCheck() {
	//if queue is running ignore signal
	fmt.Println("Checking queue for new videos")
	videos := getVideoForQueue()
	videosUpdate := []string{}
	for videos.Next() {
		var video Video
		err := videos.Scan(&video.id, &video.yt_videoid, &video.title, &video.description, &video.publisher, &video.publish_date, &video.watched)
		if err != nil {
			// BUG FIX: a Scan failure is not "no videos to queue" — report
			// the real error and skip the broken row instead of continuing
			// as if the scan had succeeded.
			fmt.Println("video scan failed:", err)
			continue
		}
		chaninfo := getChanInfo2(video.publisher)
		processedString := strings.Join(cleanText(video.title), " ")
		savename := chaninfo.displayname + " " + dateConvertTrim(video.publish_date, 10) + " " + processedString
		fmt.Println("downloading ", savename, " ", video.publish_date)
		//download queued video and when complete update status
		mkCrawljob(chaninfo.yt_channelid, video.title, video.yt_videoid, dateConvertTrim(video.publish_date, 10), 0)
		//mark video downloaded
		videosUpdate = append(videosUpdate, video.yt_videoid)
	}
	// BUG FIX: close the result set before writing — the comment at
	// channelCheck notes sqlite "doesn't like to share", and the cursor was
	// previously never released.
	videos.Close()
	// Status updates run after the scan loop so no UPDATE is issued while
	// the SELECT cursor is still open.
	for _, id := range videosUpdate {
		updateVideoStatus(id)
	}
}
// channelCheck fetches the channel list and polls each channel's feed for
// new videos, but only if at least 30 minutes have passed since the last
// check (rate-limiting so restarts don't hammer YouTube).
func channelCheck() {
	lastCheck := getLastCheck()
	diff := time.Now().Unix() - int64(lastCheck)
	fmt.Println("last check ", diff, " seconds ago")
	if diff < 1800 { //this will stop us from getting banned because we kept restarting our program :) 1/2 hour but scans are normally 1 hour interval
		fmt.Println("Queue locked defering channel check until later")
		return
	}
	//put results in a slice to free up the database because sqlite doesn't like to share
	channelQueue := []string{}
	channelDisplaynames := []string{}
	channels := getChannels("1", orderby, "asc")
	for channels.Next() {
		var channel Channel
		err := channels.Scan(&channel.id, &channel.displayname, &channel.dldir, &channel.yt_channelid, &channel.lastpub, &channel.lastcheck, &channel.archive, &channel.notes, &channel.date_added, &channel.last_feed_count)
		if err != nil {
			// BUG FIX: skip rows that fail to scan — the original fell
			// through and queued a half-populated (zero-value) channel.
			fmt.Println("something went wrong with the channel scan", err)
			continue
		}
		channelQueue = append(channelQueue, channel.yt_channelid)
		channelDisplaynames = append(channelDisplaynames, channel.displayname)
	}
	// BUG FIX: release the cursor before the feed fetches below issue
	// further database work.
	channels.Close()
	for i := range channelQueue {
		fmt.Println("Checking ", channelDisplaynames[i], " for updates ", time.Now())
		getChannelVideos(channelQueue[i])
		time.Sleep(time.Second * 3) //wait three seconds between checks so as no to piss off youtube
	}
}
// wordPattern matches runs of Unicode letters, digits, and underscores.
// Compiled once at package scope — the original recompiled the regex on
// every call, which is needless work on a path hit once per queued video.
var wordPattern = regexp.MustCompile(`[\p{L}\d_]+`)

// cleanText splits text into its word tokens, dropping punctuation and
// other separators. Returns nil when text contains no word characters.
func cleanText(text string) []string {
	return wordPattern.FindAllString(text, -1)
}
// getChannelVideos fetches the YouTube RSS feed for chanid and inserts any
// feed entries not already present in the database (watched status "0").
// Transient fetch/parse failures are logged and skipped so the next
// scheduled run can retry — the original panicked, which would kill the
// entire monitor on a single network hiccup.
func getChannelVideos(chanid string) {
	youtubefeed := ytFeedURL + chanid
	resp, err := http.Get(youtubefeed)
	if err != nil {
		fmt.Println("feed fetch failed for", chanid, ":", err)
		return
	}
	// BUG FIX: the response body was never closed, leaking connections on
	// every scheduled scan.
	defer resp.Body.Close()
	byteValue, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		fmt.Println("feed read failed for", chanid, ":", err)
		return
	}
	var feed Feed
	// BUG FIX: the Unmarshal error was silently discarded; a malformed
	// feed previously looked identical to an empty one.
	if err := xml.Unmarshal(byteValue, &feed); err != nil {
		fmt.Println("feed parse failed for", chanid, ":", err)
		return
	}
	//db fields yt_videoid, title, description, publisher, publish_date(unix), watched(if added to download then 1 else 0)
	resultCount := 0
	for i := 0; i < len(feed.Entries); i++ {
		//check and if exist do nothing
		date := friendlyDate(feed.Entries[i].Published)
		unixdate := convertYMDtoUnix(date)
		exists := getVideoExist(feed.Entries[i].VideoId)
		fmt.Println("insert video", feed.Entries[i].VideoId+" "+strconv.Itoa(exists))
		if exists == 0 {
			//insert into database with watched status 0 and begin queue check
			fmt.Println("Inserting", feed.Entries[i].Title)
			// NOTE(review): Title is passed for both the title and the
			// description columns — presumably Description was intended;
			// confirm against the Feed struct before changing.
			insertVideo(feed.Entries[i].VideoId, feed.Entries[i].Title, feed.Entries[i].Title, chanid, unixdate, "0")
			// BUG FIX: removed a stray i++ here — it advanced the loop an
			// extra step, silently skipping the entry right after every
			// inserted video.
			resultCount++
		}
	}
	//update last check timestamp
}
// executeQueueMonitor runs queueCheck once a minute. SingletonMode keeps a
// long-running download from overlapping with the next tick. Blocks forever;
// intended to be launched as a goroutine from MonitorStart.
func executeQueueMonitor() {
	scheduler := gocron.NewScheduler(time.UTC)
	scheduler.SingletonMode()
	// BUG FIX: the error from Do was ignored — a mis-registered job would
	// have produced a scheduler that silently never fired.
	if _, err := scheduler.Every(1).Minutes().Do(queueCheck); err != nil {
		fmt.Println("failed to schedule queue check:", err)
		return
	}
	scheduler.StartBlocking()
}
// executeChannelMonitor runs channelCheck every 60 minutes (channelCheck
// itself also rate-limits to one scan per 30 minutes). Blocks forever;
// intended to be launched as a goroutine from MonitorStart.
func executeChannelMonitor() {
	//set download queue to true to keep the database from being locked
	scheduler2 := gocron.NewScheduler(time.UTC)
	// BUG FIX: the error from Do was ignored — a mis-registered job would
	// have produced a scheduler that silently never fired.
	if _, err := scheduler2.Every(60).Minutes().Do(channelCheck); err != nil {
		fmt.Println("failed to schedule channel check:", err)
		return
	}
	scheduler2.StartBlocking()
}