Greg daemon mode #12

Closed
wants to merge 8 commits
117 changes: 117 additions & 0 deletions greg/daemon.py
@@ -0,0 +1,117 @@
# Copyright (C) 2013 Pierre Marijon <pierre@marion.fr>
#
# This file is part of Greg.
#
# Greg is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Greg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Greg. If not, see <http://www.gnu.org/licenses/>.

import os
import re
import sys
import time
import configparser
from datetime import datetime
from time import sleep
import daemonic

from greg.greg import *

class Daemon():
    def __init__(self, args, time=0, user=None, log="/var/log/greg/", pid='/var/run/greg/greg.pid'):
        self.daemon_user = user
        self.pidfile_path = pid
        self.log_dir = log
        self.sleep_time = time
        self.session = Session(args)
        self.feeds_time = configparser.ConfigParser()
        self.feeds_time_path = os.path.join(self.session.data_dir, "feed_time")
        self.feeds_time.read(self.feeds_time_path)
        self.date_format_write = "%Y-%m-%d %H:%M:%S%z"
        self.date_format_podcast = "%a, %d %b %Y %H:%M:%S %z"

    def start(self):
        out = open(os.path.join(self.log_dir, "message.log"), "a")
        err = open(os.path.join(self.log_dir, "error.log"), "a")
        d = daemonic.daemon(pidfile=self.pidfile_path, stdout=out, stderr=err, user=self.daemon_user)

        with d:
            while True:
                self.__daemon_work()
                sleep(self.sleep_time)

    def stop(self):
        d = daemonic.daemon(pidfile=self.pidfile_path)
        d.stop()

    def __daemon_work(self):
        print("Checking podcasts " + datetime.now().strftime(self.date_format_podcast) + ".")
        # Re-read the data and config files after the daemon has switched user
        self.session.config_filename_user = self.session.retrieve_config_file()
        self.session.data_dir = self.session.retrieve_data_directory()
        self.session.data_filename = os.path.join(self.session.data_dir, "data")
        self.session.feeds.read(self.session.data_filename)
        self.session.config.read([config_filename_global, self.session.config_filename_user])
        self.feeds_time_path = os.path.join(self.session.data_dir, "feed_time")
        self.feeds_time.read(self.feeds_time_path)

        # Check each feed for updates
        for feed_name in self.session.list_feeds():
            print("Checking feed " + feed_name)

            # Find the dates of the latest remote and local entries
            podcast = create_podcast(self.session.feeds[feed_name]["url"])

            remote_date_str = re.sub(r"(.*\s\+\d\d):(\d\d)$", r"\1\2", podcast.entries[0]["published"])
            remote_date = datetime.strptime(remote_date_str, self.date_format_podcast)

            # If this is the first check of this feed, download its latest entry
            if feed_name not in self.feeds_time.sections():
                self.__download_podcast(feed_name, podcast, remote_date)
                continue

            local_date_str = re.sub(r"(.*\+\d\d):(\d\d)$", r"\1\2", self.feeds_time[feed_name]["date"])
            local_date = datetime.strptime(local_date_str, self.date_format_write)

            # If the remote entry is newer than the local one, download the latest podcast
            if remote_date > local_date:
                self.__download_podcast(feed_name, podcast, remote_date)
            else:
                print(feed_name + " podcast is up to date.")

    def __download_podcast(self, feed_name, podcast, remote_date):
        # Download the latest entry of the podcast
        load_feed = Feed(self.session, feed_name, podcast)
        load_feed.info = []
        load_feed.entrylinks = []
        load_feed.linkdate = list(time.localtime())
        download_entry(load_feed, podcast.entries[0])

        # Store the date of the downloaded entry in local storage
        entry = {}
        entry["date"] = remote_date.strftime(self.date_format_write)
        self.feeds_time[feed_name] = entry
        with open(self.feeds_time_path, 'w') as configfile:
            self.feeds_time.write(configfile)

def main(args):
    # Create the daemon object
    if args["command"] == "start":
        if not args["time"]:
            sys.exit("You need to set the time between two checks.")
        elif not args["user"]:
            sys.exit("You need to set the user the worker process runs as.")
        else:
            daemon = Daemon(args, time=args["time"], user=args["user"], log=args["log_dir"], pid=args["pid_file"])
            daemon.start()
    elif args["command"] == "stop":
        daemon = Daemon(args, log=args["log_dir"], pid=args["pid_file"])
        daemon.stop()
    else:
        sys.exit("You need to specify start or stop.")
21 changes: 13 additions & 8 deletions greg/greg.py
@@ -625,14 +625,8 @@ def check(args):
            name = args["feed"]
        except KeyError:
            sys.exit("You don't appear to have a feed with that name.")
    try:
        podcast = feedparser.parse(url)
        wentwrong = "urlopen" in str(podcast["bozo_exception"])
    except KeyError:
        wentwrong = False
    if wentwrong:
        sys.exit("I cannot check that podcast now.\
You are probably not connected to the internet.")

    podcast = create_podcast(url)
    for entry in enumerate(podcast.entries):
        listentry = list(entry)
        print(listentry[0], end=": ")
@@ -650,6 +644,17 @@ def check(args):
        dump = [name, podcast]
        pickle.dump(dump, dumpfile)

def create_podcast(url):
    try:
        podcast = feedparser.parse(url)
        wentwrong = "urlopen" in str(podcast["bozo_exception"])
    except KeyError:
        wentwrong = False
    if wentwrong:
        sys.exit("I cannot check that podcast now.\
You are probably not connected to the internet.")
    else:
        return podcast

def download(args):
    session = Session(args)
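For context, a rough sketch of the factored-out helper in use (editorial illustration, not part of the diff; the feed URL is a placeholder): both check() above and the daemon's __daemon_work() now fetch and sanity-check feeds through the same code path.

from greg.greg import create_podcast

# Hypothetical feed URL, for illustration only.
podcast = create_podcast("http://example.com/podcast.rss")

if podcast.entries:
    # The daemon only ever looks at the newest entry.
    latest = podcast.entries[0]
    print(latest["title"], latest["published"])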
10 changes: 10 additions & 0 deletions greg/gregparser.py
@@ -19,6 +19,7 @@
from urllib.parse import urlparse

import greg.greg
import greg.daemon

# defining the from_date type
def from_date(string):
@@ -97,6 +98,15 @@ def url(string):
parser_remove.add_argument('name', help='the name of the feed you want to remove')
parser_remove.set_defaults(func=greg.greg.remove)

# create the parser for the "daemon" command
parser_daemon = subparsers.add_parser('daemon', help='run greg as a daemon')
parser_daemon.add_argument('command', help='the daemon command: start or stop', action='store')
parser_daemon.add_argument('-t', '--time', type=int, help='the time, in seconds, between two feed checks')
parser_daemon.add_argument('-u', '--user', help='the user the daemon runs as')
parser_daemon.add_argument('-l', '--log-dir', help='the directory where log files are written', default="/var/log/greg/")
parser_daemon.add_argument('-p', '--pid-file', help='the file where the pid is written', default='/var/run/greg/greg.pid')
parser_daemon.set_defaults(func=greg.daemon.main)

def main(): # parse the args and call whatever function was selected
    try:
        args = parser.parse_args(sys.argv[1:])
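For reference, a rough sketch of what the new subcommand hands to greg.daemon.main (editorial illustration, not part of the PR; it assumes greg's main() converts the parsed namespace to a dict with vars(), which is how daemon.main indexes args):

# greg daemon start -t 3600 -u greg
# would give daemon.main roughly:
args = {
    "command": "start",
    "time": 3600,                          # seconds between checks
    "user": "greg",
    "log_dir": "/var/log/greg/",           # default from --log-dir
    "pid_file": "/var/run/greg/greg.pid",  # default from --pid-file
}
# "greg daemon stop" only needs the pid file to signal the running daemon.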
4 changes: 3 additions & 1 deletion setup.py
@@ -10,6 +10,8 @@
      url='https://github.com/manolomartinez/greg',
      packages=['greg'],
      scripts=['bin/greg'],
      data_files=[('/etc',['data/greg.conf'])],
      data_files=[('/etc',['data/greg.conf']),
                  ('/var/run/greg/',[]),
                  ('/var/log/greg/',[])],
      license = 'GPLv3'
      )