Skip to content
Snippets Groups Projects
Commit 0d1f509c authored by David Trattnig's avatar David Trattnig
Browse files

Refactored calendar fetcher to work with latest Steering and Tank API.

parent 8e06b6b9
No related branches found
No related tags found
No related merge requests found
...@@ -35,7 +35,6 @@ calendarurl="http://localhost:8000/api/v1/playout" ...@@ -35,7 +35,6 @@ calendarurl="http://localhost:8000/api/v1/playout"
api_show_url="http://localhost:8000/api/v1/shows/${ID}/" api_show_url="http://localhost:8000/api/v1/shows/${ID}/"
# The URL to get playlist details via Tank # The URL to get playlist details via Tank
importerurl="http://localhost:8040/api/v1/shows/${SLUG}/playlists" importerurl="http://localhost:8040/api/v1/shows/${SLUG}/playlists"
# how often should the calendar be fetched in seconds (This determines the time of the last change before a specific show) # how often should the calendar be fetched in seconds (This determines the time of the last change before a specific show)
fetching_frequency=3600 fetching_frequency=3600
...@@ -58,7 +57,7 @@ daemongroup="david" ...@@ -58,7 +57,7 @@ daemongroup="david"
daemonuser="david" daemonuser="david"
[socket] [socket]
socketdir="/home/david/Code/aura/engine" socketdir="/home/david/code/aura/aura-engine/modules/liquidsoap"
[logging] [logging]
logdir="/var/log/aura" logdir="/var/log/aura"
......
...@@ -15,7 +15,7 @@ class CalendarFetcher: ...@@ -15,7 +15,7 @@ class CalendarFetcher:
logging = None logging = None
has_already_fetched = False has_already_fetched = False
fetched_schedule_data = None fetched_schedule_data = None
# another crutch because of the missing TANK # FIXME another crutch because of the missing TANK
used_random_playlist_ids = list() used_random_playlist_ids = list()
def __init__(self, config): def __init__(self, config):
...@@ -23,6 +23,7 @@ class CalendarFetcher: ...@@ -23,6 +23,7 @@ class CalendarFetcher:
self.logger = logging.getLogger("AuraEngine") self.logger = logging.getLogger("AuraEngine")
self.__set_url__("calendar") self.__set_url__("calendar")
self.__set_url__("importer") self.__set_url__("importer")
self.__set_url__("api_show_")
def fetch(self): def fetch(self):
# fetch upcoming schedules from STEERING # fetch upcoming schedules from STEERING
...@@ -91,45 +92,74 @@ class CalendarFetcher: ...@@ -91,45 +92,74 @@ class CalendarFetcher:
schedule = None schedule = None
# fetch data from steering # fetch data from steering
html_response = self.__fetch_data__(self.url["calendar"]) html_response = self.__fetch_data__(servicetype)
# response fails or is empty # FIXME move hardcoded test-data to separate testing logic.
if not html_response: # use testdata if response fails or is empty
self.logger.debug("Got no response from pv!") if not html_response or html_response == b"[]":
self.logger.critical("Got no response from Steering!")
#html_response = '[{"schedule_id":1,"start":"' + (datetime.now() + timedelta(hours=0)).strftime('%Y-%m-%d %H:00:00') + '","end":"' + (datetime.now() + timedelta(hours=1)).strftime('%Y-%m-%d %H:00:00') + '","show_id":9,"show_name":"FROzine","show_hosts":"Sandra Hochholzer, Martina Schweiger","is_repetition":false,"playlist_id":2,"schedule_fallback_id":12,"show_fallback_id":92,"station_fallback_id":1,"rtr_category":"string","comment":"Kommentar","languages":"Sprachen","type":"Typ","category":"Kategorie","topic":"Topic","musicfocus":"Fokus"},{"schedule_id":2,"schedule_start":"' + (datetime.now() + timedelta(hours=1)).strftime('%Y-%m-%d %H:00:00') + '","schedule_end":"' + (datetime.now() + timedelta(hours=2)).strftime('%Y-%m-%d %H:00:00') + '","show_id":10,"show_name":"FROMat","show_hosts":"Sandra Hochholzer, Martina Schweiger","is_repetition":false,"playlist_id":4,"schedule_fallback_id":22,"show_fallback_id":102,"station_fallback_id":1,"rtr_category":"string","comment":"Kommentar","languages":"Sprachen","type":"Typ","category":"Kategorie","topic":"Topic","musicfocus":"Fokus"},{"schedule_id":3,"schedule_start":"' + (datetime.now() + timedelta(hours=2)).strftime('%Y-%m-%d %H:00:00') + '","schedule_end":"' + (datetime.now() + timedelta(hours=3)).strftime('%Y-%m-%d %H:00:00') + '","show_id":11,"show_name":"Radio für Senioren","show_hosts":"Sandra Hochholzer, Martina Schweiger","is_repetition":false,"playlist_id":6,"schedule_fallback_id":32,"show_fallback_id":112,"station_fallback_id":1,"rtr_category":"string","comment":"Kommentar","languages":"Sprachen","type":"Typ","category":"Kategorie","topic":"Topic","musicfocus":"Fokus"}]'
# use testdata if wanted # use testdata if wanted
if self.config.get("use_test_data"): if self.config.get("use_test_data"):
# FIXME move hardcoded test-data to separate testing logic.
html_response = '[{"id":1,"schedule_id":1,"automation-id":1,"className":"TestData","memo":"TestData","show_fundingcategory":"TestData","start":"' + (datetime.now() + timedelta(hours=0)).strftime('%Y-%m-%dT%H:00:00') + '","end":"' + (datetime.now() + timedelta(hours=1)).strftime('%Y-%m-%dT%H:00:00') + '","show_id":9,"show_name":"TestData: FROzine","show_hosts":"TestData: Sandra Hochholzer, Martina Schweiger","title":"TestData:title","is_repetition":false,"playlist_id":2,"schedule_fallback_id":12,"show_fallback_id":92,"station_fallback_id":1,"rtr_category":"string","comment":"TestData: Kommentar","show_languages":"TestData: Sprachen","show_type":"TestData: Typ","show_categories":"TestData: Kategorie","show_topics":"TestData: Topic","show_musicfocus":"TestData: Fokus"},' \ html_response = '[{"id":1,"schedule_id":1,"automation-id":1,"className":"TestData","memo":"TestData","show_fundingcategory":"TestData","start":"' + (datetime.now() + timedelta(hours=0)).strftime('%Y-%m-%dT%H:00:00') + '","end":"' + (datetime.now() + timedelta(hours=1)).strftime('%Y-%m-%dT%H:00:00') + '","show_id":9,"show_name":"TestData: FROzine","show_hosts":"TestData: Sandra Hochholzer, Martina Schweiger","title":"TestData:title","is_repetition":false,"playlist_id":2,"schedule_fallback_id":12,"show_fallback_id":92,"station_fallback_id":1,"rtr_category":"string","comment":"TestData: Kommentar","show_languages":"TestData: Sprachen","show_type":"TestData: Typ","show_categories":"TestData: Kategorie","show_topics":"TestData: Topic","show_musicfocus":"TestData: Fokus"},' \
'{"id":2,"schedule_id":2,"automation-id":1,"className":"TestData","memo":"TestData","show_fundingcategory":"TestData","start":"' + (datetime.now() + timedelta(hours=1)).strftime('%Y-%m-%dT%H:00:00') + '","end":"' + (datetime.now() + timedelta(hours=2)).strftime('%Y-%m-%dT%H:00:00') + '","show_id":10,"show_name":"TestData: FROMat","show_hosts":"TestData: Sandra Hochholzer, Martina Schweiger","title":"TestData:title","is_repetition":false,"playlist_id":4,"schedule_fallback_id":22,"show_fallback_id":102,"station_fallback_id":1,"rtr_category":"TestData: string","comment":"TestData: Kommentar","show_languages":"TestData: Sprachen","show_type":"TestData: Typ","show_categories":"TestData: Kategorie","show_topics":"TestData: Topic","show_musicfocus":"TestData: Fokus"},' \ '{"id":2,"schedule_id":2,"automation-id":1,"className":"TestData","memo":"TestData","show_fundingcategory":"TestData","start":"' + (datetime.now() + timedelta(hours=1)).strftime('%Y-%m-%dT%H:00:00') + '","end":"' + (datetime.now() + timedelta(hours=2)).strftime('%Y-%m-%dT%H:00:00') + '","show_id":10,"show_name":"TestData: FROMat","show_hosts":"TestData: Sandra Hochholzer, Martina Schweiger","title":"TestData:title","is_repetition":false,"playlist_id":4,"schedule_fallback_id":22,"show_fallback_id":102,"station_fallback_id":1,"rtr_category":"TestData: string","comment":"TestData: Kommentar","show_languages":"TestData: Sprachen","show_type":"TestData: Typ","show_categories":"TestData: Kategorie","show_topics":"TestData: Topic","show_musicfocus":"TestData: Fokus"},' \
'{"id":3,"schedule_id":3,"automation-id":1,"className":"TestData","memo":"TestData","show_fundingcategory":"TestData","start":"' + (datetime.now() + timedelta(hours=2)).strftime('%Y-%m-%dT%H:00:00') + '","end":"' + (datetime.now() + timedelta(hours=3)).strftime('%Y-%m-%dT%H:00:00') + '","show_id":11,"show_name":"TestData: Radio für Senioren","show_hosts":"TestData: Sandra Hochholzer, Martina Schweiger","title":"TestData:title","is_repetition":false,"playlist_id":6,"schedule_fallback_id":32,"show_fallback_id":112,"station_fallback_id":1,"rtr_category":"TestData: string","comment":"TestData: Kommentar","show_languages":"TestData: Sprachen","show_type":"TestData: Typ","show_categories":"TestData: Kategorie","show_topics":"TestData: Topic","show_musicfocus":"TestData: Fokus"}]' '{"id":3,"schedule_id":3,"automation-id":1,"className":"TestData","memo":"TestData","show_fundingcategory":"TestData","start":"' + (datetime.now() + timedelta(hours=2)).strftime('%Y-%m-%dT%H:00:00') + '","end":"' + (datetime.now() + timedelta(hours=3)).strftime('%Y-%m-%dT%H:00:00') + '","show_id":11,"show_name":"TestData: Radio für Senioren","show_hosts":"TestData: Sandra Hochholzer, Martina Schweiger","title":"TestData:title","is_repetition":false,"playlist_id":6,"schedule_fallback_id":32,"show_fallback_id":112,"station_fallback_id":1,"rtr_category":"TestData: string","comment":"TestData: Kommentar","show_languages":"TestData: Sprachen","show_type":"TestData: Typ","show_categories":"TestData: Kategorie","show_topics":"TestData: Topic","show_musicfocus":"TestData: Fokus"}]'
self.logger.critical("Hardcoded Response") self.logger.critical("Using hardcoded Response!")
else: else:
html_response = "{}" html_response = "{}"
# convert to dict # convert to dict
schedule = simplejson.loads(html_response) schedule = simplejson.loads(html_response)
# check data # check data
self.logger.critical("no JSON data checks. I believe what i get here") self.logger.critical("no JSON data checks. I believe what i get here")
self.fetched_schedule_data = self.remove_unnecessary_data(schedule) #self.fetched_schedule_data = self.remove_unnecessary_data(schedule)
return self.remove_unnecessary_data(schedule_from_pv)
# ------------------------------------------------------------------------------------------ # # ------------------------------------------------------------------------------------------ #
def __fetch_schedule_playlists__(self): def __fetch_schedule_playlists__(self):
# store fetched entries => do not have to fetch playlist_id more than once # store fetched entries => do not have to fetch playlist_id more than once
fetched_entries=[] fetched_entries=[]
self.logger.warning("only fetching normal playlists. no fallbacks") try:
for schedule in self.fetched_schedule_data: self.logger.warning("only fetching normal playlists. no fallbacks")
# retrieve playlist and the fallbacks for every schedule for schedule in self.fetched_schedule_data:
# if a playlist (like station_fallback) is already fetched, it is not fetched again but reused
schedule["playlist"] = self.__fetch_schedule_playlist__(schedule, "playlist_id", fetched_entries) # Enhance schedule with details of show (e.g. slug)
# schedule["schedule_fallback"] = self.__fetch_schedule_playlist__(schedule, "schedule_fallback_id", fetched_entries) schedule = self.__fetch_show_details__(schedule)
# schedule["show_fallback"] = self.__fetch_schedule_playlist__(schedule, "show_fallback_id", fetched_entries) # retrieve playlist and the fallbacks for every schedule
# schedule["station_fallback"] = self.__fetch_schedule_playlist__(schedule, "station_fallback_id", fetched_entries) # if a playlist (like station_fallback) is already fetched, it is not fetched again but reused
schedule["playlist"] = self.__fetch_schedule_playlist__(schedule, "playlist_id", fetched_entries)
#schedule["schedule_fallback"] = self.__fetch_schedule_playlist__(schedule, "schedule_fallback_id", fetched_entries)
#schedule["show_fallback"] = self.__fetch_schedule_playlist__(schedule, "show_fallback_id", fetched_entries)
#schedule["station_fallback"] = self.__fetch_schedule_playlist__(schedule, "station_fallback_id", fetched_entries)
self.logger.info(str(schedule))
except Exception as e:
self.logger.error("Error: "+str(e))
# ------------------------------------------------------------------------------------------ #
def __fetch_show_details__(self, schedule):
    """
    Enrich a single schedule entry with details of its show as returned
    by the Steering API (currently only the show's slug is copied over).
    """
    # Resolve the "api_show_" service URL, substituting the show's ID.
    response = self.__fetch_data__("api_show_", "${ID}", str(schedule["show_id"]))
    details = simplejson.loads(response)

    # Augment the "schedules" entry with selected "show" properties.
    schedule["show_slug"] = details["slug"]
    # ... copy further show properties here when needed ...

    return schedule
# ------------------------------------------------------------------------------------------ # # ------------------------------------------------------------------------------------------ #
def __fetch_schedule_playlist__(self, schedule, id_name, fetched_schedule_entries): def __fetch_schedule_playlist__(self, schedule, id_name, fetched_schedule_entries):
servicetype = "importer"
# set playlist_id (in testenvironment always null => no idea) # set playlist_id (in testenvironment always null => no idea)
if id_name not in schedule or schedule[id_name] is None: if id_name not in schedule or schedule[id_name] is None:
playlist_id = 1 playlist_id = 1
...@@ -138,13 +168,28 @@ class CalendarFetcher: ...@@ -138,13 +168,28 @@ class CalendarFetcher:
# set url # set url
#url = self.url["importer"] + schedule["show_name"] + "/playlists/" + str(playlist_id) #url = self.url["importer"] + schedule["show_name"] + "/playlists/" + str(playlist_id)
url = self.url["importer"] + "public" + "/playlists/" + str(playlist_id) #url = self.url["importer"] + "public" + "/playlists/" + str(playlist_id)
# fetch playlists from TANK
if not "show_slug" in schedule:
raise ValueError("Missing 'show_slug' for schedule", schedule)
# fetch data from importer slug = str(schedule["show_slug"])
json_response = self.__fetch_data__(url) json_response = self.__fetch_data__(servicetype, "${SLUG}", slug)
# use testdata if wanted # use testdata if wanted
if not json_response and self.config.get("use_test_data"): if not json_response and self.config.get("use_test_data"):
self.logger.warn("Using test-data for fetch-schedule-playlist")
use_testdata = True
# if a playlist is already fetched, do not fetch it again
for entry in fetched_schedule_entries:
if entry["playlist_id"] == schedule[id_name]:
self.logger.debug("playlist #" + str(schedule[id_name]) + " already fetched")
return entry
if use_testdata:
# FIXME move hardcoded test-data to separate testing logic.
json_response = self.create_test_data(id_name, schedule) json_response = self.create_test_data(id_name, schedule)
# convert to list # convert to list
...@@ -167,27 +212,48 @@ class CalendarFetcher: ...@@ -167,27 +212,48 @@ class CalendarFetcher:
return e return e
# ------------------------------------------------------------------------------------------ # # ------------------------------------------------------------------------------------------ #
def __fetch_data__(self, url, parameter = ""): def __fetch_data__(self, type, placeholder=None, value=None):
# init html_response # Init html_response
html_response = b'' html_response = b''
url = self.__build_url__(type, placeholder, value)
# open an url and read the data # Send request to the API and read the data
if parameter == "": try:
request = urllib.request.Request(url) if type not in self.data:
else: if self.url[type] == "":
request = urllib.request.Request(url, parameter) return False
request = urllib.request.Request(url)
else:
request = urllib.request.Request(url, self.data[type])
response = urllib.request.urlopen(request)
html_response = response.read()
response = urllib.request.urlopen(request) except (urllib.error.URLError, IOError, ValueError) as e:
html_response = response.read() self.logger.error("Cannot connect to " + self.url[type] +
" (type: " + type + ")! Reason: " + str(e.reason))
#if not self.has_already_fetched: # first fetch
# self.logger.critical("exiting fetch data thread..")
# sys.exit()
self.has_already_fetched = True self.has_already_fetched = True
return html_response.decode("utf-8") return html_response.decode("utf-8")
# ------------------------------------------------------------------------------------------ #
def __build_url__(self, type, placeholder=None, value=None):
    """
    Return the configured URL for the given service *type*, replacing
    *placeholder* (e.g. "${ID}" or "${SLUG}") with *value* when one is given.
    """
    built = self.url[type]
    return built.replace(placeholder, value) if placeholder else built
# ------------------------------------------------------------------------------------------ # # ------------------------------------------------------------------------------------------ #
def remove_unnecessary_data(self, schedule): def remove_unnecessary_data(self, schedule):
reduced_schedule = self.remove_data_more_than_24h_in_the_future(self.remove_data_in_the_past(schedule)) count_before = len(schedule)
return reduced_schedule schedule = self.remove_data_more_than_24h_in_the_future(schedule)
schedule = self.remove_data_in_the_past(schedule)
count_after = len(schedule)
self.logger.info("Removed %d unnecessary schedules from response." % (count_before - count_after))
return schedule
# ------------------------------------------------------------------------------------------ # # ------------------------------------------------------------------------------------------ #
def remove_data_more_than_24h_in_the_future(self, schedule_from_pv): def remove_data_more_than_24h_in_the_future(self, schedule_from_pv):
act_list = [] act_list = []
...@@ -231,6 +297,7 @@ class CalendarFetcher: ...@@ -231,6 +297,7 @@ class CalendarFetcher:
self.used_random_playlist_ids.append(rand_id) self.used_random_playlist_ids.append(rand_id)
# FIXME move hardcoded test-data to separate testing logic.
# HARDCODED Testdata # HARDCODED Testdata
if id_name != "playlist_id": if id_name != "playlist_id":
# FALLBACK TESTDATA # FALLBACK TESTDATA
...@@ -275,4 +342,4 @@ class CalendarFetcher: ...@@ -275,4 +342,4 @@ class CalendarFetcher:
self.logger.info("Using 'randomized' playlist: " + json_response + " for " + id_name[:-3] + " for show " + schedule["show_name"] + " starting @ " + schedule["start"]) self.logger.info("Using 'randomized' playlist: " + json_response + " for " + id_name[:-3] + " for show " + schedule["show_name"] + " starting @ " + schedule["start"])
return json_response return json_response
\ No newline at end of file
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment