From f46be130544fded68fba919772bc21a6d3113dde Mon Sep 17 00:00:00 2001 From: Gottfried Gaisbauer <gogo@servus.at> Date: Fri, 8 Mar 2019 07:38:57 +0100 Subject: [PATCH] separated fetching from calendarservice --- aura.py | 8 +- libraries/base/config.py | 7 - modules/scheduling/calendar.py | 244 ++----------------------- modules/scheduling/calender_fetcher.py | 243 ++++++++++++++++++++++++ 4 files changed, 260 insertions(+), 242 deletions(-) create mode 100644 modules/scheduling/calender_fetcher.py diff --git a/aura.py b/aura.py index 43e9b09f..9b8511d7 100755 --- a/aura.py +++ b/aura.py @@ -61,12 +61,12 @@ class Aura(AuraLogger): # self.controller = AuraController(self.config) # create scheduler and ls_communicator -# self.liquidsoapcommunicator = LiquidSoapCommunicator(self.config) -# self.scheduler = AuraScheduler(self.config) + self.liquidsoapcommunicator = LiquidSoapCommunicator(self.config) + self.scheduler = AuraScheduler(self.config) # give both a reference of each other -# self.liquidsoapcommunicator.scheduler = self.scheduler -# self.scheduler.liquidsoapcommunicator = self.liquidsoapcommunicator + self.liquidsoapcommunicator.scheduler = self.scheduler + self.scheduler.liquidsoapcommunicator = self.liquidsoapcommunicator # create the redis adapter # self.messenger = ServerRedisAdapter() diff --git a/libraries/base/config.py b/libraries/base/config.py index 8e7cd6fe..9076cbcf 100644 --- a/libraries/base/config.py +++ b/libraries/base/config.py @@ -26,9 +26,6 @@ from modules.base.config import ConfigReader class AuraConfig: - """ - AuraCommon handles logger, reads and stores config - """ config = None def __init__(self): @@ -37,8 +34,4 @@ class AuraConfig: self.read_config() def read_config(self): - """ - reads aura.ini - :return: - """ self.config.load_config() diff --git a/modules/scheduling/calendar.py b/modules/scheduling/calendar.py index 537de5d6..2ed4e174 100644 --- a/modules/scheduling/calendar.py +++ b/modules/scheduling/calendar.py @@ -37,24 +37,20 @@ from datetime import datetime, timedelta from libraries.database.broadcasts import Schedule, ScheduleEntry from libraries.enum.auraenumerations import ScheduleEntryType from modules.communication.redis.messenger import RedisMessenger +from modules.scheduling.calender_fetcher import CalendarFetcher class AuraCalendarService(threading.Thread): messenger = RedisMessenger() until = "" playlistdir = "" - xmlplaylist = range(0) - has_already_fetched = False queue = None config = None debug = False - _stop_event = None logger = None - url = dict() - data = dict() - # another crutch because of the missing TANK - used_random_playlist_ids = list() + _stop_event = None + calendar_fetcher = None """ Fetching playlist data, write it into the database and notify service @@ -74,8 +70,7 @@ class AuraCalendarService(threading.Thread): self._stop_event = threading.Event() - self.__set_url__("calendar") - self.__set_url__("importer") + self.calendar_fetcher = CalendarFetcher(config) # ------------------------------------------------------------------------------------------ # def set_date_from(self, date): @@ -125,25 +120,22 @@ class AuraCalendarService(threading.Thread): """ try: - # fetch upcoming schedules from STEERING - self.logger.debug("Fetching schedules from STEERING") - self.__fetch_schedule_data__() - # fetch playlist and fallbacks to the schedules from TANK - self.logger.debug("Fetching playlists from TANK") - self.__fetch_schedule_entry_data__() - - # drop everything what is more than 30 minutes in the future to avoid strange sync 
errors - # the programme is still in the memory of engine and reloaded, when this fetching is finished. + + self.fetched_schedule_data = self.calendar_fetcher.fetch() + + # drop everything what is in the future + # the programme is still in the memory of engine and is being reloaded, when this fetching cycle is finished. self.drop_the_future(timedelta(minutes=00)) schedule = None for schedule in self.fetched_schedule_data: + # skip schedule if no start or end is given if "start" not in schedule: - self.logger.warning("No start of schedule given. skipping the schedule: "+str(schedule)) + self.logger.warning("No start of schedule given. skipping schedule: "+str(schedule)) continue if "end" not in schedule: - self.logger.warning("No end of schedule given. skipping the schedule: "+str(schedule)) + self.logger.warning("No end of schedule given. skipping schedule: "+str(schedule)) continue # store the schedule @@ -156,7 +148,7 @@ class AuraCalendarService(threading.Thread): self.store_schedule_playlist(schedule_db, schedule, "station_fallback", 3) # release the mutex - self.queue.put(schedule) #"fetching_finished") + self.queue.put(schedule) except Exception as e: self.logger.critical("Exceptions raised while fetching new schedules and playlists: " + str(e)) self.queue.put("fetching_aborted " + str(e)) @@ -288,205 +280,6 @@ class AuraCalendarService(threading.Thread): sec2 = int(datetime.strptime(end[0:16].replace(" ","T"),"%Y-%m-%dT%H:%M").strftime("%s")); return (sec2 - sec1); - # ------------------------------------------------------------------------------------------ # - def __fetch_schedule_entry_data__(self): - # store fetched entries => do not have to fetch playlist_id more than once - fetched_entries=[] - - try: - for schedule in self.fetched_schedule_data: - # retrieve playlist and the fallbacks for every schedule - # if a playlist (like station_fallback) is already fetched, it is not fetched again but reused - schedule["playlist"] = self.__fetch_schedule_entries__(schedule, "playlist_id", fetched_entries) - schedule["schedule_fallback"] = self.__fetch_schedule_entries__(schedule, "schedule_fallback_id", fetched_entries) - schedule["show_fallback"] = self.__fetch_schedule_entries__(schedule, "show_fallback_id", fetched_entries) - schedule["station_fallback"] = self.__fetch_schedule_entries__(schedule, "station_fallback_id", fetched_entries) - - self.logger.info(str(schedule)) - - except Exception as e: - self.logger.error(str(e)) - - # ------------------------------------------------------------------------------------------ # - def __fetch_schedule_entries__(self, schedule, id_name, fetched_schedule_entries): - servicetype = "importer" - use_testdata = False - - # fetch data from importer - json_response = self.__fetch_data__(servicetype) - if not json_response: - use_testdata = True - - # if a playlist is already fetched, do not fetch it again - for entry in fetched_schedule_entries: - if entry["playlist_id"] == schedule[id_name]: - self.logger.debug("playlist #" + str(schedule[id_name]) + " already fetched") - return entry - - # generate testdata - if use_testdata: - json_response = self.create_test_data(id_name, schedule) - - # convert to list - try: - schedule_entries = simplejson.loads(json_response) - except Exception as e: - self.logger.critical("Cannot convert playlist from importer into list") - schedule_entries = list() - - if "entries" in schedule_entries: - for entry in schedule_entries["entries"]: - if entry["source"].startswith("file"): - e = entry["source"][7:] # filter 
file:// out - if not os.path.isfile(e): - self.logger.warning("File", e, "does not exist!") - - fetched_schedule_entries.append(schedule_entries) - - return schedule_entries - - def create_test_data(self, id_name, schedule): - import random - rand_id = random.randint(1, 10000) - - while rand_id in self.used_random_playlist_ids: - rand_id = random.randint(1, 10000) - - self.used_random_playlist_ids.append(rand_id) - - # HARDCODED Testdata - if id_name != "playlist_id": - # FALLBACK TESTDATA - - if rand_id % 3 == 0: # playlist fallback - json_response = '{"playlist_id":' + str( - rand_id) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"file:///var/audio/fallback/NightmaresOnWax/DJ-Kicks/02 - Only Child - Breakneck.flac"}]}' - elif rand_id % 2 == 0: # stream fallback - json_response = '{"playlist_id":' + str( - rand_id) + ',"entries":[{"source":"http://chill.out.airtime.pro:8000/chill_a"}]}' - else: # pool fallback - json_response = '{"playlist_id":' + str(rand_id) + ',"entries":[{"source":"pool:///liedermacherei"}]}' - - schedule[id_name] = rand_id - - elif schedule[id_name] == 0 or schedule[id_name] is None: - # this happens when playlist id is not filled out in pv - # json_response = '{"playlist_id": 0}' - - if rand_id % 4 == 0: # playlist with two files - json_response = '{"playlist_id":' + str( - rand_id) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"file:///var/audio/fallback/NightmaresOnWax/DJ-Kicks/02 - Only Child - Breakneck.flac"}]}' - elif rand_id % 3 == 0: # playlist with jingle and then linein - json_response = '{"playlist_id":' + str( - rand_id) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"linein://1"}]}' - elif rand_id % 2 == 0: # playlist with jingle and then http stream - json_response = '{"playlist_id":' + str( - rand_id) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"http://chill.out.airtime.pro:8000/chill_a"}]}' - else: # pool playlist - json_response = '{"playlist_id":' + str(rand_id) + ',"entries":[{"source":"pool:///hiphop"}]}' - - schedule[id_name] = rand_id - - elif schedule[id_name] % 4 == 0: # playlist with two files - json_response = '{"playlist_id":' + str(schedule[id_name]) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"file:///var/audio/fallback/NightmaresOnWax/DJ-Kicks/01 - Type - Slow Process.flac"}]}' - elif schedule[id_name] % 3 == 0: # playlist with jingle and then http stream - json_response = '{"playlist_id":' + str(schedule[id_name]) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"linein://0"}]}' - elif schedule[id_name] % 2 == 0: # playlist with jingle and then linein - json_response = '{"playlist_id":' + str(schedule[id_name]) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"http://stream.fro.at:80/fro-128.ogg"}]}' - else: # pool playlist - json_response = '{"playlist_id":' + str(schedule[id_name]) + ',"entries":[{"source":"pool:///chillout"}]}' - - self.logger.info("Using 'randomized' playlist: " + json_response + " for " + id_name[:-3] + " for show " + schedule["show_name"] + " starting @ " + schedule["start"]) - - return json_response - - # ------------------------------------------------------------------------------------------ # - def __fetch_schedule_data__(self): - servicetype = "calendar" - use_testdata = False - - html_response = self.__fetch_data__(servicetype) - if not html_response or html_response == b"[]": - self.logger.debug("Got 
no response: Using testdata") - use_testdata = True - - # if an error occours => use testdata - if use_testdata: - html_response = '[{"schedule_id":1,"start":"' + (datetime.now() + timedelta(hours=0)).strftime('%Y-%m-%d %H:00:00') + '","end":"' + (datetime.now() + timedelta(hours=1)).strftime('%Y-%m-%d %H:00:00') + '","show_id":9,"show_name":"FROzine","show_hosts":"Sandra Hochholzer, Martina Schweiger","is_repetition":false,"playlist_id":2,"schedule_fallback_id":12,"show_fallback_id":92,"station_fallback_id":1,"rtr_category":"string","comment":"Kommentar","languages":"Sprachen","type":"Typ","category":"Kategorie","topic":"Topic","musicfocus":"Fokus"},{"schedule_id":2,"schedule_start":"' + (datetime.now()+timedelta(hours=1)).strftime('%Y-%m-%d %H:00:00') + '","schedule_end":"' + (datetime.now()+timedelta(hours=2)).strftime('%Y-%m-%d %H:00:00') + '","show_id":10,"show_name":"FROMat","show_hosts":"Sandra Hochholzer, Martina Schweiger","is_repetition":false,"playlist_id":4,"schedule_fallback_id":22,"show_fallback_id":102,"station_fallback_id":1,"rtr_category":"string","comment":"Kommentar","languages":"Sprachen","type":"Typ","category":"Kategorie","topic":"Topic","musicfocus":"Fokus"},{"schedule_id":3,"schedule_start":"' + (datetime.now()+timedelta(hours=2)).strftime('%Y-%m-%d %H:00:00') + '","schedule_end":"' + (datetime.now() + timedelta(hours=3)).strftime('%Y-%m-%d %H:00:00') + '","show_id":11,"show_name":"Radio für Senioren","show_hosts":"Sandra Hochholzer, Martina Schweiger","is_repetition":false,"playlist_id":6,"schedule_fallback_id":32,"show_fallback_id":112,"station_fallback_id":1,"rtr_category":"string","comment":"Kommentar","languages":"Sprachen","type":"Typ","category":"Kategorie","topic":"Topic","musicfocus":"Fokus"}]' - - try: - schedule_from_pv = simplejson.loads(html_response) - except Exception as e: - self.logger.critical("Cannot fetch schedule entries from PV") - sys.exit() - - # check data - self.logger.critical("Hardcoded Response && no JSON data checks. 
I believe what i get here") - - d = self.remove_data_more_than_24h_in_the_future(schedule_from_pv) - self.fetched_schedule_data = self.remove_data_in_the_past(d) - - return self.fetched_schedule_data - - # ------------------------------------------------------------------------------------------ # - def remove_data_more_than_24h_in_the_future(self, schedule_from_pv): - act_list = [] - now = datetime.now() - now_plus_24hours = now + timedelta(hours=24) - - for s in schedule_from_pv: - date_start = datetime.strptime(s["start"], "%Y-%m-%dT%H:%M:%S") - - # append only elements which are close enough to now - if date_start <= now_plus_24hours and date_start >= now - timedelta(hours=1): - act_list.append(s) - - return act_list - - # ------------------------------------------------------------------------------------------ # - def remove_data_in_the_past(self, schedule_from_pv): - act_list = [] - now = datetime.now() - - for index,curr in enumerate(schedule_from_pv[:-1]): - date_start = datetime.strptime(curr["start"], "%Y-%m-%dT%H:%M:%S") - date_next_start = datetime.strptime(schedule_from_pv[index+1]["start"], "%Y-%m-%dT%H:%M:%S") - - # append all elements in the future - if date_start >= now: - act_list.append(curr) - # append the one which is now playing - if date_start <= now and date_next_start >= now: - act_list.append(curr) - - return act_list - - # ------------------------------------------------------------------------------------------ # - def __fetch_data__(self, type): - # init html_response - html_response = "" - - # open an url and read the data - try: - if type not in self.data: - if self.url[type] == "": - return False - request = urllib.request.Request(self.url[type]) - else: - request = urllib.request.Request(self.url[type], self.data[type]) - - response = urllib.request.urlopen(request) - html_response = response.read() - - except (urllib.error.URLError, IOError, ValueError) as e: - self.logger.error("Cannot connect to " + self.url[type] + "! 
reason: " + str(e.reason)) - if not self.has_already_fetched: # first fetch - self.logger.critical("exiting fetch data thread..") - sys.exit() - - self.has_already_fetched = True - return html_response - # ------------------------------------------------------------------------------------------ # def get_length(self, entry): if entry is None or entry.type == ScheduleEntryType.STREAM or entry.type == ScheduleEntryType.LIVE_0 or entry.type == ScheduleEntryType.LIVE_1 or entry.type == ScheduleEntryType.LIVE_2 or entry.type == ScheduleEntryType.LIVE_3 or entry.type == ScheduleEntryType.LIVE_4: @@ -495,17 +288,6 @@ class AuraCalendarService(threading.Thread): audio_file = FLAC(entry.cleansource) return audio_file.info.length - # ------------------------------------------------------------------------------------------ # - def __set_url__(self, type): - url = self.config.get(type+"url") - pos = url.find("?") - - if pos > 0: - self.url[type] = url[0:pos] - self.data[type] = url[pos:] - else: - self.url[type] = url - # ------------------------------------------------------------------------------------------ # def stop(self): self._stop_event.set() diff --git a/modules/scheduling/calender_fetcher.py b/modules/scheduling/calender_fetcher.py new file mode 100644 index 00000000..afa2aede --- /dev/null +++ b/modules/scheduling/calender_fetcher.py @@ -0,0 +1,243 @@ +import os +import sys +import urllib +import logging +import simplejson + +from datetime import datetime, timedelta + +class CalendarFetcher: + url = dict() + data = dict() + config = None + logging = None + has_already_fetched = False + fetched_schedule_data = None + # another crutch because of the missing TANK + used_random_playlist_ids = list() + + def __init__(self, config): + self.config = config + self.logger = logging.getLogger("AuraEngine") + self.__set_url__("calendar") + self.__set_url__("importer") + + def fetch(self): + # fetch upcoming schedules from STEERING + self.logger.debug("Fetching schedules from STEERING") + self.__fetch_schedule_data__() + # fetch playlist and fallbacks to the schedules from TANK + self.logger.debug("Fetching playlists from TANK") + self.__fetch_schedule_entry_data__() + + return self.fetched_schedule_data + + # ------------------------------------------------------------------------------------------ # + def __set_url__(self, type): + url = self.config.get(type+"url") + pos = url.find("?") + + if pos > 0: + self.url[type] = url[0:pos] + self.data[type] = url[pos:] + else: + self.url[type] = url + + # ------------------------------------------------------------------------------------------ # + def __fetch_schedule_data__(self): + servicetype = "calendar" + + # fetch data from steering + html_response = self.__fetch_data__(servicetype) + + # use testdata if response fails or is empty + if not html_response or html_response == b"[]": + self.logger.debug("Got no response: Using testdata") + html_response = '[{"schedule_id":1,"start":"' + (datetime.now() + timedelta(hours=0)).strftime('%Y-%m-%d %H:00:00') + '","end":"' + (datetime.now() + timedelta(hours=1)).strftime('%Y-%m-%d %H:00:00') + '","show_id":9,"show_name":"FROzine","show_hosts":"Sandra Hochholzer, Martina Schweiger","is_repetition":false,"playlist_id":2,"schedule_fallback_id":12,"show_fallback_id":92,"station_fallback_id":1,"rtr_category":"string","comment":"Kommentar","languages":"Sprachen","type":"Typ","category":"Kategorie","topic":"Topic","musicfocus":"Fokus"},{"schedule_id":2,"schedule_start":"' + (datetime.now() + 
timedelta(hours=1)).strftime('%Y-%m-%d %H:00:00') + '","schedule_end":"' + (datetime.now() + timedelta(hours=2)).strftime('%Y-%m-%d %H:00:00') + '","show_id":10,"show_name":"FROMat","show_hosts":"Sandra Hochholzer, Martina Schweiger","is_repetition":false,"playlist_id":4,"schedule_fallback_id":22,"show_fallback_id":102,"station_fallback_id":1,"rtr_category":"string","comment":"Kommentar","languages":"Sprachen","type":"Typ","category":"Kategorie","topic":"Topic","musicfocus":"Fokus"},{"schedule_id":3,"schedule_start":"' + (datetime.now() + timedelta(hours=2)).strftime('%Y-%m-%d %H:00:00') + '","schedule_end":"' + (datetime.now() + timedelta(hours=3)).strftime('%Y-%m-%d %H:00:00') + '","show_id":11,"show_name":"Radio für Senioren","show_hosts":"Sandra Hochholzer, Martina Schweiger","is_repetition":false,"playlist_id":6,"schedule_fallback_id":32,"show_fallback_id":112,"station_fallback_id":1,"rtr_category":"string","comment":"Kommentar","languages":"Sprachen","type":"Typ","category":"Kategorie","topic":"Topic","musicfocus":"Fokus"}]' + + try: + schedule_from_pv = simplejson.loads(html_response) + except Exception as e: + self.logger.critical("Cannot fetch schedule entries from PV") + # sys.exit() + return None + + # check data + self.logger.critical("Hardcoded Response && no JSON data checks. I believe what i get here") + + d = self.remove_data_more_than_24h_in_the_future(schedule_from_pv) + self.fetched_schedule_data = self.remove_data_in_the_past(d) + + return self.fetched_schedule_data + + # ------------------------------------------------------------------------------------------ # + def __fetch_schedule_entry_data__(self): + # store fetched entries => do not have to fetch playlist_id more than once + fetched_entries=[] + + try: + for schedule in self.fetched_schedule_data: + # retrieve playlist and the fallbacks for every schedule + # if a playlist (like station_fallback) is already fetched, it is not fetched again but reused + schedule["playlist"] = self.__fetch_schedule_entries__(schedule, "playlist_id", fetched_entries) + schedule["schedule_fallback"] = self.__fetch_schedule_entries__(schedule, "schedule_fallback_id", fetched_entries) + schedule["show_fallback"] = self.__fetch_schedule_entries__(schedule, "show_fallback_id", fetched_entries) + schedule["station_fallback"] = self.__fetch_schedule_entries__(schedule, "station_fallback_id", fetched_entries) + + self.logger.info(str(schedule)) + + except Exception as e: + self.logger.error(str(e)) + + # ------------------------------------------------------------------------------------------ # + def __fetch_schedule_entries__(self, schedule, id_name, fetched_schedule_entries): + servicetype = "importer" + use_testdata = False + + # fetch data from importer + json_response = self.__fetch_data__(servicetype) + if not json_response: + use_testdata = True + + # if a playlist is already fetched, do not fetch it again + for entry in fetched_schedule_entries: + if entry["playlist_id"] == schedule[id_name]: + self.logger.debug("playlist #" + str(schedule[id_name]) + " already fetched") + return entry + + # generate testdata + if use_testdata: + json_response = self.create_test_data(id_name, schedule) + + # convert to list + try: + schedule_entries = simplejson.loads(json_response) + except Exception as e: + self.logger.critical("Cannot convert playlist from importer into list") + schedule_entries = list() + + if "entries" in schedule_entries: + for entry in schedule_entries["entries"]: + if entry["source"].startswith("file"): + e = 
entry["source"][7:] # filter file:// out + if not os.path.isfile(e): + self.logger.warning("File", e, "does not exist!") + + fetched_schedule_entries.append(schedule_entries) + + return schedule_entries + + # ------------------------------------------------------------------------------------------ # + def __fetch_data__(self, type): + # init html_response + html_response = "" + + # open an url and read the data + try: + if type not in self.data: + if self.url[type] == "": + return False + request = urllib.request.Request(self.url[type]) + else: + request = urllib.request.Request(self.url[type], self.data[type]) + + response = urllib.request.urlopen(request) + html_response = response.read() + + except (urllib.error.URLError, IOError, ValueError) as e: + self.logger.error("Cannot connect to " + self.url[type] + "! reason: " + str(e.reason)) + if not self.has_already_fetched: # first fetch + self.logger.critical("exiting fetch data thread..") + sys.exit() + + self.has_already_fetched = True + return html_response + + # ------------------------------------------------------------------------------------------ # + def remove_data_more_than_24h_in_the_future(self, schedule_from_pv): + act_list = [] + now = datetime.now() + now_plus_24hours = now + timedelta(hours=24) + + for s in schedule_from_pv: + date_start = datetime.strptime(s["start"], "%Y-%m-%dT%H:%M:%S") + + # append only elements which are close enough to now + if date_start <= now_plus_24hours and date_start >= now - timedelta(hours=1): + act_list.append(s) + + return act_list + + # ------------------------------------------------------------------------------------------ # + def remove_data_in_the_past(self, schedule_from_pv): + act_list = [] + now = datetime.now() + + for index,curr in enumerate(schedule_from_pv[:-1]): + date_start = datetime.strptime(curr["start"], "%Y-%m-%dT%H:%M:%S") + date_next_start = datetime.strptime(schedule_from_pv[index+1]["start"], "%Y-%m-%dT%H:%M:%S") + + # append all elements in the future + if date_start >= now: + act_list.append(curr) + # append the one which is now playing + if date_start <= now and date_next_start >= now: + act_list.append(curr) + + return act_list + + # ------------------------------------------------------------------------------------------ # + def create_test_data(self, id_name, schedule): + import random + rand_id = random.randint(1, 10000) + + while rand_id in self.used_random_playlist_ids: + rand_id = random.randint(1, 10000) + + self.used_random_playlist_ids.append(rand_id) + + # HARDCODED Testdata + if id_name != "playlist_id": + # FALLBACK TESTDATA + + if rand_id % 3 == 0: # playlist fallback + json_response = '{"playlist_id":' + str( + rand_id) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"file:///var/audio/fallback/NightmaresOnWax/DJ-Kicks/02 - Only Child - Breakneck.flac"}]}' + elif rand_id % 2 == 0: # stream fallback + json_response = '{"playlist_id":' + str( + rand_id) + ',"entries":[{"source":"http://chill.out.airtime.pro:8000/chill_a"}]}' + else: # pool fallback + json_response = '{"playlist_id":' + str(rand_id) + ',"entries":[{"source":"pool:///liedermacherei"}]}' + + schedule[id_name] = rand_id + + elif schedule[id_name] == 0 or schedule[id_name] is None: + # this happens when playlist id is not filled out in pv + # json_response = '{"playlist_id": 0}' + + if rand_id % 4 == 0: # playlist with two files + json_response = '{"playlist_id":' + str( + rand_id) + 
',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"file:///var/audio/fallback/NightmaresOnWax/DJ-Kicks/02 - Only Child - Breakneck.flac"}]}' + elif rand_id % 3 == 0: # playlist with jingle and then linein + json_response = '{"playlist_id":' + str( + rand_id) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"linein://1"}]}' + elif rand_id % 2 == 0: # playlist with jingle and then http stream + json_response = '{"playlist_id":' + str( + rand_id) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"http://chill.out.airtime.pro:8000/chill_a"}]}' + else: # pool playlist + json_response = '{"playlist_id":' + str(rand_id) + ',"entries":[{"source":"pool:///hiphop"}]}' + + schedule[id_name] = rand_id + + elif schedule[id_name] % 4 == 0: # playlist with two files + json_response = '{"playlist_id":' + str(schedule[id_name]) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"file:///var/audio/fallback/NightmaresOnWax/DJ-Kicks/01 - Type - Slow Process.flac"}]}' + elif schedule[id_name] % 3 == 0: # playlist with jingle and then http stream + json_response = '{"playlist_id":' + str(schedule[id_name]) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"linein://0"}]}' + elif schedule[id_name] % 2 == 0: # playlist with jingle and then linein + json_response = '{"playlist_id":' + str(schedule[id_name]) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"http://stream.fro.at:80/fro-128.ogg"}]}' + else: # pool playlist + json_response = '{"playlist_id":' + str(schedule[id_name]) + ',"entries":[{"source":"pool:///chillout"}]}' + + self.logger.info("Using 'randomized' playlist: " + json_response + " for " + id_name[:-3] + " for show " + schedule["show_name"] + " starting @ " + schedule["start"]) + + return json_response \ No newline at end of file -- GitLab