Commit 0e6f9f9c authored by Gottfried Gaisbauer

Fixed annoying bugs such as duplicate database entries, fading mistakes, and so on.

parent e9acfa56
......@@ -120,12 +120,39 @@ class Schedule(DB.Model, AuraDatabaseModel):
entry = DB.session.query(Schedule).filter(Schedule.schedule_id == id).first()
return entry
@staticmethod
def select_act_programme():
#DB.session.query(Schedule).filter
# fetch all schedules from today onwards
today = datetime.date.today()
all_entries = DB.session.query(Schedule).filter(Schedule.schedule_start >= today).order_by(Schedule.schedule_start).all()
return all_entries
@staticmethod
def select_show_on_datetime(datetime):
return DB.session.query(Schedule).filter(Schedule.schedule_start == datetime).first()
@staticmethod
def drop_the_future(timedelta):
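# delete every schedule that starts later than now + timedelta (used to clear stale future data before a fresh import)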
then = datetime.datetime.now() + timedelta
# is this really necessary?
future_entries = DB.session.query(Schedule).filter(Schedule.schedule_start > then)
for e in future_entries:
e.delete()
DB.session.commit()
def get_length(self):
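# length of the schedule in seconds, derived from the start and end timestamps (minute precision; strftime("%s") yields epoch seconds)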
sec1 = int(datetime.datetime.strptime(self.start[0:16].replace(" ", "T"), "%Y-%m-%dT%H:%M").strftime("%s"))
sec2 = int(datetime.datetime.strptime(self.end[0:16].replace(" ", "T"), "%Y-%m-%dT%H:%M").strftime("%s"))
length = sec2 - sec1
return length
# ------------------------------------------------------------------------------------------ #
def __str__(self):
return "ScheduleID: #" + str(self.schedule_id) + " Showname: " + self.show_name + " starts @ " + str(self.schedule_start)
# ------------------------------------------------------------------------------------------ #
class ScheduleEntry(DB.Model, AuraDatabaseModel):
......@@ -148,8 +175,11 @@ class ScheduleEntry(DB.Model, AuraDatabaseModel):
entry_start_unix = 0
programme_index = -1
type = None
fadeintimer = None
fadeouttimer = None
switchtimer = None
schedule = relationship("Schedule", foreign_keys=[schedule_id], lazy="joined")
# schedule = relationship("Schedule", foreign_keys=[schedule_id], lazy="joined")
# normal constructor
def __init__(self, **kwargs):
......@@ -165,6 +195,9 @@ class ScheduleEntry(DB.Model, AuraDatabaseModel):
self.set_entry_type()
def define_clean_source(self):
if self.source is None:
return None
if self.source.startswith("http"):
self.cleanprotocol = self.source[:7]
self.cleansource = self.source
......@@ -262,10 +295,26 @@ class ScheduleEntry(DB.Model, AuraDatabaseModel):
def select_one(playlist_id, entry_num):
return DB.session.query(ScheduleEntry).filter(ScheduleEntry.playlist_id == playlist_id, ScheduleEntry.entry_num == entry_num).first()
# ------------------------------------------------------------------------------------------ #
@staticmethod
def select_one_playlist_entry_for_show(schedule_id, playlist_type, entry_num):
return DB.session.query(ScheduleEntry).filter(ScheduleEntry.schedule_id == schedule_id, ScheduleEntry.fallback_type == playlist_type, ScheduleEntry.entry_num == entry_num).first()
# ------------------------------------------------------------------------------------------ #
@staticmethod
def select_playlist(playlist_id):
return DB.session.query(ScheduleEntry).filter(ScheduleEntry.playlist_id == playlist_id).all()
return DB.session.query(ScheduleEntry).filter(ScheduleEntry.playlist_id == playlist_id).order_by(ScheduleEntry.entry_start).all()
@staticmethod
def drop_the_future(timedelta):
then = datetime.datetime.now() + timedelta
#DB.session.delete(ScheduleEntry).filter(ScheduleEntry.entry_start >= then)
# is this really necessary?
future_entries = DB.session.query(ScheduleEntry).filter(ScheduleEntry.entry_start > then)
for e in future_entries:
e.delete()
DB.session.commit()
def getChannel(self):
if self.type == self.type.FILESYSTEM:
......@@ -280,7 +329,7 @@ class ScheduleEntry(DB.Model, AuraDatabaseModel):
# ------------------------------------------------------------------------------------------ #
def __str__(self):
return "ScheduleID: #" + str(self.schedule_id) + " Showname: " + self.schedule.show_name + " starts @ " + str(self.entry_start) + " and plays " + self.source
return "Showentry starts @ " + str(self.entry_start) + " and plays " + self.source
# ------------------------------------------------------------------------------------------ #
......
......@@ -51,6 +51,7 @@ class RedisChannel(Enum):
FNP_REPLY = "fetch_new_programme_reply"
GAP_REPLY = "get_act_programme_reply"
GCS_REPLY = "get_connection_status_reply"
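# reply channel for get_next_file requests answered by ServerRedisAdapter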
GNF_REPLY = "get_next_file_reply"
IPE_REPLY = "insert_playlist_entry_reply"
IP_REPLY = "init_player_reply"
MPE_REPLY = "move_playlist_entry_reply"
......@@ -67,4 +68,17 @@ class ScheduleEntryType(Enum):
LIVE_1 = "aura_linein_1"
LIVE_2 = "aura_linein_2"
LIVE_3 = "aura_linein_3"
LIVE_4 = "aura_linein_4"
\ No newline at end of file
LIVE_4 = "aura_linein_4"
class FallbackType(Enum):
SHOW = "show" # the first played when the show playlist fails
STATION = "station" # the last played when everything else fails
TIMESLOT = "timeslot" # the second played when show fallback fails
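# resulting cascade: show playlist -> show fallback -> timeslot fallback -> station fallback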
class TimerType(Enum):
SWITCH = "switch"
FADEIN = "fadein"
FADEOUT = "fadeout"
......@@ -168,12 +168,15 @@ class Padavan:
# ------------------------------------------------------------------------------------------ #
def print_programme(self, programme):
for entry in programme:
self.stringreply += "idx: " + str(entry["programme_index"]) + \
" --- schedule id #" + str(entry["schedule_id"]) + "." + str(entry["entry_num"]) + \
" - show: " + entry["schedule"]["show_name"] + \
" - starts @ " + entry["entry_start"] + \
" - plays " + str(entry["source"]) + "\n"
cnt = 1
for show in programme:
for entry in show["playlist"]:
self.stringreply += str(cnt) + \
" --- schedule id #" + str(show["schedule_id"]) + "." + str(entry["entry_num"]) + \
" - show: " + show["show_name"] + \
" - starts @ " + entry["entry_start"] + \
" - plays " + str(entry["source"]) + "\n"
cnt = cnt + 1
# ------------------------------------------------------------------------------------------ #
def print_connection_status(self, connection_status):
......@@ -297,21 +300,22 @@ class Padavan:
# ------------------------------------------------------------------------------------------ #
def get_next_file(self, type):
redis = RedisMessenger()
# redis = RedisMessenger()
# next_file = redis.get_next_file_for(type)
# if next_file == "":
# next_file = "/var/audio/blank.flac"
# self.stringreply = next_file
next_file = redis.get_next_file_for(type)
if next_file == "":
next_file = "/var/audio/blank.flac"
#self.send_redis("aura", "set_next_file " + type)
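# ask the engine for the next fallback file over redis and wait for the answer on the GNF_REPLY channel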
next_file = self.send_and_wait_redis("aura", "get_next_file " + type, RedisChannel.GNF_REPLY)
self.stringreply = next_file
self.send_redis("aura", "set_next_file " + type)
# ------------------------------------------------------------------------------------------ #
def set_next_file(self, type, file):
from modules.communication.redis.messenger import RedisMessenger
redis = RedisMessenger()
#from modules.communication.redis.messenger import RedisMessenger
#redis = RedisMessenger()
redis.set_next_file_for(type, file)
#redis.set_next_file_for(type, file)
self.send_redis("aura", "set_next_file " + type + " " + file)
self.stringreply = "Set "+file+" for fallback '"+type+"'"
......@@ -42,7 +42,6 @@ from libraries.exceptions.exception_logger import ExceptionLogger
"""
class LiquidSoapCommunicator(ExceptionLogger):
# lqcr = None
client = None
logger = None
transaction = 0
......@@ -54,6 +53,8 @@ class LiquidSoapCommunicator(ExceptionLogger):
connection_attempts = 0
active_channel = None
disable_logging = False
fade_in_active = False
fade_out_active = False
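# both flags guard the logging switch below: logging is only re-enabled once neither fade is still running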
# ------------------------------------------------------------------------------------------ #
def __init__(self, config):
......@@ -123,7 +124,7 @@ class LiquidSoapCommunicator(ExceptionLogger):
gets active channel from programme
:return:
"""
active_entry = self.scheduler.get_active_entry()
(show, active_entry) = self.scheduler.get_active_entry()
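# get_active_entry() returns a (show, entry) tuple; only the entry is needed here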
if active_entry is None:
return ""
return active_entry.type
......@@ -224,42 +225,69 @@ class LiquidSoapCommunicator(ExceptionLogger):
self.__send_lqc_command__(self.client, "recorder", str(num), "start")
# ------------------------------------------------------------------------------------------ #
def fade_in(self, new_entry, seconds):
target_volume = new_entry.volume
step = seconds / target_volume
def fade_in(self, new_entry):
try:
fade_in_time = float(self.config.get("fade_in_time"))
if fade_in_time > 0:
self.fade_in_active = True
target_volume = new_entry.volume
step = fade_in_time / target_volume
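# one volume step per loop iteration, e.g. fade_in_time = 2.5s and target_volume = 100 gives a 0.025s sleep per step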
self.logger.info("Starting to fading " + new_entry.type.value + " in. step is " + str(step) + "s. target volume is " + str(target_volume))
self.logger.info("Starting to fading " + new_entry.type.value + " in. step is " + str(step) + "s. target volume is " + str(target_volume))
self.disable_logging = True
for i in range(target_volume):
self.channel_volume(new_entry.type.value, i + 1)
time.sleep(step)
self.disable_logging = False
self.disable_logging = True
self.client.disable_logging = True
self.logger.info("Finished with fading " + new_entry.type.value + " in.")
for i in range(target_volume):
self.channel_volume(new_entry.type.value, i + 1)
time.sleep(step)
self.logger.info("Finished with fading " + new_entry.type.value + " in.")
self.fade_in_active = False
if not self.fade_out_active:
self.disable_logging = False
self.client.disable_logging = False
except LQConnectionError as e:
self.logger.critical(str(e))
return True
# ------------------------------------------------------------------------------------------ #
def fade_out(self, old_entry, seconds):
step = abs(seconds) / old_entry.volume
def fade_out(self, old_entry):
try:
fade_out_time = float(self.config.get("fade_out_time"))
if fade_out_time > 0:
step = abs(fade_out_time) / old_entry.volume
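# lower the volume one step per iteration so the whole fade takes roughly fade_out_time seconds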
self.logger.info("Starting to fading " + old_entry.type.value + " out. step is " + str(step) + "s")
self.logger.info("Starting to fading " + old_entry.type.value + " out. step is " + str(step) + "s")
# disable logging... it is going to be enabled again after fading in
self.disable_logging = True
for i in range(old_entry.volume):
self.channel_volume(old_entry.type.value, old_entry.volume-i-1)
time.sleep(step)
# disable logging... it will be enabled again once both fade-in and fade-out have finished
self.disable_logging = True
self.client.disable_logging = True
self.logger.info("Finished with fading " + old_entry.type.value + " out.")
for i in range(old_entry.volume):
self.channel_volume(old_entry.type.value, old_entry.volume-i-1)
time.sleep(step)
self.logger.info("Finished with fading " + old_entry.type.value + " out.")
# enable logging again
self.fade_out_active = False
if not self.fade_in_active:
self.disable_logging = False
self.client.disable_logging = False
except LQConnectionError as e:
self.logger.critical(str(e))
return True
# ------------------------------------------------------------------------------------------ #
def activate(self, new_entry):
# grab the actual active entry
old_entry = self.scheduler.get_active_entry()
(show, old_entry) = self.scheduler.get_active_entry()
# determine its type
old_type = old_entry.type
......@@ -365,10 +393,11 @@ class LiquidSoapCommunicator(ExceptionLogger):
# ------------------------------------------------------------------------------------------ #
def init_player(self):
t = LiquidSoapInitThread()
t.liquidsoapcommunicator = self
t.active_entry = self.scheduler.get_active_entry()
(_, active_entry) = self.scheduler.get_active_entry()
t = LiquidSoapInitThread(self, active_entry)
t.start()
return "LiquidSoapInitThread started!"
# ------------------------------------------------------------------------------------------ #
......@@ -492,8 +521,6 @@ class LiquidSoapCommunicator(ExceptionLogger):
self.logger.info("LiquidSoapCommunicator is calling " + str(namespace) + str(args))
else:
self.logger.info("LiquidSoapCommunicator is calling " + str(namespace) + "." + str(command) + str(args))
else:
lqs_instance.disable_logging = True
# call wanted function ...
func = getattr(lqs_instance, namespace)
......@@ -502,8 +529,6 @@ class LiquidSoapCommunicator(ExceptionLogger):
if not self.disable_logging:
self.logger.debug("LiquidSoapCommunicator got response " + str(result))
else:
lqs_instance.disable_logging = False
self.connection_attempts = 0
......
......@@ -32,14 +32,15 @@ from libraries.enum.auraenumerations import ScheduleEntryType
class LiquidSoapInitThread(threading.Thread):
logger = None
socket = None
active_entry = None
liquidsoapcommunicator = None
# ------------------------------------------------------------------------------------------ #
def __init__(self):
def __init__(self, liquidsoapcommunicator, active_entry):
threading.Thread.__init__(self)
self.logger = logging.getLogger("AuraEngine")
self.liquidsoapcommunicator = liquidsoapcommunicator
self.active_entry = active_entry
# ------------------------------------------------------------------------------------------ #
def run(self):
......
......@@ -35,7 +35,7 @@ from modules.communication.redis.messenger import RedisMessenger
from modules.communication.connection_tester import ConnectionTester
from libraries.database.statestore import RedisStateStore
from libraries.exceptions.auraexceptions import RedisConnectionException
from libraries.enum.auraenumerations import RedisChannel, TerminalColors
from libraries.enum.auraenumerations import RedisChannel, TerminalColors, FallbackType
# ------------------------------------------------------------------------------------------ #
......@@ -182,6 +182,11 @@ class ServerRedisAdapter(threading.Thread, RedisMessenger):
playlist = playlist[0:len(playlist)-8]
self.execute(RedisChannel.SNF_REPLY.value, self.scheduler.set_next_file_for, playlist)
elif item["data"].find("get_next_file") >= 0:
playlist = item["data"].split()[1]
#playlist = playlist[0:len(playlist)-8]
self.execute(RedisChannel.GNF_REPLY.value, self.scheduler.get_next_file_for, playlist)
elif item["data"] == "recreate_db":
self.execute(RedisChannel.RDB_REPLY.value, self.scheduler.recreate_database)
......@@ -204,7 +209,7 @@ class ServerRedisAdapter(threading.Thread, RedisMessenger):
# sometimes the sender is faster than the receiver; without this short delay redis messages would be lost
time.sleep(0.1)
self.logger.info(TerminalColors.ORANGE.value + "replying REDIS message " + reply + " on channel " + channel + TerminalColors.ENDC.value)
self.logger.info(TerminalColors.ORANGE.value + "replying REDIS message " + TerminalColors.ENDC.value + reply + TerminalColors.ORANGE.value + " on channel " + channel + TerminalColors.ENDC.value)
# publish
self.redisclient.publish(channel, reply)
......
......@@ -116,8 +116,8 @@ def fallback_create(~skip=true, name, requestor)
# Tell the system when a new track
# is played
source = on_metadata(fun (meta) ->
log("ON_METADATA_DISABLED"),
# system('#{list.assoc("install_dir", ini)}/guru.py --fallback-metadata-change name'),
# log("ON_METADATA_DISABLED"),
system('#{list.assoc("install_dir", ini)}/guru.py --adapt-trackservice name'),
source)
log("channel created")
......
......@@ -53,6 +53,8 @@ class AuraCalendarService(threading.Thread):
logger = None
url = dict()
data = dict()
# another crutch because of the missing TANK
used_random_playlist_ids = list()
"""
Fetches playlist data, writes it into the database and notifies the service
......@@ -130,6 +132,10 @@ class AuraCalendarService(threading.Thread):
self.logger.debug("Fetching playlists from TANK")
self.__fetch_schedule_entry_data__()
# drop everything that is more than 30 minutes in the future to avoid strange sync errors
# the programme is still in the memory of the engine and is reloaded when this fetch has finished.
self.drop_the_future(timedelta(minutes=30))
for schedule in self.fetched_schedule_data:
if "start" not in schedule:
self.logger.warning("No start of schedule given. skipping the schedule: "+str(schedule))
......@@ -155,9 +161,14 @@ class AuraCalendarService(threading.Thread):
# terminate the thread
return
# ------------------------------------------------------------------------------------------ #
def drop_the_future(self, time_in_the_future):
ScheduleEntry.drop_the_future(time_in_the_future)
Schedule.drop_the_future(time_in_the_future)
# ------------------------------------------------------------------------------------------ #
def store_schedule(self, schedule):
schedule_db = Schedule.query.filter(Schedule.schedule_id == schedule["schedule_id"]).first()
schedule_db = Schedule.select_show_on_datetime(schedule["start"])
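# if no schedule exists yet for that start time, a new one is created below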
havetoadd = False
if not schedule_db:
......@@ -196,7 +207,7 @@ class AuraCalendarService(threading.Thread):
def store_schedule_playlist(self, schedule_db, schedule, playlistname, fallbackplaylist_type=0):
playlist = schedule[playlistname]
info = "Schedule playlist (" + playlistname + ") for " + schedule_db.show_name + " stored"
debug = "Schedule playlist (" + playlistname + ") for " + schedule_db.show_name + " stored"
warning = "No scheduleentries for playlist #" + str(playlist['playlist_id']) + " in schedule #" + str(schedule_db.schedule_id) + " found"
entrynum = 0
......@@ -210,13 +221,13 @@ class AuraCalendarService(threading.Thread):
if lastentry is None:
self.logger.warning(warning)
else:
self.logger.info(info)
self.logger.debug(debug)
else:
self.logger.warning(warning)
# ------------------------------------------------------------------------------------------ #
def store_playlist_entry(self, schedule_db, playlist, entry, lastentry, entrynum, fallbackplaylist_type=0):
schedule_entry_db = ScheduleEntry.select_one(playlist["playlist_id"], entrynum)
schedule_entry_db = ScheduleEntry.select_one_playlist_entry_for_show(schedule_db.schedule_id, fallbackplaylist_type, entrynum)
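# entries are looked up per schedule, fallback type and entry number, so re-importing the same playlist does not create duplicate rows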
havetoadd = False
if not schedule_entry_db:
......@@ -232,7 +243,8 @@ class AuraCalendarService(threading.Thread):
schedule_entry_db.entry_start = schedule_db.schedule_start + timedelta(seconds=self.get_length(lastentry))
schedule_entry_db.calc_unix_times()
# schedule_entry_db.define_clean_source()
if havetoadd:
schedule_entry_db.define_clean_source()
self.logger.debug("Storing entries... playlist_id: " + str(playlist["playlist_id"]) + " schedule_id: " + str(schedule_db.schedule_id) + " num: " + str(entrynum))
......@@ -281,7 +293,7 @@ class AuraCalendarService(threading.Thread):
try:
for schedule in self.fetched_schedule_data:
# retrieve playlist and the fallbacks for every schedule
# if a playlist is already fetched, it is not fetched again
# if a playlist (like station_fallback) is already fetched, it is not fetched again but reused
schedule["playlist"] = self.__fetch_schedule_entries__(schedule, "playlist_id", fetched_entries)
schedule["schedule_fallback"] = self.__fetch_schedule_entries__(schedule, "schedule_fallback_id", fetched_entries)
schedule["show_fallback"] = self.__fetch_schedule_entries__(schedule, "show_fallback_id", fetched_entries)
......@@ -297,52 +309,27 @@ class AuraCalendarService(threading.Thread):
servicetype = "importer"
use_testdata = False
# fetch data from importer
json_response = self.__fetch_data__(servicetype)
if not json_response:
use_testdata = True
# if a playlist is already fetched, do not fetch it again
for entry in fetched_schedule_entries:
if entry["playlist_id"] == schedule[id_name]:
self.logger.debug("playlist #" + str(schedule[id_name]) + " already fetched")
return entry
# generate testdata
if use_testdata:
# HARDCODED Testdata
if schedule[id_name] == 0 or schedule[id_name] is None:
# this happens when playlist id is not filled out in pv
# json_response = '{"playlist_id": 0}'
import random
rand_id = random.randint(1,100)
if rand_id % 4 == 0: # playlist with two files
json_response = '{"playlist_id":' + str(rand_id) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"file:///var/audio/fallback/NightmaresOnWax/DJ-Kicks/02 - Only Child - Breakneck.flac"}]}'
elif rand_id % 3 == 0: # playlist with jingle and then linein
json_response = '{"playlist_id":' + str(rand_id) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"linein://1"}]}'
elif rand_id % 2 == 0: # playlist with jingle and then http stream
json_response = '{"playlist_id":' + str(rand_id) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"http://chill.out.airtime.pro:8000/chill_a"}]}'
else: # pool playlist
json_response = '{"playlist_id":' + str(rand_id) + ',"entries":[{"source":"pool:///hiphop"}]}'
elif schedule[id_name] % 4 == 0: # playlist with two files
json_response = '{"playlist_id":' + str(schedule[id_name]) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"file:///var/audio/fallback/NightmaresOnWax/DJ-Kicks/01 - Type - Slow Process.flac"}]}'
elif schedule[id_name] % 3 == 0: # playlist with jingle and then http stream
json_response = '{"playlist_id":' + str(schedule[id_name]) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"linein://0"}]}'
elif schedule[id_name] % 2 == 0: # playlist with jingle and then linein
json_response = '{"playlist_id":' + str(schedule[id_name]) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"http://stream.fro.at:80/fro-128.ogg"}]}'
else: # pool playlist
json_response = '{"playlist_id":' + str(schedule[id_name]) + ',"entries":[{"source":"pool:///chillout"}]}'
if schedule[id_name] == 0 or schedule[id_name] is None:
self.logger.info("Using 'randomized' playlist: " + json_response + " for " + id_name[:-3] + " for show " + schedule["show_name"] + " starting @ " + schedule["start"])
else:
self.logger.info("Using hardcoded playlist: " + json_response + " for " + id_name[:-3] + " for show " + schedule["show_name"] + " starting @ " + schedule["start"])
json_response = self.create_test_data(id_name, schedule)
# convert to list
try:
schedule_entries = simplejson.loads(json_response)
except Exception as e:
self.logger.critical("Cannot fetch schedule entries from importer")
sys.exit()
self.logger.critical("Cannot convert playlist from importer into list")
schedule_entries = list()
if "entries" in schedule_entries:
for entry in schedule_entries["entries"]:
......@@ -355,6 +342,61 @@ class AuraCalendarService(threading.Thread):
return schedule_entries
def create_test_data(self, id_name, schedule):
import random
rand_id = random.randint(1, 10000)
while rand_id in self.used_random_playlist_ids:
rand_id = random.randint(1, 10000)
self.used_random_playlist_ids.append(rand_id)
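# remember which random ids were already handed out so every schedule gets its own distinct test playlist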
# HARDCODED Testdata
if id_name != "playlist_id":
# FALLBACK TESTDATA
if rand_id % 3 == 0: # playlist fallback
json_response = '{"playlist_id":' + str(
rand_id) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"file:///var/audio/fallback/NightmaresOnWax/DJ-Kicks/02 - Only Child - Breakneck.flac"}]}'
elif rand_id % 2 == 0: # stream fallback
json_response = '{"playlist_id":' + str(
rand_id) + ',"entries":[{"source":"http://chill.out.airtime.pro:8000/chill_a"}]}'
else: # pool fallback
json_response = '{"playlist_id":' + str(rand_id) + ',"entries":[{"source":"pool:///liedermacherei"}]}'
schedule[id_name] = rand_id
elif schedule[id_name] == 0 or schedule[id_name] is None:
# this happens when playlist id is not filled out in pv
# json_response = '{"playlist_id": 0}'
if rand_id % 4 == 0: # playlist with two files
json_response = '{"playlist_id":' + str(
rand_id) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"file:///var/audio/fallback/NightmaresOnWax/DJ-Kicks/02 - Only Child - Breakneck.flac"}]}'
elif rand_id % 3 == 0: # playlist with jingle and then linein
json_response = '{"playlist_id":' + str(
rand_id) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"linein://1"}]}'
elif rand_id % 2 == 0: # playlist with jingle and then http stream
json_response = '{"playlist_id":' + str(
rand_id) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"http://chill.out.airtime.pro:8000/chill_a"}]}'
else: # pool playlist
json_response = '{"playlist_id":' + str(rand_id) + ',"entries":[{"source":"pool:///hiphop"}]}'
schedule[id_name] = rand_id
elif schedule[id_name] % 4 == 0: # playlist with two files
json_response = '{"playlist_id":' + str(schedule[id_name]) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"file:///var/audio/fallback/NightmaresOnWax/DJ-Kicks/01 - Type - Slow Process.flac"}]}'
elif schedule[id_name] % 3 == 0: # playlist with jingle and then http stream
json_response = '{"playlist_id":' + str(schedule[id_name]) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"linein://0"}]}'
elif schedule[id_name] % 2 == 0: # playlist with jingle and then linein
json_response = '{"playlist_id":' + str(schedule[id_name]) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"http://stream.fro.at:80/fro-128.ogg"}]}'
else: # pool playlist
json_response = '{"playlist_id":' + str(schedule[id_name]) + ',"entries":[{"source":"pool:///chillout"}]}'
self.logger.info("Using 'randomized' playlist: " + json_response + " for " + id_name[:-3] + " for show " + schedule["show_name"] + " starting @ " + schedule["start"])
return json_response
# ------------------------------------------------------------------------------------------ #
def __fetch_schedule_data__(self):
servicetype = "calendar"
......@@ -443,7 +485,7 @@ class AuraCalendarService(threading.Thread):
# ------------------------------------------------------------------------------------------ #
def get_length(self, entry):
if entry is None or entry.source == ScheduleEntryType.STREAM or entry.type == ScheduleEntryType.LIVE_0 or entry.type