.idea/
*.pyc
*.log
image: python:3.6
stages:
- build
- test
install_dependencies:
stage: build
script:
- pwd
- ls
- cat requirements.txt
test_guru:
stage: test
script: python3 guru -h
# AURA Engine
This piece of software is part of 'AURA - AUtomated RAdio'.
AURA Engine:
* requests the programme from an external source
* switches the sound server to the source of a given show at the correct time
* records what is broadcast
## Installation
### Software
#### Operating System
Any Linux system with sound support should work. It is developed and tested on **Debian Stretch**.
#### Packages
On a Debian machine:
```bash
sudo apt install \
git \
python3 python3-pip \
redis-server redis-tools \
liquidsoap liquidsoap-plugin-alsa liquidsoap-plugin-flac liquidsoap-plugin-icecast liquidsoap-plugin-pulseaudio \
libev4 libev-dev \
mariadb-server libmariadbclient-dev
```
#### Python Packages
```bash
sudo pip3 install \
Flask Flask-Babel Flask-SQLAlchemy Flask-WTF \
mysqlclient redis simplejson mutagen \
pyev python-dateutil
```
#### Get the code
```bash
git clone https://gitlab.servus.at/autoradio/engine
```
#### Set Up a database
```bash
mysql -u root -p
CREATE DATABASE aura_engine CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;
CREATE USER 'aura'@'localhost' IDENTIFIED BY 'secure-password';
GRANT ALL PRIVILEGES ON aura_engine.* TO 'aura'@'localhost';
```
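To check that the user and database were created correctly, you can connect with the new credentials (using the example user and database names from above):
```bash
mysql -u aura -p aura_engine -e "SHOW TABLES;"
```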
#### Files and Folders
Create the audio folder and the configuration folder referenced in your aura.ini:
```bash
mkdir /var/audio
mkdir /etc/aura
cp {where you cloned the repo}/configuration/aura.ini /etc/aura/aura.ini
```
Then edit the installation directory and the database settings in /etc/aura/aura.ini.
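The keys that typically need changing are in the `[database]` and `[folder]` sections; for example (values are placeholders, use the credentials and paths from your setup):
```bash
# open the configuration with the editor of your choice and adjust at least:
#   [database]  db_name / db_user / db_pass / db_host  -> the credentials created above
#   [folder]    install_dir                            -> where you cloned the repo
nano /etc/aura/aura.ini
```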
#### aura.py
The main server. It connects to the external programme source and to Liquidsoap, and listens for Redis pub/sub messages.
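A minimal sketch of starting the engine, assuming the configuration in /etc/aura/aura.ini is in place:
```bash
cd {where you cloned the repo}
python3 aura.py
```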
#### Guru
The command-line tool for interacting with the server.
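Some example invocations using the options defined in guru.py (see `python3 guru -h` for the full list):
```bash
# print the status of the connections to liquidsoap, pv and tank
python3 guru -pcs
# print the programme the engine currently holds
python3 guru -pap
# print all mixer sources and their states
python3 guru -pms
```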
#### Liquidsoap
The heart of AURA Engine. It uses Liquidsoap's built-in mixer to switch between different sources. A source can be a stream, the filesystem or a line-in.
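Sources can also be switched by hand through Guru, which drives the Liquidsoap mixer; a short sketch (channel names correspond to the `[liquidsoap]` section of aura.ini):
```bash
# activate the line-in channel "live" and set the volume of mixer source 0 to 100%
python3 guru -am live
python3 guru -vm 0 100
```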
### Hardware
#### Soundcard
AURA Engine is tested with an ASUS Xonar DGX. It should work with any sound card supported by ALSA. PulseAudio support is planned.
#### ALSA Settings
When you use ALSA, you will probably have to experiment with its settings. The folder ./modules/liquidsoap contains a script called alsa_settings_tester.liq, which you can start with 'liquidsoap -v --debug alsa_settings_tester.liq'. Tweaking the settings in this script helps you find a working ALSA configuration.
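For example (the settings to tweak correspond to the `[soundcard]` section of aura.ini):
```bash
cd {where you cloned the repo}/modules/liquidsoap
liquidsoap -v --debug alsa_settings_tester.liq
```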
#!/usr/bin/python3
import signal
import os
from modules.scheduling.scheduler import AuraScheduler
from modules.communication.liquidsoap.communicator import LiquidSoapCommunicator
from modules.communication.redis.adapter import ServerRedisAdapter
from modules.web.routes import Routes
from libraries.base.config import AuraConfig
from libraries.base.logger import AuraLogger
class Aura(AuraConfig, AuraLogger):
server = None
messenger = None
controller = None
# ------------------------------------------------------------------------------------------ #
def __init__(self):
super(Aura, self).__init__()
server = object
# self.controller = AuraController(self.config)
# create scheduler and ls_communicator
self.liquidsoapcommunicator = LiquidSoapCommunicator(self.config)
self.scheduler = AuraScheduler(self.config)
# give both a reference of each other
self.liquidsoapcommunicator.scheduler = self.scheduler
self.scheduler.liquidsoapcommunicator = self.liquidsoapcommunicator
# create the redis adapter
self.messenger = ServerRedisAdapter()
self.messenger.config = self.config
self.messenger.scheduler = self.scheduler
self.messenger.liquidsoapcommunicator = self.liquidsoapcommunicator
def receive_signal(signum, stack):
print("received signal")
server.reload()
signal.signal(signal.SIGUSR1, receive_signal)
def join_comm(self):
# start listener thread
self.messenger.start()
def start_web_service(self):
try:
Routes()
except OSError as e:
self.messenger.halt()
self.logger.critical("AuraEngine already running? Exception: " + e.strerror + ". Exiting...")
os._exit(0)
# # ## ## ## ## ## # #
# # ENTRY FUNCTION # #
# # ## ## ## ## ## # #
def main():
aura = Aura()
aura.join_comm()
aura.start_web_service()
# # ## ## ## ## ## ## # #
# # End ENTRY FUNCTION # #
# # ## ## ## ## ## ## # #
if __name__ == "__main__":
main()
[station]
station_name="Radio FRO"
station_logo="/etc/aura/stationlogo.jpg"
station_fallback_pool="/var/audio/station_fallback_pool"
[user]
# Change these settings
daemongroup="gg"
daemonuser="gg"
[configfile]
scheduler_config_file="/etc/aura/scheduler.xml"
[soundcard]
line_in_count=1
line_out_count=1
input_device[0]="hw:0" # multiple devices: comma separated
output_device[0]="hw:0"
use_alsa="y"
# alsa_buffer => int
alsa_buffer="16000"
# alsa_buffer_length => int
alsa_buffer_length="7"
# alsa_periods => int
alsa_periods="0"
# frame_duration => double
frame_duration="0.4"
# frame_size => int
frame_size=""
[database]
db_user="engine"
db_name="engine"
db_pass="engine"
db_host="localhost"
[socket]
socketdir="/home/gg/PycharmProjects/engine/modules/liquidsoap"
[logging]
logdir="/var/log/aura"
# possible values: debug, info, warning, error, critical
loglevel="info"
[liquidsoap]
# leave this alone if you do not know what you are doing
http_channels="http,http2"
line_in_channels="live,live2"
filesystem_channels="filesystem"
# track_sensitive => fallback_folder track sensitivity
# max_blank => maximum time of blank from source
# min_noise => minimum duration of noise on source to switch back over
# threshold => power in dB under which the stream is considered silent
fallback_max_blank="5"
fallback_min_noise="30"
fallback_threshold="-40"
[mail]
mail_server="mail.servus.at"
mail_user=""
mail_pass=""
# multiple adminmails => space separated
admin_mail="gogo@servus.at gottfried@servus.at"
from_mail=""
[dataurls]
# calendarurl="http://localhost/index.php?option=com_jimtawl&view=calendar&format=json&from=#datefrom#&to=#dateto#"
# calendarurl="http://bermudafunk-kalender.critmass.de/index.php?option=com_jimtawl&view=calendar&format=json&from=#datefrom#&to=#dateto#"
calendarurl="http://localhost:8000/api/v1/playout"
importerurl="http://localhost:8008/api/v1/groups/_public/playlists/"
[folder]
audiobase="/var/audio/rec"
altaudiobase="/var/audio/preprod"
playlistdir="/var/audio/playlists/"
install_dir="/home/gg/PycharmProjects/engine"
[stream]
stream="y"
stream_type="harbor"
#stream_type="icecast"
stream_bitrate="128"
stream_port="8000"
stream_mountpoint="aura"
stream_user="source"
stream_password="eegah5Hi"
stream_host="localhost"
stream_url="http://www.fro.at"
stream_name="Comba Test Stream"
stream_genre="mixed"
stream_description="Test Stream"
stream_admin_user="admin"
stream_admin_password="ahZ4caeg"
<Config>
<Jobs multiple="true">
<job>
<time>00:00</time>
<until>23:00</until>
<job>play_playlist</job>
<params>no_stop</params>
</job>
<job>
<job>start_recording</job>
<until>00:00</until>
<day>all</day>
<time>00:00</time>
<params>no_stop</params>
</job>
<job>
<daysolder>4</daysolder>
<job>clean_cached</job>
<day>1</day>
<time>00:03</time>
<params></params>
</job>
<job>
<time>01:00</time>
<day>all</day>
<job>precache</job>
<params></params>
</job>
</Jobs>
</Config>
{
}
{
"allData": {
"id": "01",
"00": "Global Metadata delivered",
"01": "Could not get Data from Sound Engine"
},
"channel_insert": {
"id": "02",
"00": "On Channel ::channel:: insert ::uri:: at position ::pos::",
"02": "On Channel ::channel:: could not insert ::uri:: at position ::pos::"
},
"channel_move": {
"id": "03",
"00": "On Channel ::channel:: moved Item from ::fromPos:: to position ::toPos::",
"01": "Warning: Position ::fromPos:: out of range",
"02": "Warning: Cannot move to same position",
"03": "On Channel ::channel:: could not move from position ::fromPos:: to position ::toPos::"
},
"channel_off": {
"id": "04",
"00": "Channel ::channel:: off",
"01": "Could not activate Channel ::channel::"
},
"channel_on": {
"id": "05",
"00": "Channel ::channel:: on",
"01": "Could not deactivate Channel ::channel::"
},
"channel_queue": {
"id": "06",
"00": "Channel Queue for ::channel:: delivered",
"01": "Could not get channel queue from channel ::channel::",
"02": "Could not get channel queue from channel ::channel::",
"03": "Could not get channel queue from channel ::channel::"
},
"channel_remove": {
"id": "07",
"00": "Removed item on position ::pos:: from channel ::channel::",
"01": "Could not remove item on position ::pos:: from channel ::channel::",
"02": "Warning: position ::pos:: out of range'"
},
"channel_seek": {
"id": "08",
"00": "Seeked channel ::channel:: ::duration:: seconds",
"01": "Could not seek channel ::channel:: ::duration:: seconds"
},
"channel_skip": {
"id": "09",
"00": "Skipped channel ::channel::",
"01": "0 Channels listed",
"02": "Could not get channels from sound engine",
"03": "Could not skip ::channel::"
},
"channel_volume": {
"id": "10",
"00": "Volume ::volume::% set on channel ::channel::",
"01": "Could not set volume to ::volume::% on channel ::channel::",
"02": "0 Channels listed",
"03": "Could not get channels from sound engine"
},
"currentData": {
"id": "11",
"00": "Current track metadata delivered",
"01": "Nothing seems to be on air",
"02": "Could not detect metadata"
},
"help": {
"id": "12",
"00": "none",
"01": "Could not open help file"
},
"listChannels": {
"id": "13",
"00": "Listed Channels",
"01": "0 Channels listed",
"02": "Could not get channels from sound engine"
},
"message": {
"id": "14",
"00": "none"
},
"playlist_data": {
"id": "15",
"00": "Playlist data delivered"
},
"playlist_flush": {
"id": "16",
"00": "Flushed playlist",
"01": "Could not flush playlist"
},
"playlist_insert": {
"id": "17",
"00": "Insert track ::uri:: on position ::pos::"
},
"playlist_load": {
"id": "18",
"00": "Load Playlist ::uri::",
"01": "Could not load Playlist ::uri::",
"02": "Playlist is not well formed XML"
},
"playlist_move": {
"id": "19",
"00": "Moved playlist track from position ::fromPos:: to ::toPos::"
},
"playlist_pause": {
"id": "20",
"00": "Playlist paused",
"01": "Playlist already paused"
},
"playlist_stop": {
"id": "21",
"00": "Playlist stopped",
"01": "Playlist already stopped"
},
"playlist_play": {
"id": "22",
"00": "Playlist started",
"01": "Playlist already playing",
"02": "0 Channels listed",
"03": "Could not get channels from sound engine"
},
"playlist_push": {
"id": "23",
"00": "Playlist: pushed ::uri::",
"01": "Could not push ::uri::"
},
"playlist_remove": {
"id": "24",
"00": "Removed track on position ::pos:: from playlist",
"01": "Could not remove track on position ::pos:: from playlist"
},
"playlist_seek": {
"id": "25",
"00": "Seeked playlist ::duration:: seconds",
"01": "Could not seek playlist ::duration:: seconds"
},
"playlist_skip": {
"id": "26",
"00": "Could not skip playlist"
},
"recorder_data": {
"id": "27",
"00": "Delivered recorder data",
"01": "Could not deliver recorder data"
},
"recorder_start": {
"id": "28",
"00": "Recorder started",
"01": "Could not start recorder"
},
"recorder_stop": {
"id": "29",
"00": "Recorder stopped",
"01": "Could not stop recorder"
},
"scheduler_reload": {
"id": "30",
"00": "Reload signal was sent to scheduler",
"01": "Could not find the scheduler process"
},
"sendLqcCommand": {
"id": "31",
"01": "Soundengine not running",
"02": "Recorder not running"
},
"get_channel_state": {
"id": "32",
"00": "Channels ::channel:: state",
"01": "Could not get channel state from channel ::channel::"
},
"setPassword": {
"id": "33",
"00": "Successfull set password",
"01": "Not enough access rights for this operation"
},
"addUser": {
"id": "34",
"00": "Successfull add user ::username::",
"01": "Not enough access rights for this operation"
},
"delUser": {
"id": "35",
"00": "Successfull removed user ::username::",
"01": "Not enough access rights for this operation"
},
"scheduler_data": {
"id": "36",
"00": "Successfull delivered scheduler config",
"01": "Scheduler config seems to be broken"
},
"scheduler_store": {
"id": "37",
"00": "Successfull stored scheduler config",
"01": "Not enough access rights for this operation",
"02": "Could not store a valid scheduler XML"
},
"getUserlist": {
"id": "38",
"00": "Userlist was successfully delivered",
"01": "Not enough access rights for this operation"
},
"get_act_programme": {
"id": "39",
"00": "Successfully fetched the program",
"01": "Cannot fetch actual program"
}
}
{
"exec_job": {
"id": "01",
"00": "Execute job ::job::",
"01": "Fatal: Could not execute job ::job::. Command ::exec:: results in Exception ::Exception::. Stopped watcher"
},
"schedule_job": {
"id": "02",
"00": "Scheduled job ::job:: for ::scheduled_for:: at ::scheduled_at::",
"01": "Could not execute job"
},
"load_playlist": {
"id": "03",
"00": "Load playlist ::uri::",
"01": "Could not load playlist ::uri::. File does not exist!",
"02": "Controller failed to load playlist ::uri::. Message was '::message::'"
},
"play_playlist": {
"id": "04",
"00": "Started playlist",
"01": "Controller failed to start playlist. Message was '::message::'"
},
"stop_playlist": {
"id": "05",
"00": "Started playlist",
"01": "Controller failed to start playlist. Message was '::message::'"
},
"start_recording": {
"id": "06",
"00": "Started recording",
"01": "Controller failed to start recording. Message was '::message::'"
},
"stop_recording": {
"id": "07",
"00": "Stopped recording",
"01": "Controller failed to stop recording. Message was '::message::'"
},
"precache": {
"id": "08",
"00": "Precached playlists",
"01": "Could not precache playlist."
},
"clean_cached": {
"id": "09",
"00": "Cleaned cache",
"01": "Could not clean cache"
},
"on_start": {
"id": "10",
"00": "Do initial jobs",
"01": "Could not do initial jobs"
},
"lookup_prearranged": {
"id": "11",
"00": "Lookup for prearranged tracks",
"01": "No system channel available"
},
"start_prearranged": {
"id": "12",
"00": "Started preaarranged tracks"
},
"end_prearranged": {
"id": "13",
"00": "Stopped preaarranged tracks"
}
}
#!/usr/bin/python3
import time
import sys
import redis
from argparse import ArgumentParser
# own libs
from modules.cli_tool.padavan import Padavan
from libraries.exceptions.auraexceptions import PlaylistException
from libraries.base.config import AuraConfig
class Guru(AuraConfig):
parser = None
args = None
# ------------------------------------------------------------------------------------------ #
def __init__(self):
super(Guru, self).__init__()
self.init_argument_parser()
self.handle_arguments()
def handle_arguments(self):
if self.args.stoptime:
start = time.time()
if not self.args.quiet:
print("Guru thinking...")
try:
p = Padavan(self.args, self.config)
p.meditate()
except PlaylistException as pe:
# typically there is no next file found
if not self.args.quiet:
print(pe)
else:
print("")
exit(4)
except redis.exceptions.TimeoutError as te:
print("Timeout when waiting for redis message. Is AURA daemon running? Exiting...")
exit(3)
if not self.args.quiet:
print("...result: ")
if p.stringreply != "":
if p.stringreply[len(p.stringreply)-1] == "\n":
print(p.stringreply[0:len(p.stringreply) - 1])
else:
print(p.stringreply[0:len(p.stringreply)])
if self.args.stoptime:
end = time.time()
exectime = end-start
print("execution time: "+str(exectime)+"s")
def init_argument_parser(self):
try:
self.create_parser()
self.args = self.parser.parse_args()
except (ValueError, TypeError) as e:
if self.parser is not None:
self.parser.print_help()
print()
print(e)
exit(1)
def create_parser(self):
self.parser = ArgumentParser()
# options
self.parser.add_argument("-sep", "--stop-execution-time", action="store_true", dest="stoptime", default=False, help="Prints the execution time at the end of the skript")
self.parser.add_argument("-q", "--quiet", action="store_true", dest="quiet", default=False, help="Just the result will outputed to stout")
self.parser.add_argument("-rd", "--recreate-database", action="store_true", dest="recreatedb", default=False, help="Do you want to recreate the database?")
# getter
self.parser.add_argument("-pcs", "--print-connection-status", action="store_true", dest="get_connection_status", default=False, help="Prints the status of the connection to liquidsoap, pv and tank")
self.parser.add_argument("-gam", "--get-active-mixer", action="store_true", dest="get_active_mixer", default=False, help="Which mixer is activated?")
self.parser.add_argument("-pms", "--print-mixer-status", action="store_true", dest="get_mixer_status", default=False, help="Prints all mixer sources and their states")
self.parser.add_argument("-pap", "--print-act-programme", action="store_true", dest="get_act_programme", default=False, help="Prints the actual Programme, the controller holds")
# liquid manipulation
self.parser.add_argument("-am", "--select-mixer", action="store", dest="select_mixer", default=-1, metavar="MIXERNAME", help="Which mixer should be activated?")
self.parser.add_argument("-dm", "--de-select-mixer", action="store", dest="deselect_mixer", default=-1, metavar="MIXERNAME", help="Which mixer should be activated?")
self.parser.add_argument("-vm", "--volume", action="store", dest="set_volume", default=0, metavar=("MIXERNUM", "VOLUME"), nargs=2, help="Set volume of a mixer source", type=int)
# parser.add_argument("-as", "--add-source", action="store", dest="add_source", default=False,
# help="Add new source to LiquidSoap mixer [Experimental]")
# playlist in/output
self.parser.add_argument("-fnp", "--fetch-new-programmes", action="store_true", dest="fetch_new_programme", default=False, help="Fetch new programmes from calendarurl in comba.ini")
self.parser.add_argument("-pmq", "--print-message-queue", action="store_true", dest="print_message_queue", default=False, help="Prints message queue")
# playlist manipulation
self.parser.add_argument("-spe", "--swap-playlist-entries", action="store", dest="swap_playlist_entries", default=0, metavar=("FROM", "TO"), nargs=2, help="Swaps the sources of two Playlistentries")
self.parser.add_argument("-dpe", "--delete-playlist-entry", action="store", dest="delete_playlist_entry", default=0, metavar="INDEX", nargs=1, help="Delete Playlistentry at INDEX")
self.parser.add_argument("-ipe", "--insert-playlist-entry", action="store", dest="insert_playlist_entry", default=0, metavar=("FROMTIME", "SOURCE"), nargs=2, help="Add a new Playlistentry at a given index. Set fromtime with this format: 2017-12-31T13:30:00") # , type=valid_playlist_entry)
# send a redis message
self.parser.add_argument("-rm", "--redis-message", action="store", dest="redis_message", default=False, metavar=("CHANNEL", "MESSAGE"), nargs=2, help="Send a redis message to the Listeners")
# calls from liquidsoap
self.parser.add_argument("-gnf", "--get-next-file-for", action="store", dest="get_file_for", default=False, metavar="PLAYLISTTYPE", help="For which type you wanna GET a next audio file?")
self.parser.add_argument("-snf", "--set-next-file-for", action="store", dest="set_file_for", default=False, metavar=("PLAYLISTTYPE", "FILE"), nargs=2, help="For which type you wanna SET a next audio file?")
self.parser.add_argument("-np", "--now-playing", action="store_true", dest="now_playing", default=False, help="Which source is now playing")
self.parser.add_argument("-ip", "--init-player", action="store_true", dest="init_player", default=False, help="Reset liquidsoap volume and mixer activations?")
if len(sys.argv) == 1:
raise ValueError("No Argument passed!")
def valid_playlist_entry(argument):
from datetime import datetime
try:
index = int(argument[0])
fromtime = datetime.strptime(argument[1], "%Y-%m-%d")
source = argument[2]
return index, fromtime, source
except (ValueError, IndexError):
raise ValueError("Not a valid playlist entry: '{0}'.".format(argument))
# # ## ## ## ## ## # #
# # ENTRY FUNCTION # #
# # ## ## ## ## ## # #
def main():
Guru()
# # ## ## ## ## ## ## # #
# # End ENTRY FUNCTION # #
# # ## ## ## ## ## ## # #
if __name__ == "__main__":
main()
"""
Common aura functions
"""
from modules.base.config import ConfigReader
class AuraConfig:
"""
AuraConfig reads and stores the configuration from aura.ini
"""
config = None
def __init__(self):
self.read_config()
def read_config(self):
"""
reads aura.ini
:return:
"""
self.config = ConfigReader()
self.config.load_config()
"""
Aura logger functions
"""
import logging
class AuraLogger:
logger = None
def __init__(self):
self.__create_logger("AuraEngine")
def __create_logger(self, name):
"""
Creates the logger instance for AuraEngine
:param name: LoggerName
:return:
"""
lvl = self.config.get("loglevel").upper()  # logging expects upper-case level names like "INFO"
# create logger
self.logger = logging.getLogger(name)
self.logger.setLevel(lvl)
# create file handler for logger
file_handler = logging.FileHandler(self.config.get("logdir") + "/engine.log")
file_handler.setLevel(lvl)
# create stream handler for logger
stream_handler = logging.StreamHandler()
stream_handler.setLevel(lvl)
# set format of log
datepart = "%(asctime)s:%(name)s:%(levelname)s"
message = " - %(message)s - "
filepart = "[%(filename)s:%(lineno)s-%(funcName)s()]"
formatter = logging.Formatter(datepart + message + filepart)
# set log of handlers
file_handler.setFormatter(formatter)
stream_handler.setFormatter(formatter)
# add handlers to the logger
self.logger.addHandler(file_handler)
self.logger.addHandler(stream_handler)
__author__ = 'gg'
# -*- coding: utf-8 -*-
import sys
import time
import logging
import datetime
from sqlalchemy import orm, func, Boolean, Column, DateTime, Integer, String, ForeignKey, ForeignKeyConstraint
from sqlalchemy.orm import relationship
from sqlalchemy.sql.expression import false
from libraries.database.database import DB
from libraries.enum.scheduleentrytype import ScheduleEntryType
class AuraDatabaseModel:
logger = None
def __init__(self):
self.logger = logging.getLogger("AuraEngine")
def store(self, add=False, commit=False):
if add:
DB.session.add(self)
if commit:
DB.session.commit()
def delete(self, commit=False):
# delete through the session this object is attached to
current_db_session = DB.session.object_session(self)
current_db_session.delete(self)
if commit:
current_db_session.commit()
def _asdict(self):
return self.__dict__
def recreate_db(self, systemexit = False):
manualschedule = Schedule()
manualschedule.schedule_id = 0
manualschedule.show_name = "Manual Show"
self.logger.debug("Recreating Database...")
DB.drop_all()
self.logger.debug("all dropped. creating...")
DB.create_all()
self.logger.debug("inserting manual scheduling possibility and fallback trackservice schedule")
DB.session.add(manualschedule)
# db.session.add(fallback_trackservice_schedule)
self.logger.debug("all created. commiting...")
DB.session.commit()
self.logger.debug("Database recreated!")
if systemexit:
sys.exit(0)
# ------------------------------------------------------------------------------------------ #
class Schedule(DB.Model, AuraDatabaseModel):
"""
One specific Schedule for a show on a timeslot
"""
__tablename__ = 'schedule'
# primary and foreign keys
schedule_id = Column(Integer, primary_key=True, autoincrement=False)
show_id = Column(Integer) # well, not needed..
schedule_start = Column(DateTime) # can be null due to manual entries
schedule_end = Column(DateTime) # can be null due to manual entries
show_name = Column(String(256))
show_hosts = Column(String(256))
rtr_category = Column(String(256))
comment = Column(String(512))
languages = Column(String(256))
type = Column(String(256))
category = Column(String(256))
topic = Column(String(256))
musicfocus = Column(String(256))
is_repetition = Column(Boolean())
playlist_id = Column(Integer)
timeslot_fallback_id = Column(Integer)
show_fallback_id = Column(Integer)
station_fallback_id = Column(Integer)
def get_length(self):
# length of the schedule in seconds
return int((self.schedule_end - self.schedule_start).total_seconds())
# ------------------------------------------------------------------------------------------ #
class ScheduleEntry(DB.Model, AuraDatabaseModel):
"""
One schedule can have multiple entries
"""
__tablename__ = 'schedule_entry'
# primary and foreign keys
playlist_id = Column(Integer, primary_key=True, nullable=False, autoincrement=False)
entry_num = Column(Integer, primary_key=True, nullable=False, autoincrement=False)
schedule_id = Column(Integer, ForeignKey("schedule.schedule_id"))
entry_start = Column(DateTime)
source = Column(String(256))
volume = Column(Integer, default=100)
is_fallback = Column(Boolean, default=False)
cleansource = ""
entry_start_unix = 0
programme_index = -1
type = None
schedule = relationship("Schedule", foreign_keys=[schedule_id], lazy="joined")
# normal constructor
def __init__(self, **kwargs):
super(ScheduleEntry, self).__init__(**kwargs)
self.calc_unix_times()
# constructor like - called from sqlalchemy
@orm.reconstructor
def reconstructor(self):
self.calc_unix_times()
self.set_entry_type()
def define_clean_source(self):
if self.source.startswith("http") or self.source.startswith("live") or self.source.startswith("linein"):
self.cleansource = self.source
if self.source.startswith("pool") or self.source.startswith("file"):
self.cleansource = self.source[7:]
if self.source.startswith("playlist"):
self.cleansource = self.source[11:]
def calc_unix_times(self):
if self.entry_start is not None:
self.entry_start_unix = time.mktime(self.entry_start.timetuple())
def set_entry_type(self):
if self.source.startswith("http"):
self.type = ScheduleEntryType.STREAM
if self.source.startswith("pool") or self.source.startswith("playlist") or self.source.startswith("file"):
self.type = ScheduleEntryType.FILESYSTEM
if self.source.startswith("live") or self.source.startswith("linein"):
self.type = ScheduleEntryType.LIVE
# ------------------------------------------------------------------------------------------ #
@staticmethod
def select_all():
# fetching all entries
all_entries = DB.session.query(ScheduleEntry).filter(ScheduleEntry.is_fallback == false()).order_by(ScheduleEntry.entry_start).all()
cnt = 0
for entry in all_entries:
entry.programme_index = cnt
cnt = cnt + 1
return all_entries
@staticmethod
def select_next_manual_entry_num():
max_manual_entry_num = DB.session.query(func.max(ScheduleEntry.entry_num)).filter(ScheduleEntry.schedule_id == 0).first()
if max_manual_entry_num[0] is None:
return 0
else:
return int(max_manual_entry_num[0])+1
# ------------------------------------------------------------------------------------------ #
@staticmethod
def upcoming(datefrom=None):
# the default has to be resolved at call time, not once at import time
if datefrom is None: datefrom = datetime.datetime.now()
upcomingtracks = DB.session.query(ScheduleEntry).filter(ScheduleEntry.entry_start > datefrom).all()
return upcomingtracks
# ------------------------------------------------------------------------------------------ #
@staticmethod
def select_one(playlist_id, entry_num):
return DB.session.query(ScheduleEntry).filter(ScheduleEntry.playlist_id == playlist_id, ScheduleEntry.entry_num == entry_num).first()
# ------------------------------------------------------------------------------------------ #
def __str__(self):
return "ScheduleID: #" + str(self.schedule_id) + " Showname: " + self.schedule.show_name + " starts @ " + str(self.entry_start) + " and plays " + self.source
# ------------------------------------------------------------------------------------------ #
class TrackService(DB.Model, AuraDatabaseModel):
__tablename__ = 'trackservice'
trackservice_id = Column(Integer, primary_key=True, autoincrement=True)
playlist_id = Column(Integer, nullable=False)
entry_num = Column(Integer, nullable=False)
source = Column(String(255), nullable=False)
start = Column(DateTime, nullable=False, default=func.now())
__table_args__ = (
ForeignKeyConstraint(['playlist_id', 'entry_num'], ['schedule_entry.playlist_id', 'schedule_entry.entry_num']),
)
#schedule = relationship("Schedule", foreign_keys=[schedule_id], lazy="joined")
# trackservice_entry = relationship("ScheduleEntry", foreign_keys=[playlist_id, entry_num], lazy="joined")
schedule_entry = relationship("ScheduleEntry", primaryjoin="and_(TrackService.playlist_id==ScheduleEntry.playlist_id, TrackService.entry_num==ScheduleEntry.entry_num)", lazy="joined")
@staticmethod
def select_one(trackservice_id):
return DB.session.query(TrackService).filter(TrackService.trackservice_id == trackservice_id).first()
# ------------------------------------------------------------------------------------------ #
# class TrackServiceSchedule(db.Model, AuraDatabaseModel):
# """
# Trackservice is tracking every schedule.
# """
# __tablename__ = 'trackservice_schedule'
#
# # primary and foreign keys
# ts_schedule_id = Column(Integer, primary_key=True, autoincrement=True)
# schedule_id = Column(Integer, ForeignKey("schedule.schedule_id"))
#
# schedule = relationship("Schedule", foreign_keys=[schedule_id], lazy="joined")
#
# # ------------------------------------------------------------------------------------------ #
# @staticmethod
# def select_one(schedule_id):
# # damn BAND-AID
# # db.session.commit()
#
# return db.session.query(ScheduleEntry).filter(TrackServiceSchedule.schedule_id == schedule_id).first()
#
# # ------------------------------------------------------------------------------------------ #
# class TrackServiceScheduleEntry(db.Model, AuraDatabaseModel):
# """
# And a schedule can have multiple entries
# """
# __tablename__ = 'trackservice_entry'
#
# # primary and foreign keys. the foreign keys here can be null, because of fallback stuff
# ts_entry_id = Column(Integer, primary_key=True, autoincrement=True)
# ts_schedule_id = Column(Integer, ForeignKey("trackservice_schedule.ts_schedule_id"), nullable=True)
# playlist_id = Column(Integer, nullable=True)
# entry_num = Column(Integer, nullable=True)
#
# fallback = Column(Boolean, default=False)
# fallback_start = Column(DateTime, nullable=True, default=None)
# source = Column(String(256), nullable=True, default=None)
#
# # foreign key definitions
# __table_args__ = (
# ForeignKeyConstraint(['playlist_id', 'entry_num'], ['schedule_entry.playlist_id', 'schedule_entry.entry_num']),
# )
#
# trackservice_schedule = relationship("TrackServiceSchedule", foreign_keys=[ts_schedule_id], lazy="joined")
# #trackservice_entry = relationship("ScheduleEntry", foreign_keys=[playlist_id, entry_num], lazy="joined")
# trackservice_entry = relationship("ScheduleEntry", primaryjoin="and_(TrackServiceScheduleEntry.playlist_id==ScheduleEntry.playlist_id, TrackServiceScheduleEntry.entry_num==ScheduleEntry.entry_num)" , lazy="joined")
#
# @staticmethod
# def select_all():
# return db.session.query(TrackServiceScheduleEntry).filter().all()
#AuraDatabaseModel.recreate_db(True)
"""
Database conn
"""
from sqlalchemy.ext.declarative import declarative_base
from flask_sqlalchemy import SQLAlchemy
# from flask_babel import Babel
from flask import Flask
from modules.base.config import ConfigReader # pylint: disable=import-error
def create_app(install_dir, uri):
"""
creates flask app context
:param install_dir: Installdir of Aura
:param uri: Database connection uri
:return: Flask object
"""
app = Flask(__name__, template_folder=install_dir + '/modules/web/templates')
app.config["SQLALCHEMY_DATABASE_URI"] = uri
app.config['BABEL_DEFAULT_LOCALE'] = 'de'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
return app
def create_database():
"""
creates sqlalchemy database connection
:return: SQLAlchemy object
"""
#### load config ####
config = ConfigReader()
config.load_config()
#### read config ####
install_dir = config.get("install_dir")
db_name = config.get("db_name")
db_user = config.get("db_user")
db_pass = config.get("db_pass")
db_host = config.get("db_host")
#### create database conn ####
uri = "mysql://"+db_user+":"+db_pass+"@"+db_host+"/"+db_name+"?charset=utf8"
app = create_app(install_dir, uri)
database = SQLAlchemy(app)
# babel = Babel(app)
return app, database
Base = declarative_base()
APP, DB = create_database()
# -*- coding: utf-8 -*-
import redis
import time
import datetime
import simplejson
import re
import uuid
class RedisStateStore(object):
"""Store and get Reports from redis"""
def __init__(self, **redis_kwargs):
"""The default connection parameters are: host='localhost', port=6379, db=0"""
self.db = redis.Redis(**redis_kwargs)
self.channel = '*'
self.section = '*'
self.separator = '_'
self.daily = False
# ------------------------------------------------------------------------------------------ #
def set_channel(self, channel):
"""
Set the channel
@type channel: string
@param channel: the channel
"""
self.channel = channel
# ------------------------------------------------------------------------------------------ #
def set_section(self, section):
"""
Set the section
@type section: string
@param section: the section
"""
self.section = section
# ------------------------------------------------------------------------------------------ #
def set_alive_state(self):
"""
Alive function - report every 20 seconds that we are still alive
"""
self.set_state('alive', 'Hi', 21)
# ------------------------------------------------------------------------------------------ #
def get_alive_state(self, channel):
"""
Get the alive state of a channel
@type channel: string
@param channel: the channel
@rtype: string/None
@return: a string, or None on a negative result
"""
return self.get_state('alive', channel)
# ------------------------------------------------------------------------------------------ #
def set_state(self, name, value, expires=None, channel=None):
"""
Set a state
@type name: string
@param name: name of the state
@type value: string
@param value: value
@type expires: int
@param expires: expiry in seconds (optional)
@type channel: string
@param channel: channel (optional)
"""
if not channel:
channel = self.channel
key = self.__create_key__(channel + 'State', name)
if value == "":
self.db.delete(key)
else:
# publish on channel
message = simplejson.dumps({'eventname':name, 'value': value})
self.db.publish(channel + 'Publish', message)
# store in database
self.db.set(key, value)
if expires:
self.db.expire(key, expires)
# ------------------------------------------------------------------------------------------ #
def get_state(self, name, channel):
"""
Get a state
@type name: string
@param name: name of the state
@type channel: string
@param channel: channel (optional)
"""
key = self.__create_key__(channel + 'State', name)
return self.db.get(key)
# ------------------------------------------------------------------------------------------ #
def queue_add_event(self, eventtime, name, value, channel=None):
"""
Announce an event
@type eventtime: string
@param eventtime: date and time of the event
@type name: string
@param name: name of the event
@type value: dict
@param value: values
@type channel: string
@param channel: channel (optional)
"""
timeevent = datetime.datetime.strptime(eventtime[0:16],"%Y-%m-%dT%H:%M")
expire = int(time.mktime(timeevent.timetuple()) - time.time()) + 60
self.__set_event__(name, eventtime, value, 'Evqueue', 'evqueue', expire, channel)
# ------------------------------------------------------------------------------------------ #
def queue_remove_events(self, name=None, channel=None):
"""
Delete events
@type name: string
@param name: name of the event
@type channel: string
@param channel: channel (optional)
"""
query = channel + 'Evqueue_' if channel else '*Evqueue_'
query = query + '*_' + name if name else query + '*_*'
keys = self.db.keys(query)
for delkey in keys:
self.db.delete(delkey)
# ------------------------------------------------------------------------------------------ #
def fire_event(self, name, value, channel=None):
"""
Fire an event
@type name: string
@param name: name of the event
@type value: dict
@param value: values
@type channel: string
@param channel: channel (optional)
"""
eventtime = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M")
self.__set_event__(name, eventtime, value, 'Event', 'events', 60, channel)
# ------------------------------------------------------------------------------------------ #
def __set_event__(self, name, eventtime, value, type, namespace, expire, channel=None):
"""
Store an event (helper for fire_event and queue_add_event)
@type eventtime: string
@param eventtime: date and time of the event
@type value: dict
@param value: values
@type channel: string
@param channel: channel (optional)
"""
if not channel:
channel = self.channel
timeevent = datetime.datetime.strptime(eventtime[0:16],"%Y-%m-%dT%H:%M")
key = self.__create_key__(channel + type, eventtime, name)
value['starts'] = eventtime[0:16]
value['eventchannel'] = channel
value['eventname'] = name
self.db.hset(key, namespace, value)
self.db.expire(key, expire)
# ------------------------------------------------------------------------------------------ #
def get_event_queue(self, name=None, channel=None):
"""
Get the queued events of a channel
@type channel: string
@param channel: channel (optional)
@rtype: list
@return: list of events
"""
query = channel + 'Evqueue_' if channel else '*Evqueue_'
query = query + '*_' + name if name else query + '*_*'
keys = self.db.keys(query)
keys.sort()
entries = self.__get_entries__(keys, 'evqueue')
return entries
# ------------------------------------------------------------------------------------------ #
def get_events(self, name=None, channel=None):
"""
Get the events of a channel
@type channel: string
@param channel: channel (optional)
@rtype: list
@return: list of events
"""
query = channel + 'Event_' if channel else '*Event_'
query = query + '*_' + name if name else query + '*_*'
keys = self.db.keys(query)
keys.sort()
entries = self.__get_entries__(keys, 'events')
return entries
# ------------------------------------------------------------------------------------------ #
def get_next_event(self, name=None, channel=None):
"""
Get the next queued event
@type channel: string
@param channel: channel (optional)
@rtype: dict/boolean
@return: an event, or False
"""
events = self.get_event_queue(name, channel)
if len(events) > 0:
result = events.pop(0)
else:
result = False
return result
# ------------------------------------------------------------------------------------------ #
def store(self, level, value):
"""
Store a hash
@type level: string
@param level: the error level
@type value: dict
@param value: values as dict
"""
microtime = str(time.time())
value['microtime'] = microtime
value['level'] = level
key = self.__create_key__(self.channel, self.section, level, microtime, str(uuid.uuid1()))
self.db.hset(key, self.channel, value)
self.db.expire(key, 864000)
# ------------------------------------------------------------------------------------------ #
def __get_keys__(self, level ='*'):
"""
Find Redis keys by search criterion
@type level: string
@param level: filter by an error level
@rtype: list
@return: the keys matching the search criterion
"""
key = self.__create_key__(self.channel, self.section, level)
microtime = str(time.time())
search = microtime[0:4] + '*' if self.daily else '*'
return self.db.keys(key + self.separator + '*')
# ------------------------------------------------------------------------------------------ #
def __create_key__(self, *args):
"""
Create a key from an arbitrary number of arguments
@rtype: string
@return: the key
"""
return self.separator.join(args)
def get_entries(self, level ='*'):
"""
Get a list of hashes by search criterion
@type level: string
@param level: filter by an error level
@rtype: list
@return: Redis hashes
"""
keys = self.__get_keys__(level)
# sort by the microtime component of the key (list.sort takes no comparator in Python 3)
keys.sort(key=lambda k: float(k.split('_', 4)[3]))
entries = self.__get_entries__(keys, self.channel)
entries = sorted(entries, key=lambda k: k['microtime'], reverse=True)
return entries
# ------------------------------------------------------------------------------------------ #
def __get_entries__(self, keys, channel):
entries = []
for key in keys:
entry = self.db.hget(key, channel)
if entry is not None:
try:
entry = entry.decode('utf-8').replace('None', '"None"')
entry = re.sub("########[^]]*########", lambda x:x.group(0).replace('\"','').replace('\'',''),entry.replace("\\\"","########").replace("\\'","++++++++").replace("'",'"').replace('u"','"').replace('"{','{').replace('}"','}')).replace("########","\"")
entry = simplejson.loads(entry)
entry['key'] = key
entries.append(entry)
except:
pass
return entries
# ------------------------------------------------------------------------------------------ #
def publish(self, channel, message):
subscriber_count = self.db.execute_command('PUBSUB', 'NUMSUB', channel)
if channel.lower().find("reply") < 0 and subscriber_count[1] == 0:
raise Exception("No subscriber! Is Aura daemon running?")
self.db.publish(channel, message)
from enum import Enum
class TerminalColors(Enum):
HEADER = "\033[95m"
RED = "\033[31m"
GREEN = "\033[32m"
ORANGE = "\033[33m"
BLUE = "\033[34m"
PINK = "\033[35m"
CYAN = "\033[36m"
WARNING = "\033[31m"
FAIL = "\033[41m"
BOLD = "\033[1m"
UNDERLINE = "\033[4m"
ENDC = "\033[0m"
from enum import Enum
class RedisChannel(Enum):
STANDARD = "aura"
DPE_REPLY = "delete_playlist_entry_reply"
FNP_REPLY = "fetch_new_programme_reply"
GAP_REPLY = "get_act_programme_reply"
GCS_REPLY = "get_connection_status_reply"
IPE_REPLY = "insert_playlist_entry_reply"
IP_REPLY = "init_player_reply"
MPE_REPLY = "move_playlist_entry_reply"
PMQ_REPLY = "print_message_queue_reply"
RDB_REPLY = "recreate_database_reply"
SNF_REPLY = "get_next_file_reply"
from enum import Enum
class ScheduleEntryType(Enum):
FILESYSTEM = "fs"
STREAM = "http"
LIVE = "live"
class NoProgrammeLoadedException(Exception):
pass
class LQConnectionError(Exception):
pass
class RedisConnectionException(Exception):
pass
class PlaylistException(Exception):
pass