#
# Aura Engine (https://gitlab.servus.at/aura/engine)
#
# Copyright (C) 2017-2020 - The Aura Engine Team.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.




import threading
import queue
import logging

from datetime                           import datetime

from src.base.utils                     import SimpleUtil as SU
from src.base.models                    import Schedule, Playlist, PlaylistEntry, PlaylistEntryMetaData
from src.scheduling.calender_fetcher    import CalendarFetcher



class AuraCalendarService(threading.Thread):
    """ 
    The `AuraCalendarService` retrieves all current schedules and related
    playlists including audio files from the configured API endpoints and
    stores it in the local database.

    To perform the API queries it utilizes the CalendarFetcher class.


    Attributes:
        #FIXME Review attributes not needed.

    """
    queue = None
    config = None
    logger = None
    fetched_schedule_data = None
    calendar_fetcher = None
    stop_event = None


    def __init__(self, config):
        """
        Initializes the class.

        Args:
            config (AuraConfig):    The configuration
        """
        threading.Thread.__init__(self)

        self.config = config
        self.logger = logging.getLogger("AuraEngine")
        self.queue = queue.Queue()
        self.stop_event = threading.Event()
        self.calendar_fetcher = CalendarFetcher(config)



    def get_queue(self):
        """ 
        Retrieves the queue of fetched data.
        """
        return self.queue
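
    # A minimal usage sketch (assumption: the caller shown here is hypothetical and
    # not part of this module; it only requires an `AuraConfig` instance named `config`):
    #
    #   service = AuraCalendarService(config)
    #   service.start()                          # executes `run()` in a separate thread
    #   response = service.get_queue().get()     # blocks until fetching has finished
    #
    #   if isinstance(response, str) and response.startswith("fetching_aborted"):
    #       ...  # nothing was fetched or an error occurred
    #   else:
    #       ...  # `response` is the list of stored `Schedule` records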



    def run(self):
        """
        Fetch calendar data and store it in the database. Also handles local deletion of remotely
        deleted schedules.

        Returns
            Schedule ([]):  An arrar of retrieved schedules passed via `self.queue`
        """
        result = []
        now_unix = SU.timestamp()
        scheduling_window_start = self.config.get("scheduling_window_start")

        try:
            fetched_schedule_data = self.calendar_fetcher.fetch()
            self.logger.debug("Schedule data fetched from API: " + str(fetched_schedule_data))

            # If nothing is fetched, return
            if not fetched_schedule_data:
                self.queue.put("fetching_aborted Nothing fetched")
                return
            
            # Check if existing schedules have been deleted
            local_schedules = Schedule.select_programme(datetime.now())
            for local_schedule in local_schedules:

                # Only allow deletion of schedules which start in the future, before their scheduling window opens
                if local_schedule.start_unix > now_unix:
                    if (local_schedule.start_unix - scheduling_window_start) > now_unix:

                        # Filter the local schedule from the fetched ones
                        existing_schedule = list(filter(lambda new_schedule: \
                            new_schedule["schedule_id"] == local_schedule.schedule_id, fetched_schedule_data))
                        
                        if not existing_schedule:
                            self.logger.info("Schedule #%s has been deleted remotely, hence it is also deleted locally [%s]" % \
                                (local_schedule.schedule_id, str(local_schedule)))
                            local_schedule.delete(commit=True)
                            self.logger.info("Deleted local schedule #%s from database" % local_schedule.schedule_id)

                    else:
                        msg = "Schedule #%s has been deleted remotely. Since the scheduling window has already started, it won't be deleted locally." % \
                            local_schedule.schedule_id
                        self.logger.error(SU.red(msg))

            # Process fetched schedules    
            for schedule in fetched_schedule_data:

                # Check schedule for validity
                if "start" not in schedule:
                    self.logger.warning("No 'start' of schedule given. Skipping the schedule: %s " % str(schedule))
                    continue
                if "end" not in schedule:
                    self.logger.warning("No 'end' of schedule given. Skipping the schedule: %s " % str(schedule))
                    continue

                # Store the schedule
                schedule_db = self.store_schedule(schedule)

                # Store playlists to play
                self.store_playlist(schedule_db, schedule_db.playlist_id, schedule["playlist"])
                if schedule_db.schedule_fallback_id:
                    self.store_playlist(schedule_db, schedule_db.schedule_fallback_id, schedule["schedule_fallback"])
                if schedule_db.show_fallback_id:
                    self.store_playlist(schedule_db, schedule_db.show_fallback_id, schedule["show_fallback"])
                if schedule_db.station_fallback_id:
                    self.store_playlist(schedule_db, schedule_db.station_fallback_id, schedule["station_fallback"])


                # self.store_playlist(schedule_db, schedule_db.playlist_id, schedule["playlist"], PlaylistType.DEFAULT.id)
                # if schedule_db.schedule_fallback_id:
                #     self.store_playlist(schedule_db, schedule_db.schedule_fallback_id, schedule["schedule_fallback"], PlaylistType.TIMESLOT.id)
                # if schedule_db.show_fallback_id:
                #     self.store_playlist(schedule_db, schedule_db.show_fallback_id, schedule["show_fallback"], PlaylistType.SHOW.id)
                # if schedule_db.station_fallback_id:
                #     self.store_playlist(schedule_db, schedule_db.station_fallback_id, schedule["station_fallback"], PlaylistType.STATION.id)

                result.append(schedule_db)

            # Release the mutex
            self.queue.put(result)
        except Exception as e:
            # Release the mutex
            self.logger.warning("Fetching aborted due to: %s" % str(e), e)
            self.queue.put("fetching_aborted " + str(e))

        # terminate the thread
        return



    def store_schedule(self, schedule):
        """
        Stores the given schedule to the database.

        Args:
            schedule (Schedule):    The schedule
        """
        schedule_db = Schedule.select_show_on_datetime(schedule["start"])
        havetoadd = False

        if not schedule_db:
            self.logger.debug("no schedule with given schedule id in database => create new")
            schedule_db = Schedule()
            havetoadd = True


        schedule_db.show_id = schedule["show_id"]
        schedule_db.schedule_id = schedule["schedule_id"]
        schedule_db.schedule_start = schedule["start"]
        schedule_db.schedule_end = schedule["end"]
        schedule_db.show_name = schedule["show_name"]
        schedule_db.show_hosts = schedule["show_hosts"]
        schedule_db.is_repetition = schedule["is_repetition"]
        schedule_db.funding_category = schedule["show_fundingcategory"]
        schedule_db.languages = schedule["show_languages"]
        schedule_db.type = schedule["show_type"]
        schedule_db.category = schedule["show_categories"]
        schedule_db.topic = schedule["show_topics"]
        schedule_db.musicfocus = schedule["show_musicfocus"]

        schedule_db.playlist_id = schedule["playlist_id"]
        schedule_db.schedule_fallback_id = schedule["schedule_fallback_id"]
        schedule_db.show_fallback_id = schedule["show_fallback_id"]
        schedule_db.station_fallback_id = schedule["station_fallback_id"]

        schedule_db.store(add=havetoadd, commit=True)

        return schedule_db



    # def store_playlist(self, schedule_db, playlist_id, fetched_playlist, fallbackplaylist_type=0):
    def store_playlist(self, schedule_db, playlist_id, fetched_playlist):
        """
        Stores the Playlist to the database.
        """
        if not playlist_id or not fetched_playlist:
            self.logger.debug(f"Playlist ID#{playlist_id} is not available!")
            # self.logger.debug("Playlist type %s with ID '%s' is not available!" % (fallbackplaylist_type, playlist_id))
            return
            
        playlist_db = Playlist.select_playlist_for_schedule(schedule_db.schedule_start, playlist_id)
        havetoadd = False

        if not playlist_db:
            playlist_db = Playlist()
            havetoadd = True

        self.logger.debug("Storing playlist %d for schedule (%s)" % (playlist_id, str(schedule_db)))
        playlist_db.playlist_id = playlist_id
        playlist_db.schedule_start = schedule_db.schedule_start
        playlist_db.show_name = schedule_db.show_name
        # playlist_db.fallback_type = fallbackplaylist_type
        if "entries" in fetched_playlist:
            playlist_db.entry_count = len(fetched_playlist["entries"])
        else:
            playlist_db.entry_count = 0

        playlist_db.store(havetoadd, commit=True)
      
        if playlist_db.entry_count > 0:
            self.store_playlist_entries(schedule_db, playlist_db, fetched_playlist)

        return playlist_db



    def store_playlist_entries(self, schedule_db, playlist_db, fetched_playlist):
        """
        Stores the playlist entries to the database.
        """
        entry_num = 0
        time_marker = playlist_db.start_unix

        self.expand_entry_duration(schedule_db, fetched_playlist)
        self.delete_orphaned_entries(playlist_db, fetched_playlist)  

        for entry in fetched_playlist["entries"]:
            entry_db = PlaylistEntry.select_playlistentry_for_playlist(playlist_db.artificial_id, entry_num)
            havetoadd = False
            if not entry_db:
                entry_db = PlaylistEntry()
                havetoadd = True

            entry_db.entry_start = datetime.fromtimestamp(time_marker)
            entry_db.artificial_playlist_id = playlist_db.artificial_id
            entry_db.entry_num = entry_num
            entry_db.duration = SU.nano_to_seconds(entry["duration"])

            if "uri" in entry:
                # FIXME Refactor mix of uri/filename/file/source
                entry_db.uri = entry["uri"]
                entry_db.source = entry["uri"]
            if "filename" in entry:
                entry_db.source = entry["filename"]

            entry_db.store(havetoadd, commit=True)

            if "file" in entry:
                self.store_playlist_entry_metadata(entry_db, entry["file"]["metadata"])

            entry_num = entry_num + 1
            time_marker += entry_db.duration
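
    # Worked example of the `time_marker` accumulation above (hypothetical values):
    # if the playlist starts at Unix time 1600000000.0 and contains two entries with
    # durations of 300 and 600 seconds, entry #0 gets `entry_start` 1600000000.0 and
    # entry #1 gets 1600000300.0, i.e. each entry starts where the previous one ends.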



    def delete_orphaned_entries(self, playlist_db, fetched_playlist):
        """
        Deletes all playlist entries which are beyond the current playlist's `entry_count`.
        Such entries might exist due to a remotely changed playlist which now has
        fewer entries than before.
        """
        new_last_idx = len(fetched_playlist["entries"])
        existing_last_idx = PlaylistEntry.count_entries(playlist_db.artificial_id)-1

        if existing_last_idx < new_last_idx:
            return 

        for entry_num in range(new_last_idx, existing_last_idx + 1):
            PlaylistEntry.delete_entry(playlist_db.artificial_id, entry_num)
            self.logger.info(SU.yellow("Deleted playlist entry %s:%s" % (playlist_db.artificial_id, entry_num)))
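
    # Example of the orphan cleanup above (hypothetical values): if a playlist stored
    # locally has 5 entries (indices 0..4) and the fetched playlist now only contains 3,
    # then `new_last_idx` is 3 and `existing_last_idx` is 4, so entries #3 and #4 are
    # deleted from the database.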


    def expand_entry_duration(self, schedule_db, fetched_playlist):
        """
        If some playlist entry doesn't have a duration assigned, its duration is expanded to the
        remaining duration of the playlist (= schedule duration minus playlist entries with duration).
        If there's more than one entry without duration, such entries are removed from the playlist.
        """
        total_seconds = (schedule_db.schedule_end - schedule_db.schedule_start).total_seconds()
        total_duration = SU.seconds_to_nano(total_seconds)
        actual_duration = 0        
        missing_duration = []

        for idx, entry in enumerate(fetched_playlist["entries"]):
            if "duration" not in entry:
                missing_duration.append(idx)
            else:
                actual_duration += entry["duration"]
                
        if len(missing_duration) == 1:
            fetched_playlist["entries"][missing_duration[0]]["duration"] = total_duration - actual_duration
            self.logger.info("Expanded duration of playlist entry #%s:%s" % (fetched_playlist["id"], missing_duration[0]))

        elif len(missing_duration) > 1:
            # This case should actually never happen, as TANK doesn't allow more than one entry w/o duration anymore
            # Remove all entries without a duration except the first one
            for i in reversed(missing_duration[1:]):
                self.logger.error(SU.red("Deleted Playlist Entry without duration: %s" % \
                    str(fetched_playlist["entries"][i])))
                del fetched_playlist["entries"][i]



    def store_playlist_entry_metadata(self, entry_db, metadata):
        """
        Stores the meta-data for a PlaylistEntry.
        """
        metadata_db = PlaylistEntryMetaData.select_metadata_for_entry(entry_db.artificial_id)
        havetoadd = False
        if not metadata_db:
            metadata_db = PlaylistEntryMetaData()
            havetoadd = True

        metadata_db.artificial_entry_id = entry_db.artificial_id

        if "artist" in metadata:
            metadata_db.artist = metadata["artist"]
        else:
            metadata_db.artist = ""
        
        if "album" in metadata:
            metadata_db.album = metadata["album"]
        else:
            metadata_db.album = ""

        if "title" in metadata:
            metadata_db.title = metadata["title"]
        else:
            metadata_db.title = ""

        metadata_db.store(havetoadd, commit=True)



    def stop(self):
        """
        Sets the event to signal the thread to stop.
        """
        self.stop_event.set()