import os
import sys
import urllib.error
import urllib.request
import logging
import simplejson

from datetime import datetime, timedelta
#from modules.models.schedule import Schedule


class CalendarFetcher:
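    """
    Fetches the schedule from STEERING (calendar service) and the
    corresponding playlists from TANK (importer service) and merges them
    into one schedule data structure.
    """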
    url = dict()
    url_parameter = dict()
    config = None
    logger = None
    has_already_fetched = False
    fetched_schedule_data = None
    # FIXME another crutch because of the missing TANK
    used_random_playlist_ids = list()

    def __init__(self, config):
        self.config = config
        self.logger = logging.getLogger("AuraEngine")
        self.__set_url__("calendar")
        self.__set_url__("importer")
        self.__set_url__("api_show_")

    def fetch(self):
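        """
        Fetch the upcoming schedules from STEERING and attach the matching
        playlists from TANK. Returns a list of schedule entries which have
        start, end and playlist set, or None if fetching failed.
        """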
        # fetch upcoming schedules from STEERING
        try:
            self.logger.debug("Fetching schedules from STEERING")
            self.fetched_schedule_data = self.__fetch_schedule_data__()
        except urllib.error.HTTPError as e:
            self.logger.critical("Cannot fetch from " + self.url["calendar"] + "! Reason: " + str(e))
            self.fetched_schedule_data = None
            return None
        except (urllib.error.URLError, IOError, ValueError) as e:
            self.logger.critical("Cannot connect to " + self.url["calendar"] + "! Reason: " + str(e))
            self.fetched_schedule_data = None
            return None

        # fetch playlist and fallbacks to the schedules from TANK
        try:
            self.logger.debug("Fetching playlists from TANK")
            self.__fetch_schedule_playlists__()
        except urllib.error.HTTPError as e:
            self.logger.critical("Cannot fetch from " + self.url["importer"] + "! Reason: " + str(e))
            self.fetched_schedule_data = None
            return None
        except (urllib.error.URLError, IOError, ValueError) as e:
            self.logger.critical("Cannot connect to " + self.url["importer"] + "! Reason: " + str(e))
            self.fetched_schedule_data = None
            return None

        return_data = []
        # gather return data
        try:
            for schedule in self.fetched_schedule_data:
                # skip schedule if no start, end or playlist is given
                if "start" not in schedule:
                    self.logger.warning("No start of schedule given. Skipping schedule: " + str(schedule))
                    continue
                if "end" not in schedule:
                    self.logger.warning("No end of schedule given. Skipping schedule: " + str(schedule))
                    continue
                if "playlist" not in schedule:
                    self.logger.warning("No playlist for schedule given. Skipping schedule: " + str(schedule))
                    continue

                return_data.append(schedule)
        except TypeError:
            self.logger.error("Nothing fetched...")
            self.fetched_schedule_data = None
            return None

        return return_data

    # ------------------------------------------------------------------------------------------ #
    def __set_url__(self, type):
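        """
        Read the URL for the given service type from the configuration and
        split it into base URL and query parameters (everything from '?' on).
        """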
        url = self.config.get(type+"url")
        pos = url.find("?")

        if pos > 0:
            self.url[type] = url[0:pos]
            self.url_parameter[type] = url[pos:]
        else:
            self.url[type] = url

    # ------------------------------------------------------------------------------------------ #
    def __fetch_schedule_data__(self):
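        """
        Fetch the schedule data from STEERING (or fall back to hardcoded
        test data if enabled) and strip entries which are in the past or
        more than 24 hours in the future.
        """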
        servicetype = "calendar"
        schedule = None

        # fetch data from steering
        html_response = self.__fetch_data__(servicetype)

        # FIXME move hardcoded test-data to separate testing logic.
        # use testdata if response fails or is empty
        if not html_response or html_response == "[]":
            self.logger.critical("Got no response from Steering!")
            #html_response = '[{"schedule_id":1,"start":"' + (datetime.now() + timedelta(hours=0)).strftime('%Y-%m-%d %H:00:00') + '","end":"' + (datetime.now() + timedelta(hours=1)).strftime('%Y-%m-%d %H:00:00') + '","show_id":9,"show_name":"FROzine","show_hosts":"Sandra Hochholzer, Martina Schweiger","is_repetition":false,"playlist_id":2,"schedule_fallback_id":12,"show_fallback_id":92,"station_fallback_id":1,"rtr_category":"string","comment":"Kommentar","languages":"Sprachen","type":"Typ","category":"Kategorie","topic":"Topic","musicfocus":"Fokus"},{"schedule_id":2,"schedule_start":"' + (datetime.now() + timedelta(hours=1)).strftime('%Y-%m-%d %H:00:00') + '","schedule_end":"' + (datetime.now() + timedelta(hours=2)).strftime('%Y-%m-%d %H:00:00') + '","show_id":10,"show_name":"FROMat","show_hosts":"Sandra Hochholzer, Martina Schweiger","is_repetition":false,"playlist_id":4,"schedule_fallback_id":22,"show_fallback_id":102,"station_fallback_id":1,"rtr_category":"string","comment":"Kommentar","languages":"Sprachen","type":"Typ","category":"Kategorie","topic":"Topic","musicfocus":"Fokus"},{"schedule_id":3,"schedule_start":"' + (datetime.now() + timedelta(hours=2)).strftime('%Y-%m-%d %H:00:00') + '","schedule_end":"' + (datetime.now() + timedelta(hours=3)).strftime('%Y-%m-%d %H:00:00') + '","show_id":11,"show_name":"Radio für Senioren","show_hosts":"Sandra Hochholzer, Martina Schweiger","is_repetition":false,"playlist_id":6,"schedule_fallback_id":32,"show_fallback_id":112,"station_fallback_id":1,"rtr_category":"string","comment":"Kommentar","languages":"Sprachen","type":"Typ","category":"Kategorie","topic":"Topic","musicfocus":"Fokus"}]'

            # use testdata if wanted
            if self.config.get("use_test_data"):
                # FIXME move hardcoded test-data to separate testing logic.
                html_response = '[{"id":1,"schedule_id":1,"automation-id":1,"className":"TestData","memo":"TestData","show_fundingcategory":"TestData","start":"' + (datetime.now() + timedelta(hours=0)).strftime('%Y-%m-%dT%H:00:00') + '","end":"' + (datetime.now() + timedelta(hours=1)).strftime('%Y-%m-%dT%H:00:00') + '","show_id":9,"show_name":"TestData: FROzine","show_hosts":"TestData: Sandra Hochholzer, Martina Schweiger","title":"TestData:title","is_repetition":false,"playlist_id":2,"schedule_fallback_id":12,"show_fallback_id":92,"station_fallback_id":1,"rtr_category":"string","comment":"TestData: Kommentar","show_languages":"TestData: Sprachen","show_type":"TestData: Typ","show_categories":"TestData: Kategorie","show_topics":"TestData: Topic","show_musicfocus":"TestData: Fokus"},' \
                                 '{"id":2,"schedule_id":2,"automation-id":1,"className":"TestData","memo":"TestData","show_fundingcategory":"TestData","start":"' + (datetime.now() + timedelta(hours=1)).strftime('%Y-%m-%dT%H:00:00') + '","end":"' + (datetime.now() + timedelta(hours=2)).strftime('%Y-%m-%dT%H:00:00') + '","show_id":10,"show_name":"TestData: FROMat","show_hosts":"TestData: Sandra Hochholzer, Martina Schweiger","title":"TestData:title","is_repetition":false,"playlist_id":4,"schedule_fallback_id":22,"show_fallback_id":102,"station_fallback_id":1,"rtr_category":"TestData: string","comment":"TestData: Kommentar","show_languages":"TestData: Sprachen","show_type":"TestData: Typ","show_categories":"TestData: Kategorie","show_topics":"TestData: Topic","show_musicfocus":"TestData: Fokus"},' \
                                 '{"id":3,"schedule_id":3,"automation-id":1,"className":"TestData","memo":"TestData","show_fundingcategory":"TestData","start":"' + (datetime.now() + timedelta(hours=2)).strftime('%Y-%m-%dT%H:00:00') + '","end":"' + (datetime.now() + timedelta(hours=3)).strftime('%Y-%m-%dT%H:00:00') + '","show_id":11,"show_name":"TestData: Radio für Senioren","show_hosts":"TestData: Sandra Hochholzer, Martina Schweiger","title":"TestData:title","is_repetition":false,"playlist_id":6,"schedule_fallback_id":32,"show_fallback_id":112,"station_fallback_id":1,"rtr_category":"TestData: string","comment":"TestData: Kommentar","show_languages":"TestData: Sprachen","show_type":"TestData: Typ","show_categories":"TestData: Kategorie","show_topics":"TestData: Topic","show_musicfocus":"TestData: Fokus"}]'
                self.logger.critical("Using hardcoded Response!")
            else:
                html_response = "[]"


        # parse the JSON response
        schedule = simplejson.loads(html_response)

        # check data
        self.logger.critical("No JSON data validation implemented. Trusting the response as it is!")

        #self.fetched_schedule_data = self.remove_unnecessary_data(schedule)
        return self.remove_unnecessary_data(schedule)

    # ------------------------------------------------------------------------------------------ #
    def __fetch_schedule_playlists__(self):
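        """
        Fetch the playlist for every schedule in self.fetched_schedule_data
        from TANK and attach it to the schedule. Fallback playlists are
        currently not fetched (see the commented lines below).
        """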
        # store fetched entries => do not have to fetch playlist_id more than once
        fetched_entries=[]

        try:
            self.logger.warning("only fetching normal playlists. no fallbacks")
            for schedule in self.fetched_schedule_data:

                # Enhance schedule with details of show (e.g. slug)
                schedule = self.__fetch_show_details__(schedule)
                # retrieve playlist and the fallbacks for every schedule
                # if a playlist (like station_fallback) is already fetched, it is not fetched again but reused
                schedule["playlist"]          = self.__fetch_schedule_playlist__(schedule, "playlist_id",          fetched_entries)
                #schedule["schedule_fallback"] = self.__fetch_schedule_playlist__(schedule, "schedule_fallback_id", fetched_entries)
                #schedule["show_fallback"]     = self.__fetch_schedule_playlist__(schedule, "show_fallback_id",     fetched_entries)
                #schedule["station_fallback"]  = self.__fetch_schedule_playlist__(schedule, "station_fallback_id",  fetched_entries)

                self.logger.info(str(schedule))

        except Exception as e:
            self.logger.error("Error: "+str(e))

    # ------------------------------------------------------------------------------------------ #
    def __fetch_show_details__(self, schedule):
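        """
        Fetch the show details (e.g. the slug) for the schedule's show_id
        from the show API and merge them into the schedule entry.
        """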
        servicetype = "api_show_"

        json_response = self.__fetch_data__(servicetype, "${ID}", str(schedule["show_id"]))
        show_details = simplejson.loads(json_response)

        # Augment "schedules" with details of "show"
        schedule["show_slug"] = show_details["slug"]
        ### ... add more properties here, if needed ... ###

        return schedule

    # ------------------------------------------------------------------------------------------ #
    def __fetch_schedule_playlist__(self, schedule, id_name, fetched_schedule_entries):
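        """
        Fetch the playlist referenced by schedule[id_name] from TANK via the
        show slug. Playlists already present in fetched_schedule_entries are
        reused instead of being fetched again.
        """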
        servicetype = "importer"

        # fetch playlists from TANK
        if "show_slug" not in schedule:
            raise ValueError("Missing 'show_slug' for schedule", schedule)

        slug = str(schedule["show_slug"])
        json_response = self.__fetch_data__(servicetype, "${SLUG}", slug)

        # if a playlist is already fetched, do not fetch it again
        for entry in fetched_schedule_entries:
            # FIXME schedule["playlist_id"] is always None, review if entry["id"] is valid
            if entry["id"] == schedule[id_name]:
                self.logger.debug("playlist #" + str(schedule[id_name]) + " already fetched")
                return entry

        if self.config.get("use_test_data"):
            # FIXME move hardcoded test-data to separate testing logic.
            self.logger.warning("Using test-data for fetch-schedule-playlist")
            json_response = self.create_test_data(id_name, schedule)

        # parse the JSON playlist response
        schedule_entries = simplejson.loads(json_response)
        if "results" in schedule_entries:
            schedule_entries = schedule_entries["results"][0]

            for entry in schedule_entries["entries"]:
                if entry["uri"].startswith("file"):
                    entry["filename"] = self.convert_to_filename(entry["uri"])

            fetched_schedule_entries.append(schedule_entries)

        return schedule_entries

    def convert_to_filename(self, uri):
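        """
        Convert a 'file://' URI from TANK into an absolute path inside the
        configured audio folder, appending the '.flac' extension.
        """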
        # strip the 'file://' prefix and build the path in the audio folder
        filename = self.config.get("audiofolder") + "/" + uri[7:] + ".flac"
        if not os.path.isfile(filename):
            self.logger.warning("File %s does not exist!" % filename)
        return filename

    # ------------------------------------------------------------------------------------------ #
    def __fetch_data__(self, type, placeholder=None, value=None):
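        """
        Send an HTTP request to the service identified by 'type', optionally
        replacing 'placeholder' in the URL with 'value', and return the
        decoded response body (an empty string if the request failed).
        """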
        # Init html_response
        html_response = b''
        url = self.__build_url__(type, placeholder, value)

        # Send request to the API and read the data
        try:
            if type not in self.url_parameter:
                if self.url[type] == "":
                    return False
                request = urllib.request.Request(url)
            else:
                # the request body must be bytes in Python 3
                request = urllib.request.Request(url, self.url_parameter[type].encode("utf-8"))

            response = urllib.request.urlopen(request)
            html_response = response.read()

        except (urllib.error.URLError, IOError, ValueError) as e:
            self.logger.error("Cannot connect to " + self.url[type] +
                " (type: " + type + ")! Reason: " + str(e))
            #if not self.has_already_fetched:  # first fetch
            #    self.logger.critical("exiting fetch data thread..")
            #    sys.exit()

        self.has_already_fetched = True
        return html_response.decode("utf-8")
    # ------------------------------------------------------------------------------------------ #
    def __build_url__(self, type, placeholder=None, value=None):
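        """
        Return the base URL for the given service type, optionally replacing
        a placeholder (e.g. '${ID}' or '${SLUG}') with the given value.
        """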
        url = self.url[type]
        if placeholder:
            url = url.replace(placeholder, value)
            # print("built URL: "+url)
        return url
    # ------------------------------------------------------------------------------------------ #
    def remove_unnecessary_data(self, schedule):
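        """
        Remove schedules which are in the past or start more than 24 hours
        in the future and log how many entries were dropped.
        """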
        count_before = len(schedule)
        schedule = self.remove_data_more_than_24h_in_the_future(schedule)
        schedule = self.remove_data_in_the_past(schedule)
        count_after = len(schedule)

        self.logger.info("Removed %d unnecessary schedules from response. Entries left: %d" % ((count_before - count_after), count_after))
        return schedule
    # ------------------------------------------------------------------------------------------ #
    def remove_data_more_than_24h_in_the_future(self, schedule_from_pv):
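        """
        Keep only schedules starting no more than 24 hours from now
        (and no more than one hour in the past).
        """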
        act_list = []
        now = datetime.now()
        now_plus_24hours = now + timedelta(hours=24)

        for s in schedule_from_pv:
            date_start = datetime.strptime(s["start"], "%Y-%m-%dT%H:%M:%S")

            # append only elements which are close enough to now
            if date_start <= now_plus_24hours and date_start >= now - timedelta(hours=1):
                act_list.append(s)

        return act_list

    # ------------------------------------------------------------------------------------------ #
    def remove_data_in_the_past(self, schedule_from_pv):
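        """
        Keep only schedules that start in the future plus the one that is
        currently playing. Note: the last entry of the input list is never
        inspected because it has no successor to compare against.
        """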
        act_list = []
        now = datetime.now()

        for index,curr in enumerate(schedule_from_pv[:-1]):
            date_start = datetime.strptime(curr["start"], "%Y-%m-%dT%H:%M:%S")
            date_next_start = datetime.strptime(schedule_from_pv[index+1]["start"], "%Y-%m-%dT%H:%M:%S")

            # append all elements in the future
            if date_start >= now:
                act_list.append(curr)
            # append the one which is now playing
            elif date_start <= now and date_next_start >= now:
                act_list.append(curr)

        return act_list

    # ------------------------------------------------------------------------------------------ #
    def create_test_data(self, id_name, schedule):
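        """
        Build a hardcoded JSON playlist response for the given schedule,
        using a random playlist id that has not been used before.
        Only called when 'use_test_data' is enabled.
        """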
        import random
        rand_id = random.randint(1, 10000)

        while rand_id in self.used_random_playlist_ids:
            rand_id = random.randint(1, 10000)

        self.used_random_playlist_ids.append(rand_id)

        # FIXME move hardcoded test-data to separate testing logic.
        # HARDCODED Testdata
        if id_name != "playlist_id":
            # FALLBACK TESTDATA

            if rand_id % 3 == 0:  # playlist fallback
                json_response = '{"playlist_id":' + str(
                    rand_id) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"file:///var/audio/fallback/NightmaresOnWax/DJ-Kicks/02 - Only Child - Breakneck.flac"}]}'
            elif rand_id % 2 == 0:  # stream fallback
                json_response = '{"playlist_id":' + str(
                    rand_id) + ',"entries":[{"source":"http://chill.out.airtime.pro:8000/chill_a"}]}'
            else:  # pool fallback
                json_response = '{"playlist_id":' + str(rand_id) + ',"entries":[{"source":"pool:///liedermacherei"}]}'

            schedule[id_name] = rand_id

        elif schedule[id_name] == 0 or schedule[id_name] is None:
            # this happens when playlist id is not filled out in pv
            # json_response = '{"playlist_id": 0}'

            if rand_id % 4 == 0:  # playlist with two files
                json_response = '{"playlist_id":' + str(
                    rand_id) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"file:///var/audio/fallback/NightmaresOnWax/DJ-Kicks/02 - Only Child - Breakneck.flac"}]}'
            elif rand_id % 3 == 0:  # playlist with jingle and then linein
                json_response = '{"playlist_id":' + str(
                    rand_id) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"linein://1"}]}'
            elif rand_id % 2 == 0:  # playlist with jingle and then http stream
                json_response = '{"playlist_id":' + str(
                    rand_id) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"http://chill.out.airtime.pro:8000/chill_a"}]}'
            else:  # pool playlist
                json_response = '{"playlist_id":' + str(rand_id) + ',"entries":[{"source":"pool:///hiphop"}]}'

            schedule[id_name] = rand_id

        elif schedule[id_name] % 4 == 0:  # playlist with two files
            json_response = '{"playlist_id":' + str(schedule[id_name]) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"file:///var/audio/fallback/NightmaresOnWax/DJ-Kicks/01 - Type - Slow Process.flac"}]}'
        elif schedule[id_name] % 3 == 0:  # playlist with jingle and then http stream
            json_response = '{"playlist_id":' + str(schedule[id_name]) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"linein://0"}]}'
        elif schedule[id_name] % 2 == 0:  # playlist with jingle and then linein
            json_response = '{"playlist_id":' + str(schedule[id_name]) + ',"entries":[{"source":"file:///var/audio/fallback/music.flac"},{"source":"http://stream.fro.at:80/fro-128.ogg"}]}'
        else:  # pool playlist
            json_response = '{"playlist_id":' + str(schedule[id_name]) + ',"entries":[{"source":"pool:///chillout"}]}'

        self.logger.info("Using 'randomized' playlist: " + json_response + " for " + id_name[:-3] + " for show " + schedule["show_name"] + " starting @ " + schedule["start"])

        return json_response