diff --git a/src/__init__.py b/.codespell-excludes similarity index 100% rename from src/__init__.py rename to .codespell-excludes diff --git a/.codespell-ignore-words b/.codespell-ignore-words new file mode 100644 index 0000000000000000000000000000000000000000..9b52f093b3a44f508c3b04f887fb252c4e9ee6af --- /dev/null +++ b/.codespell-ignore-words @@ -0,0 +1 @@ +connexion diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000000000000000000000000000000000000..4fb7475a06393c433ae082a4054eff5b23f3eaae --- /dev/null +++ b/.flake8 @@ -0,0 +1,4 @@ +[flake8] +max-line-length = 99 +exclude = config/, python/, src/aura_engine_api/rest/, .tox/ +ignore = E121,E123,E126,E203,E226,E24,E704,W503,N802 diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 88e0f678baebd3f9dd2d29e6202509c9bd750f27..7fec2683f74cb3bbf0e17794a515159f06bc9e0a 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -2,11 +2,12 @@ image: python:3.8-buster stages: - bundle + - test - deploy - release variables: - API_ROOT_FILE: ./src/rest/swagger/swagger.yaml + API_ROOT_FILE: ./src/aura_engine_api/rest/swagger/swagger.yaml BUNDLED_JSON_FILE: ./public/api.json .release-rules: &release-rules @@ -24,10 +25,8 @@ cache: bundle_and_test: stage: bundle before_script: - - apt-get update - - apt-get install -y curl - - curl -sL https://deb.nodesource.com/setup_10.x | bash - - - apt-get install -y nodejs + - apt-get --quiet update + - apt-get --quiet --yes install nodejs - npm install -g @apidevtools/swagger-cli script: @@ -40,7 +39,17 @@ bundle_and_test: - public expire_in: 2 days only: - - master + - $CI_DEFAULT_BRANCH + +check_style: + stage: test + before_script: + - apt-get --quiet update + - apt-get --quiet --yes install codespell make + - pip install flake8 + script: + - make lint + - make spelling # test_api: # stage: test @@ -51,10 +60,8 @@ bundle_and_test: pages: stage: deploy before_script: - - apt-get update - - apt-get install -y curl - - curl -sL https://deb.nodesource.com/setup_10.x | bash - - - apt-get install -y nodejs + - apt-get --quiet update + - apt-get --quiet --yes install nodejs - npm install swagger-ui-dist@3.22.1 script: - cp -rp node_modules/swagger-ui-dist/* ./public @@ -64,7 +71,7 @@ pages: - public expire_in: 2 days only: - - master + - $CI_DEFAULT_BRANCH docker-push: # Use the official docker image. @@ -88,16 +95,16 @@ docker-push: else docker build -t $AURA_IMAGE_NAME -t $AURA_IMAGE_NAME:$CI_COMMIT_TAG . 
fi - docker push "$AURA_IMAGE_NAME" --all-tags - rules: + rules: - *release-rules # every commit on master/main branch should trigger a push to docker-hub as unstable without a release - - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH + - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH exists: - Dockerfile release_job: stage: release - needs: + needs: - docker-push image: registry.gitlab.com/gitlab-org/release-cli:latest rules: *release-rules @@ -108,3 +115,4 @@ release_job: description: ./CHANGELOG tag_name: '$CI_COMMIT_TAG' ref: '$CI_COMMIT_TAG' + diff --git a/Makefile b/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..e1322433dd88aa360e924f34cca540b3a3522863 --- /dev/null +++ b/Makefile @@ -0,0 +1,30 @@ +SPELLING_PATHS = $(wildcard *.md) docs src +SPELLING_EXCLUDE_FILENAME = .codespell-excludes +SPELLING_IGNORE_WORDS_FILENAME = .codespell-ignore-words + + +.PHONY: help +help: + @echo "Supported targets:" + @echo " lint - verify code style" + @echo " spelling - check spelling of text" + @echo " style - apply automatic formatting" + @echo + + +.PHONY: lint +lint: + python3 -m flake8 . + + +.PHONY: spelling +spelling: + codespell \ + --exclude-file "$(SPELLING_EXCLUDE_FILENAME)" \ + --ignore-words "$(SPELLING_IGNORE_WORDS_FILENAME)" \ + $(SPELLING_PATHS) + +.PHONY: style +style: + python3 -m isort . + black . diff --git a/README.md b/README.md index 2dc039e1a21cf9a207fa6010586eeeb0d328d067..af097db1e92e623d6f673e943af586ac95ac6d50 100644 --- a/README.md +++ b/README.md @@ -67,7 +67,7 @@ acting as a reverse proxy. ### Redundant Deployment In this scenario there are two Engine instances involved. Here you will need to deploy one Engine API on the host of each Engine instance. Additionally you'll have to set up -a third, so-called *Syncronization Node* of the Engine API. This sync instance of Engine API is in charge of synchronizing playlogs and managing the active engine state. +a third, so-called *Synchronization Node* of the Engine API. This sync instance of Engine API is in charge of synchronizing playlogs and managing the active engine state. <img src="docs/engine-api_redundancy.png" /> @@ -89,8 +89,8 @@ acting as a reverse proxy to shield your API. Usually when some new audio source starts playing, AURA Engine logs it to its local Engine API instance via some REST call. Now, the *Local API server* stores this information in its local database. Next, it also performs a POST request to the *Synchronization API Server*. This *Sync Node* checks if this request is coming from the currently active engine instance. -If yes, it stores this information in its playlog database. This keeps the playlogs of individual (currently active) Engine instances in sync with the *Engine API syncronization node*. -The *Engine API syncronization node* always only stores the valid (i.e. actually played) playlog records. +If yes, it stores this information in its playlog database. This keeps the playlogs of individual (currently active) Engine instances in sync with the *Engine API synchronization node*. +The *Engine API synchronization node* always only stores the valid (i.e. actually played) playlog records. ##### Active Sync @@ -103,7 +103,7 @@ obviously can not be synced. 
That means the local playlog at the *Engine Node* i Such marked entries are focus of the secondary synchronization approach, the so called **Passive Sync**: Whenever the *Synchronization Node* is up- and running again, some automated job on this node is continuously checking for records on remote nodes marked as "unsynced". If there are such records found, this indicates that there has been an outage of the *Sync Node*. Hence those "unsynced" records are pending to be synced. Now an automated job on the *Sync Node* reads those records as batches from that Engine Node and stores them in its local database. -It also keeps track when the last sync has happend, avoiding to query unnecceary records on any remote nodes. +It also keeps track when the last sync has happened, avoiding querying unnecessary records on any remote nodes. In order to avoid that this **Passive Sync** job might be causing high traffic on an engine instance, these batches are read with some configured delay time (see `sync_interval` and `sync_step_sleep` in the *Sync Node* configuration; all values are in seconds) and a configurable batch size (`sync_batch_size`; count of max unsynced playlogs which are read at once). @@ -130,7 +130,7 @@ For Production use you also need following: Create a virtual environment for your Python dependencies: ```bash -python3.8 -m venv python +python3 -m venv python ``` To activate that environment, run @@ -243,7 +243,7 @@ To run the API in an local development server execute: ./run.sh dev ``` -This command implicitely activates the virtual environment before starting the API. +This command implicitly activates the virtual environment before starting the API. For convenience running a plain `./run.sh` also starts the development server. @@ -393,13 +393,13 @@ http://localhost:8008/api/v1/openapi.json ### Extending the API The workflow for extending the API follows the **API First** approach. This means you have to edit the API at https://app.swaggerhub.com/apis/AURA-Engine/engine-api/, -using the SwaggerHub web editor. Then download the `python-flask` server stubs, and replace & merge the existing generated sources in `./src/rest`. +using the SwaggerHub web editor. Then download the `python-flask` server stubs, and replace & merge the existing generated sources in `./src/aura_engine_api/rest`. All model files can usually be overwritten. Only controller and test classes need to undergo a merge action. In the future it might be favorable to use a local Codegen to generate the API artifacts. -> Caveat: There is an issue with the generated source related to Python 3.8. Therefore `./src/rest/util.py` contains a workaround. Think about that when +> Caveat: There is an issue with the generated source related to Python 3.8. Therefore `./src/aura_engine_api/rest/util.py` contains a workaround. Think about that when overwriting the existing file. ### Creating a local image diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..2fec27db0012ac4f34d44cea43676a2bece05663 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,22 @@ +[tool.black] +target-version = ["py38"] +# TODO: Use extend-exclude as soon as Debian Bookworm is released.
+exclude = ''' + ^/src/aura_engine_api/rest/ + | ^/config/ + | ^/python/ + | ^/.tox/ +''' + +[tool.isort] +py_version = 38 +profile = "black" +# TODO: switch to "extend_skip", after Debian Bookworm is released +skip = [ + "config", + "node_modules", + "python", + "src/aura_engine_api/rest/", + ".git", + ".tox", +] diff --git a/run.sh b/run.sh index bd5d4552d2d3343bac1de7bc5b716f9a447ff703..1d4775494e4deed03178fb0ba16e7d2b92207973 100755 --- a/run.sh +++ b/run.sh @@ -34,42 +34,44 @@ echo "[ Docker=$docker ]" # Check for the correct Python version (3.8+) PYTHON_EXEC="python3" -echo "[ Using $(python3 -V) ]" +PYTHON_BASE_DIR="src" +echo "[ Using $("$PYTHON_EXEC" -V) ]" # +++ DEFAULT COMMANDS +++ # if [[ $docker == "false" ]]; then + export PYTHONPATH="$PYTHON_BASE_DIR" ### Runs the API Server (Development) ### if [[ $mode == "dev" ]]; then source python/bin/activate - echo "Running Engine API in Python Environment ($(python3 -V))" + echo "Running Engine API in Python Environment ($("$PYTHON_EXEC" -V))" echo "Starting API Server" - python src/app.py + "$PYTHON_EXEC" -m aura_engine_api.app fi ### Runs the API Server (Test) ### if [[ $mode == "api-test-0" ]]; then echo "Starting API Server 0" - /usr/bin/env python3 src/app.py config=tests/config/engine-0-api.ini + "$PYTHON_EXEC" -m aura_engine_api.app config=tests/config/engine-0-api.ini fi if [[ $mode == "api-test-1" ]]; then echo "Starting API Server 1" - /usr/bin/env python3 src/app.py config=tests/config/engine-1-api.ini + "$PYTHON_EXEC" -m aura_engine_api.app config=tests/config/engine-1-api.ini fi if [[ $mode == "api-test-2" ]]; then echo "Starting API Server 2" - /usr/bin/env python3 src/app.py config=tests/config/engine-2-api.ini + "$PYTHON_EXEC" -m aura_engine_api.app config=tests/config/engine-2-api.ini fi ### Runs the API Server using Gunicorn without a system daemon (Production) ### if [[ $mode == "prod" ]]; then echo "Starting API Server" - gunicorn -c config/gunicorn.conf.py src.app:app + gunicorn -c config/gunicorn.conf.py aura_engine_api.app:app fi if [[ $mode == "test" ]]; then @@ -80,7 +82,7 @@ if [[ $docker == "false" ]]; then ### CAUTION: This deletes everything in your database ### if [[ $mode == "recreate-database" ]]; then - /usr/bin/env python3 src/app.py --recreate-database + "$PYTHON_EXEC" -m aura_engine_api.app --recreate-database fi fi @@ -117,4 +119,4 @@ if [[ $docker == "true" ]]; then if [[ $mode == "push" ]]; then exec sudo docker push autoradio/engine-api fi -fi \ No newline at end of file +fi diff --git a/setup.py b/setup.py index f9c96b81f126d7abd7f374cad48eed28de49bbff..94c61f356b86c0c3866280dc80c40fc33e141c97 100644 --- a/setup.py +++ b/setup.py @@ -17,9 +17,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. - -import sys -from setuptools import setup, find_packages +from setuptools import find_packages, setup NAME = "engine-api" VERSION = "0.1.0" @@ -42,12 +40,10 @@ setup( keywords=["OpenAPI", "AURA Engine API"], install_requires=REQUIRES, packages=find_packages(), - package_data={'': ['src/rest/swagger/swagger.yaml']}, + package_data={"": ["src/aura_engine_api/rest/swagger/swagger.yaml"]}, include_package_data=True, - entry_points={ - 'console_scripts': ['src.app=src.app.__main__:main'] - }, + entry_points={"console_scripts": ["src.app=src.app.__main__:main"]}, long_description="""\ This is the AURA Engine API. Read more at https://gitlab.servus.at/aura/engine. 
- """ + """, ) diff --git a/meta.py b/src/aura_engine_api/__init__.py similarity index 91% rename from meta.py rename to src/aura_engine_api/__init__.py index 1d232ffcd2e5a44443af86ad6c146be7c75020d7..b2857d9483acbf108719e63d41bbb9b88ba0b3fd 100644 --- a/meta.py +++ b/src/aura_engine_api/__init__.py @@ -7,4 +7,4 @@ __version__ = "0.9.0" __version_info__ = (0, 9, 0) __maintainer__ = "David Trattnig" __email__ = "david.trattnig@subsquare.at" -__status__ = "Development" \ No newline at end of file +__status__ = "Development" diff --git a/src/app.py b/src/aura_engine_api/app.py similarity index 71% rename from src/app.py rename to src/aura_engine_api/app.py index 18e3042ede5e5dd86f0e659c5e8213da9bbe443f..507da8e6057989e602259f37b42fea92a3d83420 100644 --- a/src/app.py +++ b/src/aura_engine_api/app.py @@ -17,22 +17,18 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. -import sys -import os import atexit - -sys.path.append(os.path.join(os.path.dirname(__file__), '..')) +import sys import connexion - -import meta -from base.config import AuraConfig -from base.logger import AuraLogger -from base.node import NodeType -from rest import encoder -from service import ApiService -from sync import SyncJob -from models import db, ma +import aura_engine_api +from aura_engine_api.base.config import AuraConfig +from aura_engine_api.base.logger import AuraLogger +from aura_engine_api.base.node import NodeType +from aura_engine_api.models import db, ma +from aura_engine_api.rest import encoder +from aura_engine_api.service import ApiService +from aura_engine_api.sync import SyncJob # Read command line parameters @@ -48,28 +44,34 @@ config = AuraConfig(config_file) logger = AuraLogger(config, "engine-api").logger sync_job = None + def build_app(app): app.json_encoder = encoder.JSONEncoder app.config["SQLALCHEMY_DATABASE_URI"] = config.get_database_uri() - app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False - app.config['ENV'] = "development" - app.config['FLASK_ENV'] = "development" + app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False + app.config["ENV"] = "development" + app.config["FLASK_ENV"] = "development" if config.get("debug_flask") == "true": - app.config['DEBUG'] = True + app.config["DEBUG"] = True db.init_app(app) ma.init_app(app) return app -api = connexion.App(__name__, specification_dir='rest/swagger', arguments={'title': 'AURA Engine API'}) -api.add_api('swagger.yaml', pythonic_params=True) + +api = connexion.App( + __name__, specification_dir="rest/swagger", arguments={"title": "AURA Engine API"} +) +api.add_api("swagger.yaml", pythonic_params=True) app = build_app(api.app) + @app.after_request def after_request(response): header = response.headers - header['Access-Control-Allow-Origin'] = config.get("api_cors") + header["Access-Control-Allow-Origin"] = config.get("api_cors") return response + def startup(): """ Startup Server. 
@@ -98,7 +100,7 @@ with app.app_context(): node_type = NodeType.SYNC service = ApiService(config, logger, node_type) - app.config['SERVICE'] = service + app.config["SERVICE"] = service # Run sync job only in SYNC NODE mode if node_type == NodeType.SYNC: @@ -115,9 +117,12 @@ with app.app_context(): federation = "enabled" if config.get("enable_federation") == "false": federation = "disabled" - splash = "\n\n â–‘Eâ–‘Nâ–‘Gâ–‘Iâ–‘Nâ–‘Eâ–‘â–‘â–‘Aâ–‘Pâ–‘Iâ–‘ - v%s running as %s node - Federation %s.\n\n" % (meta.__version__, type, federation) + splash = ( + "\n\n â–‘Eâ–‘Nâ–‘Gâ–‘Iâ–‘Nâ–‘Eâ–‘â–‘â–‘Aâ–‘Pâ–‘Iâ–‘ - v%s running as %s node - Federation %s.\n\n" + % (aura_engine_api.__version__, type, federation) + ) logger.info(splash) -if __name__ == '__main__': +if __name__ == "__main__": startup() diff --git a/src/base/__init__.py b/src/aura_engine_api/base/__init__.py similarity index 100% rename from src/base/__init__.py rename to src/aura_engine_api/base/__init__.py diff --git a/src/base/config.py b/src/aura_engine_api/base/config.py similarity index 86% rename from src/base/config.py rename to src/aura_engine_api/base/config.py index c5547a9ec20d8399ac31b51ea1adf0381795cb64..e42e90fbf3860fc06059a7ceeb351f3ed9a54ced 100644 --- a/src/base/config.py +++ b/src/aura_engine_api/base/config.py @@ -17,13 +17,12 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. +import logging import os import os.path import sys -import logging - -from pathlib import Path from configparser import ConfigParser +from pathlib import Path class AuraConfig: @@ -32,10 +31,10 @@ class AuraConfig: Holds the Aura Configuration as in the file `engine-api.ini`. """ + ini_path = "" logger = None - def __init__(self, ini_path="/etc/aura/engine-api.ini"): """ Initializes the configuration, defaults to `/etc/aura/engine.ini`. @@ -45,7 +44,11 @@ class AuraConfig: Args: ini_path(String): The path to the configuration file `engine-api.ini` """ - default_ini_path = "%s/config/engine-api.ini" % Path(__file__).parent.parent.parent.absolute() + default_ini_path = os.path.join( + Path(__file__).parent.parent.parent.parent.absolute(), + "config", + "engine-api.ini", + ) if ini_path: config_file = Path(ini_path) @@ -58,8 +61,6 @@ class AuraConfig: self.logger = logging.getLogger("AuraEngineApi") self.load_config() - - def set(self, key, value): """ Setter for some specific config property. @@ -69,11 +70,10 @@ class AuraConfig: default (*): value """ try: - self.__dict__[key] = int(value) - except: - self.__dict__[key] = str(value) - - + parsed_value = int(value) + except ValueError: + parsed_value = str(value) + self.__dict__[key] = parsed_value def get(self, key, default=None): """ @@ -87,7 +87,9 @@ class AuraConfig: if default: self.set(key, default) else: - self.logger.warning("Key " + key + " not found in configfile " + self.ini_path + "!") + self.logger.warning( + "Key " + key + " not found in configfile " + self.ini_path + "!" 
+ ) return None if key == "loglevel": @@ -112,8 +114,6 @@ class AuraConfig: value = os.path.expandvars(value) return value - - def load_config(self): """ Set config defaults and load settings from file @@ -123,7 +123,7 @@ class AuraConfig: sys.exit(1) # Read the file - f = open(self.ini_path, 'r') + f = open(self.ini_path, "r") ini_str = f.read() f.close() @@ -137,15 +137,13 @@ class AuraConfig: for section in config_parser.sections(): for key, value in config_parser.items(section): - v = config_parser.get(section, key).replace('"', '').strip() + v = config_parser.get(section, key).replace('"', "").strip() self.set(key, v) # Custom overrides and defaults self.set("install_dir", os.path.realpath(__file__ + "..")) self.set("api_prefix", "/api/v1") - - def get_database_uri(self): """ Retrieves the database connection string. @@ -160,9 +158,12 @@ class AuraConfig: if db_type == "mysql": return f"mysql://{db_user}:{db_pass}@{db_host}/{db_name}?charset={db_charset}" else: - return f"postgresql+psycopg2://{db_user}:{db_pass}@{db_host}/{db_name}?client_encoding={db_charset}" + return ( + f"postgresql+psycopg2://{db_user}:{db_pass}@{db_host}/{db_name}" + f"?client_encoding={db_charset}" + ) elif db_type == "sqlite": # "db_name" is expected to be either a relative or an absolute path to the sqlite file return f"sqlite:///{db_name}.db" else: - return f"Error: invalid database type '{db_type}'" \ No newline at end of file + return f"Error: invalid database type '{db_type}'" diff --git a/src/base/logger.py b/src/aura_engine_api/base/logger.py similarity index 89% rename from src/base/logger.py rename to src/aura_engine_api/base/logger.py index 3825a92a050c8d771b1a7ead8fb520fbfe837167..bf4f6341b387495763e4489bc9266446289d8fb9 100644 --- a/src/base/logger.py +++ b/src/aura_engine_api/base/logger.py @@ -19,28 +19,25 @@ import logging -from .config import AuraConfig - -class AuraLogger(): +class AuraLogger: """ AuraLogger Class Logger for all Aura Engine components. The default logger is `AuraEngine`. Other loggers are defined - by passing a custom name on instantiation. - + by passing a custom name on instantiation. + The logger respects the log-level as defined in the engine's configuration file. """ + config = None logger = None - - def __init__(self, config, name="AuraEngine"): """ - Constructor to create a new logger defined by + Constructor to create a new logger defined by the passed name. Args: @@ -49,8 +46,6 @@ class AuraLogger(): self.config = config self.__create_logger(name) - - def __create_logger(self, name): """ Creates the logger instance for the given name. 
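> Note: the `AuraLogger` changes above are formatting-only. A minimal usage sketch, mirroring how `app.py` wires it up; the logger name is arbitrary, and `logdir` must be set in the ini file:

```python
# Sketch: AuraLogger wraps a standard logging.Logger, exposed via .logger.
# It logs to <logdir>/<name>.log and reuses existing handlers if present.
from aura_engine_api.base.config import AuraConfig
from aura_engine_api.base.logger import AuraLogger

config = AuraConfig()  # falls back to config/engine-api.ini in the project tree
logger = AuraLogger(config, "engine-api").logger  # same pattern as app.py
logger.info("Engine API logger ready")
```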
@@ -66,7 +61,9 @@ class AuraLogger(): if not self.logger.hasHandlers(): # create file handler for logger - file_handler = logging.FileHandler(self.config.get("logdir") + "/"+name+".log") + file_handler = logging.FileHandler( + self.config.get("logdir") + "/" + name + ".log" + ) file_handler.setLevel(lvl) # create stream handler for logger @@ -89,4 +86,4 @@ class AuraLogger(): self.logger.debug("Added handlers to logger") else: - self.logger.debug("Reused logger") \ No newline at end of file + self.logger.debug("Reused logger") diff --git a/src/base/node.py b/src/aura_engine_api/base/node.py similarity index 87% rename from src/base/node.py rename to src/aura_engine_api/base/node.py index 4d8ee8dc08852f6956a6feb93c6e28e865c2311a..4ffd0d625547520d962c04b6eb81569a35838a08 100644 --- a/src/base/node.py +++ b/src/aura_engine_api/base/node.py @@ -1,4 +1,3 @@ - # # Aura Engine API (https://gitlab.servus.at/aura/engine-api) # @@ -20,13 +19,11 @@ from enum import Enum -from models import PlayLog, PlayLogSchema, TrackSchema, ActivityLog, HealthHistory, HealthHistorySchema - - class NodeType(Enum): """ Types of API Server deployment models. """ + MAIN = "main" - SYNC = "sync" \ No newline at end of file + SYNC = "sync" diff --git a/src/models.py b/src/aura_engine_api/models.py similarity index 63% rename from src/models.py rename to src/aura_engine_api/models.py index 42b7a2c031cd52636125302ab9cd8ef2d5a6b6b7..624ef4a31f6bc335e99d2129e9a196d3bebdeba3 100644 --- a/src/models.py +++ b/src/aura_engine_api/models.py @@ -1,4 +1,3 @@ - # # Aura Engine API (https://gitlab.servus.at/aura/engine-api) # @@ -21,11 +20,11 @@ import datetime import json -from sqlalchemy import create_engine, Column, DateTime, String, Integer, Boolean -from sqlalchemy.event import listen -from flask_sqlalchemy import SQLAlchemy -from flask_marshmallow import Marshmallow -from marshmallow import Schema, fields, post_dump +from flask_marshmallow import Marshmallow +from flask_sqlalchemy import SQLAlchemy +from marshmallow import post_dump +from sqlalchemy import Boolean, Column, DateTime, Integer, String +from sqlalchemy.event import listen db = SQLAlchemy() ma = Marshmallow() @@ -35,65 +34,66 @@ class PlayLog(db.Model): """ Table holding play-log entries. """ - __tablename__ = 'playlog' + + __tablename__ = "playlog" # Primary Key - track_start = Column(DateTime, primary_key=True) + track_start = Column(DateTime, primary_key=True) # Columns - track_artist = Column(String(256)) - track_album = Column(String(256)) - track_title = Column(String(256)) - track_duration = Column(Integer) - track_type = Column(Integer) - track_num = Column(Integer) - playlist_id = Column(Integer) - timeslot_id = Column(Integer) - show_id = Column(Integer) - show_name = Column(String(256)) - log_source = Column(Integer) # The play-out source which this log is coming from (e.g. engine1, engine2) - is_synced = Column(Boolean) # Only relevant for main nodes, in a multi-node setup - - + track_artist = Column(String(256)) + track_album = Column(String(256)) + track_title = Column(String(256)) + track_duration = Column(Integer) + track_type = Column(Integer) + track_num = Column(Integer) + playlist_id = Column(Integer) + timeslot_id = Column(Integer) + show_id = Column(Integer) + show_name = Column(String(256)) + log_source = Column( + Integer + ) # The play-out source which this log is coming from (e.g. 
engine1, engine2) + is_synced = Column(Boolean) # Only relevant for main nodes, in a multi-node setup def __init__(self, data): """ Initializes a trackservice entry """ - self.track_start = data.track_start - self.track_artist = data.track_artist - self.track_album = data.track_album - self.track_title = data.track_title - self.track_duration = data.track_duration - self.track_type = data.track_type - self.track_num = data.track_num - self.playlist_id = data.playlist_id - self.timeslot_id = data.timeslot_id - self.show_id = data.show_id - self.show_name = data.show_name - self.log_source = data.log_source - self.is_synced = False + self.track_start = data.track_start + self.track_artist = data.track_artist + self.track_album = data.track_album + self.track_title = data.track_title + self.track_duration = data.track_duration + self.track_type = data.track_type + self.track_num = data.track_num + self.playlist_id = data.playlist_id + self.timeslot_id = data.timeslot_id + self.show_id = data.show_id + self.show_name = data.show_name + self.log_source = data.log_source + self.is_synced = False if not self.track_duration: self.track_duration = 0 - def save(self): db.session.add(self) db.session.commit() - @staticmethod def get(start_time): """ Selects the playlog identified by start time. """ db.session.commit() - track = db.session.query(PlayLog).\ - filter(PlayLog.track_start <= str(start_time)).\ - order_by(PlayLog.track_start.desc()).first() + track = ( + db.session.query(PlayLog) + .filter(PlayLog.track_start <= str(start_time)) + .order_by(PlayLog.track_start.desc()) + .first() + ) return track - @staticmethod def select_recent(): """ @@ -102,14 +102,15 @@ class PlayLog(db.Model): db.session.commit() now = datetime.datetime.now() - track = db.session.query(PlayLog).\ - order_by(PlayLog.track_start.desc()).\ - filter(PlayLog.track_start <= str(now)).first() + track = ( + db.session.query(PlayLog) + .order_by(PlayLog.track_start.desc()) + .filter(PlayLog.track_start <= str(now)) + .first() + ) return track - - @staticmethod def select_current(): """ @@ -131,7 +132,6 @@ class PlayLog(db.Model): return None - @staticmethod def select_for_timeslot(timeslot_id): """ @@ -140,17 +140,18 @@ class PlayLog(db.Model): the last 50 total playlogs are respected. 
""" db.session.commit() - now = datetime.datetime.now() before12h = datetime.datetime.now() - datetime.timedelta(hours=12) playlogs = None # Invalid Timeslot ID if timeslot_id == -1: playlogs = [] - result = db.session.query(PlayLog).\ - order_by(PlayLog.track_start.desc()).\ - filter(PlayLog.track_start >= str(before12h)).\ - limit(50) + result = ( + db.session.query(PlayLog) + .order_by(PlayLog.track_start.desc()) + .filter(PlayLog.track_start >= str(before12h)) + .limit(50) + ) for playlog in result.all(): if playlog.timeslot_id != -1: break @@ -158,14 +159,15 @@ class PlayLog(db.Model): # Valid Timeslot ID else: - result = db.session.query(PlayLog).\ - order_by(PlayLog.track_start.desc()).\ - filter(PlayLog.timeslot_id == timeslot_id) + result = ( + db.session.query(PlayLog) + .order_by(PlayLog.track_start.desc()) + .filter(PlayLog.timeslot_id == timeslot_id) + ) playlogs = result.all() return playlogs - @staticmethod def paginate(page, page_size, from_time=None, to_time=None, skip_synced=False): """ @@ -179,12 +181,16 @@ class PlayLog(db.Model): def q(page=0, page_size=None): query = db.session.query(PlayLog).order_by(PlayLog.track_start.desc()) if isinstance(from_time, datetime.datetime): - query = query.filter(PlayLog.track_start >= from_time.isoformat(' ', 'seconds')) + query = query.filter( + PlayLog.track_start >= from_time.isoformat(" ", "seconds") + ) if isinstance(to_time, datetime.datetime): - query = query.filter(PlayLog.track_start <= to_time.isoformat(' ', 'seconds')) - if skip_synced == True: - query = query.filter(PlayLog.is_synced == False) - listen(query, 'before_compile', apply_limit(page, page_size), retval=True) + query = query.filter( + PlayLog.track_start <= to_time.isoformat(" ", "seconds") + ) + if skip_synced: + query = query.filter(PlayLog.is_synced.is_(False)) + listen(query, "before_compile", apply_limit(page, page_size), retval=True) print("Paginate Query: " + str(query)) return query @@ -195,11 +201,11 @@ class PlayLog(db.Model): if page: query = query.offset(page * page_size) return query + return wrapped return q(page, page_size) - @staticmethod def select_last_hours(n): """ @@ -207,11 +213,14 @@ class PlayLog(db.Model): """ db.session.commit() last_hours = datetime.datetime.today() - datetime.timedelta(hours=n) - tracks = db.session.query(PlayLog).filter(PlayLog.track_start >= str(last_hours)).\ - order_by(PlayLog.track_start.desc()).all() + tracks = ( + db.session.query(PlayLog) + .filter(PlayLog.track_start >= str(last_hours)) + .order_by(PlayLog.track_start.desc()) + .all() + ) return tracks - @staticmethod def select_by_day(day): """ @@ -219,32 +228,44 @@ class PlayLog(db.Model): """ db.session.commit() day_plus_one = day + datetime.timedelta(days=1) - tracks = db.session.query(PlayLog).\ - filter(PlayLog.track_start >= str(day), PlayLog.track_start < str(day_plus_one)).\ - order_by(PlayLog.track_start.desc()).all() + tracks = ( + db.session.query(PlayLog) + .filter( + PlayLog.track_start >= str(day), PlayLog.track_start < str(day_plus_one) + ) + .order_by(PlayLog.track_start.desc()) + .all() + ) return tracks - @staticmethod def select_by_range(from_day, to_day): """ Selects the track-service items for a day range. 
""" db.session.commit() - tracks = db.session.query(PlayLog).filter(PlayLog.track_start >= str(from_day),\ - PlayLog.track_start < str(to_day)).order_by(PlayLog.track_start.desc()).all() + tracks = ( + db.session.query(PlayLog) + .filter( + PlayLog.track_start >= str(from_day), PlayLog.track_start < str(to_day) + ) + .order_by(PlayLog.track_start.desc()) + .all() + ) return tracks - def __str__(self): - return "Track [track_start: %s, track_title: %s]" % (str(self.track_start), str(self.track_title)) - + return "Track [track_start: %s, track_title: %s]" % ( + str(self.track_start), + str(self.track_title), + ) class PlayLogSchema(ma.SQLAlchemyAutoSchema): """ Schema for playlog entries. """ + class Meta: model = PlayLog sqla_session = db.session @@ -254,8 +275,7 @@ class PlayLogSchema(ma.SQLAlchemyAutoSchema): @post_dump def remove_skip_values(self, data, many=False): return { - key: value for key, value in data.items() - if value not in self.SKIP_VALUES + key: value for key, value in data.items() if value not in self.SKIP_VALUES } @@ -263,6 +283,7 @@ class TrackSchema(ma.SQLAlchemySchema): """ Schema for trackservice entries. """ + class Meta: model = PlayLog sqla_session = db.session @@ -277,9 +298,8 @@ class TrackSchema(ma.SQLAlchemySchema): "playlist_id", "timeslot_id", "show_id", - "show_name" - ) - + "show_name", + ) class ActivityLog(db.Model): @@ -288,15 +308,15 @@ class ActivityLog(db.Model): Only used in "SYNC" deployment mode. """ - __tablename__ = 'activity_log' + + __tablename__ = "activity_log" # Primary Key - log_time = Column(DateTime, primary_key=True) + log_time = Column(DateTime, primary_key=True) # Columns - source_number = Column(Integer) - is_synced = Column(Boolean) - + source_number = Column(Integer) + is_synced = Column(Boolean) def __init__(self, source_number): """ @@ -306,7 +326,6 @@ class ActivityLog(db.Model): self.source_number = source_number self.is_synced = False - @staticmethod def is_empty(): """ @@ -315,39 +334,39 @@ class ActivityLog(db.Model): db.session.commit() return not db.session.query(ActivityLog).one_or_none() - @staticmethod def get_active_source(): """ Retrieves the currently active source. """ db.session.commit() - source = db.session.query(ActivityLog).\ - order_by(ActivityLog.log_time.desc()).first() + source = ( + db.session.query(ActivityLog).order_by(ActivityLog.log_time.desc()).first() + ) return source - def save(self): db.session.add(self) db.session.commit() - class HealthHistory(db.Model): """ Table holding an history of health information for sources. 
""" - __tablename__ = 'health_history' + + __tablename__ = "health_history" # Primary Key - log_time = Column(DateTime, primary_key=True) + log_time = Column(DateTime, primary_key=True) # Columns - log_source = Column(Integer) # The source the history entry relates to - is_healthy = Column(Boolean) # Indicates if source is "healthy enough" to be used for play-out - is_synced = Column(Boolean) # Only relevant for main nodes, in a multi-node setup - health_info = Column(String(4096)) # Stringified JSON object or other, if needed - + log_source = Column(Integer) # The source the history entry relates to + is_healthy = Column( + Boolean + ) # Indicates if source is "healthy enough" to be used for play-out + is_synced = Column(Boolean) # Only relevant for main nodes, in a multi-node setup + health_info = Column(String(4096)) # Stringified JSON object or other, if needed def __init__(self, source_number, log_time, is_healthy, health_info): """ @@ -359,63 +378,73 @@ class HealthHistory(db.Model): self.is_synced = False self.health_info = health_info - @staticmethod def get_latest_entry(source_number): """ Retrieves the most recent health history entry for the given source number. """ - return db.session.query(HealthHistory).filter(HealthHistory.log_source == source_number).\ - order_by(HealthHistory.log_time.desc()).first() - + return ( + db.session.query(HealthHistory) + .filter(HealthHistory.log_source == source_number) + .order_by(HealthHistory.log_time.desc()) + .first() + ) def save(self): db.session.add(self) db.session.commit() - class HealthHistorySchema(ma.SQLAlchemyAutoSchema): """ Schema for health history entries. """ + class Meta: model = HealthHistory sqla_session = db.session - class ClockInfo(db.Model): """ Table holding information for the current and next show to be displayed by the studio clock. - Important: This table doesn't hold the history of information for efficiency. It only stores one - record for each possible source. For example in a redundant deployment it holds two records, for - engine1 and engine2. + Important: This table doesn't hold the history of information for efficiency. + It only stores one record for each possible source. + For example in a redundant deployment it holds two records, for engine1 and engine2. - The stringified objects allow easy, future extension of the properties, without the need to change - the database model. + The stringified objects allow easy, future extension of the properties, without the need to + change the database model. 
""" - __tablename__ = 'clock_info' + + __tablename__ = "clock_info" # Primary Key - log_source = Column(Integer, primary_key=True) # The source this entry was updated from ("1" for engine1, "2" for engine2) + log_source = Column( + Integer, primary_key=True + ) # The source this entry was updated from ("1" for engine1, "2" for engine2) # Columns - log_time = Column(DateTime) - current_track = None # Populated live from within `get_info(..)` - planned_playlist = Column(String(4096)) # Stringified "#/components/schemas/Playlist" OpenAPI JSON object - current_timeslot = Column(String(2048)) # Stringified "#/components/schemas/Timeslot" OpenAPI JSON object - next_timeslot = Column(String(2048)) # Stringified "#/components/schemas/Timeslot" OpenAPI JSON object - + log_time = Column(DateTime) + current_track = None # Populated live from within `get_info(..)` + planned_playlist = Column( + String(4096) + ) # Stringified "#/components/schemas/Playlist" OpenAPI JSON object + current_timeslot = Column( + String(2048) + ) # Stringified "#/components/schemas/Timeslot" OpenAPI JSON object + next_timeslot = Column( + String(2048) + ) # Stringified "#/components/schemas/Timeslot" OpenAPI JSON object def __init__(self): """ Initializes an clock info entry. """ - - def set_info(self, source_number, planned_playlist, current_timeslot, next_timeslot): + def set_info( + self, source_number, planned_playlist, current_timeslot, next_timeslot + ): """ Sets the values for a clock info entry. """ @@ -434,14 +463,16 @@ class ClockInfo(db.Model): else: self.next_timeslot = None - @staticmethod def get(source_number): """ Retrieves the clock info for the given source number. """ - return db.session.query(ClockInfo).filter(ClockInfo.log_source == source_number).first() - + return ( + db.session.query(ClockInfo) + .filter(ClockInfo.log_source == source_number) + .first() + ) @staticmethod def get_info(source_number): @@ -450,9 +481,12 @@ class ClockInfo(db.Model): """ track_schema = TrackSchema() info = dict() - data = db.session.query(ClockInfo).filter(ClockInfo.log_source == source_number).first() + data = ( + db.session.query(ClockInfo) + .filter(ClockInfo.log_source == source_number) + .first() + ) current_track = PlayLog.select_current() - planned_playlist_id = -1 playlogs = None # Construct the clock `info` object @@ -478,20 +512,28 @@ class ClockInfo(db.Model): most_recent_track = PlayLog.select_recent() # Is the most recent track part of the current timeslot? - if most_recent_track.timeslot_id == info["current_timeslot"]["timeslot_id"]: + if ( + most_recent_track.timeslot_id + == info["current_timeslot"]["timeslot_id"] + ): # Get the actual playlogs of the current timeslot, until now playlog_schema = PlayLogSchema(many=True) - playlogs = PlayLog.select_for_timeslot(most_recent_track.timeslot_id) + playlogs = PlayLog.select_for_timeslot( + most_recent_track.timeslot_id + ) playlogs.sort(key=lambda track: track.track_start, reverse=False) info["current_playlogs"] = playlog_schema.dump(playlogs) - if info["current_playlogs"] == None: + if info["current_playlogs"] is None: info["current_playlogs"] = {} - # Invalid timeslots (e.g. in fallback scenarios) get a virtual start date of the first fallback track + # Invalid timeslots (e.g. in fallback scenarios) get a virtual start date of + # the first fallback track. 
if info["current_timeslot"]["timeslot_id"] == -1: if playlogs and playlogs[0]: - info["current_timeslot"]["timeslot_start"] = playlogs[0].track_start + info["current_timeslot"]["timeslot_start"] = playlogs[ + 0 + ].track_start # Get the next timeslot if data.next_timeslot: @@ -499,26 +541,22 @@ class ClockInfo(db.Model): else: info["next_timeslot"] = {} - return info - - def save(self): db.session.add(self) db.session.commit() - def update(self): db.session.merge(self) db.session.commit() - class ClockInfoSchema(ma.SQLAlchemySchema): """ Schema for trackservice entries. """ + class Meta: model = ClockInfo sqla_session = db.session @@ -529,5 +567,5 @@ class ClockInfoSchema(ma.SQLAlchemySchema): "planned_playlist", "current_playlogs", "current_timeslot", - "next_timeslot" - ) \ No newline at end of file + "next_timeslot", + ) diff --git a/src/rest/__init__.py b/src/aura_engine_api/rest/__init__.py similarity index 100% rename from src/rest/__init__.py rename to src/aura_engine_api/rest/__init__.py diff --git a/src/rest/controllers/__init__.py b/src/aura_engine_api/rest/controllers/__init__.py similarity index 100% rename from src/rest/controllers/__init__.py rename to src/aura_engine_api/rest/controllers/__init__.py diff --git a/src/rest/controllers/authorization_controller.py b/src/aura_engine_api/rest/controllers/authorization_controller.py similarity index 100% rename from src/rest/controllers/authorization_controller.py rename to src/aura_engine_api/rest/controllers/authorization_controller.py diff --git a/src/rest/controllers/internal_controller.py b/src/aura_engine_api/rest/controllers/internal_controller.py similarity index 94% rename from src/rest/controllers/internal_controller.py rename to src/aura_engine_api/rest/controllers/internal_controller.py index 93817db3d923e51c4c080a2b081aeb5f682bac4f..412f1476f5dd5704eae0117f6dcd76ab6a525fc3 100644 --- a/src/rest/controllers/internal_controller.py +++ b/src/aura_engine_api/rest/controllers/internal_controller.py @@ -1,14 +1,13 @@ import connexion import six - from dateutil.parser import parse from flask import current_app -from src.rest.models.clock_info import ClockInfo # noqa: E501 -from src.rest.models.health_log import HealthLog # noqa: E501 -from src.rest.models.inline_response400 import InlineResponse400 # noqa: E501 -from src.rest.models.play_log import PlayLog # noqa: E501 -from src.rest import util +from aura_engine_api.rest import util +from aura_engine_api.rest.models.clock_info import ClockInfo +from aura_engine_api.rest.models.health_log import HealthLog +from aura_engine_api.rest.models.inline_response400 import InlineResponse400 +from aura_engine_api.rest.models.play_log import PlayLog def add_playlog(body): # noqa: E501 diff --git a/src/rest/controllers/public_controller.py b/src/aura_engine_api/rest/controllers/public_controller.py similarity index 92% rename from src/rest/controllers/public_controller.py rename to src/aura_engine_api/rest/controllers/public_controller.py index ffeea843f48139a920e69d701ac3428c0e639766..d09e1d163bb3922a2951510b03ecf9cdd303f731 100644 --- a/src/rest/controllers/public_controller.py +++ b/src/aura_engine_api/rest/controllers/public_controller.py @@ -1,9 +1,9 @@ import connexion import six - from flask import current_app -from src.rest.models.track import Track # noqa: E501 -from src.rest import util + +from aura_engine_api.rest import util +from aura_engine_api.rest.models.track import Track # noqa: E501 def current_track(): # noqa: E501 diff --git a/src/rest/encoder.py 
b/src/aura_engine_api/rest/encoder.py similarity index 90% rename from src/rest/encoder.py rename to src/aura_engine_api/rest/encoder.py index 5341e85ca0bed744b780577fd9e8160fc33a9bff..c9769f83e67edc3ea12b6def8fb28ec7488e2baa 100644 --- a/src/rest/encoder.py +++ b/src/aura_engine_api/rest/encoder.py @@ -1,7 +1,7 @@ from connexion.apps.flask_app import FlaskJSONEncoder import six -from src.rest.models.base_model_ import Model +from aura_engine_api.rest.models.base_model_ import Model class JSONEncoder(FlaskJSONEncoder): diff --git a/src/aura_engine_api/rest/models/__init__.py b/src/aura_engine_api/rest/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b43f4d8a9b3470c6bd77ac1994216057d90c98c8 --- /dev/null +++ b/src/aura_engine_api/rest/models/__init__.py @@ -0,0 +1,14 @@ +# coding: utf-8 + +# flake8: noqa +from __future__ import absolute_import + +# import models into model package +from aura_engine_api.rest.models.clock_info import ClockInfo +from aura_engine_api.rest.models.health_log import HealthLog +from aura_engine_api.rest.models.inline_response400 import InlineResponse400 +from aura_engine_api.rest.models.play_log import PlayLog +from aura_engine_api.rest.models.playlist import Playlist +from aura_engine_api.rest.models.playlist_entry import PlaylistEntry +from aura_engine_api.rest.models.timeslot import Timeslot +from aura_engine_api.rest.models.track import Track diff --git a/src/rest/models/base_model_.py b/src/aura_engine_api/rest/models/base_model_.py similarity index 97% rename from src/rest/models/base_model_.py rename to src/aura_engine_api/rest/models/base_model_.py index 028910ff56e72325323bd0d80954e9b69291a4cf..04e095faaded0b0261de58ff45817fe30533e0e3 100644 --- a/src/rest/models/base_model_.py +++ b/src/aura_engine_api/rest/models/base_model_.py @@ -2,7 +2,7 @@ import pprint import six -from src.rest import util +from aura_engine_api.rest import util class Model(object): diff --git a/src/rest/models/clock_info.py b/src/aura_engine_api/rest/models/clock_info.py similarity index 93% rename from src/rest/models/clock_info.py rename to src/aura_engine_api/rest/models/clock_info.py index 8796e7512233cc491332f5a222a818bab2e9779e..db19acf5f72cf0b262b4acfd8ad64a2935850eda 100644 --- a/src/rest/models/clock_info.py +++ b/src/aura_engine_api/rest/models/clock_info.py @@ -3,13 +3,13 @@ from __future__ import absolute_import from datetime import date, datetime # noqa: F401 -from typing import List, Dict # noqa: F401 +from typing import Dict, List # noqa: F401 -from src.rest.models.base_model_ import Model -from src.rest.models.play_log import PlayLog # noqa: F401,E501 -from src.rest.models.playlist import Playlist # noqa: F401,E501 -from src.rest.models.timeslot import Timeslot # noqa: F401,E501 -from src.rest import util +from aura_engine_api.rest import util +from aura_engine_api.rest.models.base_model_ import Model +from aura_engine_api.rest.models.play_log import PlayLog # noqa: F401,E501 +from aura_engine_api.rest.models.playlist import Playlist # noqa: F401,E501 +from aura_engine_api.rest.models.timeslot import Timeslot # noqa: F401,E501 class ClockInfo(Model): diff --git a/src/rest/models/health_log.py b/src/aura_engine_api/rest/models/health_log.py similarity index 97% rename from src/rest/models/health_log.py rename to src/aura_engine_api/rest/models/health_log.py index 59a210204d8b39fef62319869acf76f18c09cba6..6ec7d0b6402f8d646318cf25b3e8e622b74d8871 100644 --- a/src/rest/models/health_log.py +++ 
b/src/aura_engine_api/rest/models/health_log.py @@ -5,8 +5,8 @@ from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from src.rest.models.base_model_ import Model -from src.rest import util +from aura_engine_api.rest import util +from aura_engine_api.rest.models.base_model_ import Model class HealthLog(Model): diff --git a/src/rest/models/inline_response400.py b/src/aura_engine_api/rest/models/inline_response400.py similarity index 93% rename from src/rest/models/inline_response400.py rename to src/aura_engine_api/rest/models/inline_response400.py index 8428ccdffd6b1af60bfb7958dc46eb1683bbe7cb..85f140e9f1508c68447298e951d6fad04f23e3c1 100644 --- a/src/rest/models/inline_response400.py +++ b/src/aura_engine_api/rest/models/inline_response400.py @@ -5,8 +5,8 @@ from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from src.rest.models.base_model_ import Model -from src.rest import util +from aura_engine_api.rest import util +from aura_engine_api.rest.models.base_model_ import Model class InlineResponse400(Model): diff --git a/src/rest/models/play_log.py b/src/aura_engine_api/rest/models/play_log.py similarity index 99% rename from src/rest/models/play_log.py rename to src/aura_engine_api/rest/models/play_log.py index 06bccb3f259b47964f63265601f1e9b649844b30..c344d5e2036f7d06c79176caf64fed2fc8bd69b3 100644 --- a/src/rest/models/play_log.py +++ b/src/aura_engine_api/rest/models/play_log.py @@ -5,8 +5,8 @@ from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from src.rest.models.base_model_ import Model -from src.rest import util +from aura_engine_api.rest import util +from aura_engine_api.rest.models.base_model_ import Model class PlayLog(Model): diff --git a/src/rest/models/playlist.py b/src/aura_engine_api/rest/models/playlist.py similarity index 92% rename from src/rest/models/playlist.py rename to src/aura_engine_api/rest/models/playlist.py index 8e297213fee49cac2fd6cc7017794aff49393039..d66d55833c902722e04258e2aba7e1fbd4bb011d 100644 --- a/src/rest/models/playlist.py +++ b/src/aura_engine_api/rest/models/playlist.py @@ -5,9 +5,9 @@ from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from src.rest.models.base_model_ import Model -from src.rest.models.playlist_entry import PlaylistEntry # noqa: F401,E501 -from src.rest import util +from aura_engine_api.rest import util +from aura_engine_api.rest.models.base_model_ import Model +from aura_engine_api.rest.models.playlist_entry import PlaylistEntry # noqa: F401,E501 class Playlist(Model): diff --git a/src/rest/models/playlist_entry.py b/src/aura_engine_api/rest/models/playlist_entry.py similarity index 98% rename from src/rest/models/playlist_entry.py rename to src/aura_engine_api/rest/models/playlist_entry.py index aaf6566ab7c5b63d8305be94ea23660d3a259238..a82b7f7e34ba94f1cd0a60df3e433f7390a6a25c 100644 --- a/src/rest/models/playlist_entry.py +++ b/src/aura_engine_api/rest/models/playlist_entry.py @@ -5,8 +5,8 @@ from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from src.rest.models.base_model_ import Model -from src.rest import util +from aura_engine_api.rest import util +from aura_engine_api.rest.models.base_model_ import Model class PlaylistEntry(Model): diff --git a/src/rest/models/timeslot.py b/src/aura_engine_api/rest/models/timeslot.py similarity index 98% rename from src/rest/models/timeslot.py rename to 
src/aura_engine_api/rest/models/timeslot.py index 4aea4e330b87f18e2d42ceab136af2d1069bff04..d94bea36093c9c7c2625496a58b6c46790e1fa49 100644 --- a/src/rest/models/timeslot.py +++ b/src/aura_engine_api/rest/models/timeslot.py @@ -5,8 +5,8 @@ from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from src.rest.models.base_model_ import Model -from src.rest import util +from aura_engine_api.rest import util +from aura_engine_api.rest.models.base_model_ import Model class Timeslot(Model): diff --git a/src/rest/models/track.py b/src/aura_engine_api/rest/models/track.py similarity index 98% rename from src/rest/models/track.py rename to src/aura_engine_api/rest/models/track.py index 542208907a1ad295c014bcf8be1d8aed9e5c39e7..7fa5249787769c31fbfc16348766337919d89c03 100644 --- a/src/rest/models/track.py +++ b/src/aura_engine_api/rest/models/track.py @@ -5,8 +5,8 @@ from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from src.rest.models.base_model_ import Model -from src.rest import util +from aura_engine_api.rest import util +from aura_engine_api.rest.models.base_model_ import Model class Track(Model): diff --git a/src/rest/swagger/swagger.yaml b/src/aura_engine_api/rest/swagger/swagger.yaml similarity index 96% rename from src/rest/swagger/swagger.yaml rename to src/aura_engine_api/rest/swagger/swagger.yaml index a847f0c113b07049939aa7b3e1f3e983876dcbcb..023c9544778363cb03acaa08d3432b648ba7f24e 100644 --- a/src/rest/swagger/swagger.yaml +++ b/src/aura_engine_api/rest/swagger/swagger.yaml @@ -96,7 +96,7 @@ paths: x-content-type: application/json "400": description: bad input parameter - x-openapi-router-controller: src.rest.controllers.public_controller + x-openapi-router-controller: aura_engine_api.rest.controllers.public_controller /trackservice/current: get: tags: @@ -114,7 +114,7 @@ paths: $ref: '#/components/schemas/Track' "400": description: bad input parameter - x-openapi-router-controller: src.rest.controllers.public_controller + x-openapi-router-controller: aura_engine_api.rest.controllers.public_controller /clock: get: tags: @@ -132,7 +132,7 @@ paths: $ref: '#/components/schemas/ClockInfo' "400": description: bad input parameter - x-openapi-router-controller: src.rest.controllers.internal_controller + x-openapi-router-controller: aura_engine_api.rest.controllers.internal_controller put: tags: - internal @@ -152,7 +152,7 @@ paths: description: status updated "400": description: bad input parameter - x-openapi-router-controller: src.rest.controllers.internal_controller + x-openapi-router-controller: aura_engine_api.rest.controllers.internal_controller /playlog: get: tags: @@ -223,7 +223,7 @@ paths: x-content-type: application/json "400": description: bad input parameter - x-openapi-router-controller: src.rest.controllers.internal_controller + x-openapi-router-controller: aura_engine_api.rest.controllers.internal_controller post: tags: - internal @@ -246,7 +246,7 @@ paths: application/json: schema: $ref: '#/components/schemas/inline_response_400' - x-openapi-router-controller: src.rest.controllers.internal_controller + x-openapi-router-controller: aura_engine_api.rest.controllers.internal_controller /playlog/report/{year_month}: get: tags: @@ -276,7 +276,7 @@ paths: x-content-type: application/json "400": description: bad input parameter - x-openapi-router-controller: src.rest.controllers.internal_controller + x-openapi-router-controller: aura_engine_api.rest.controllers.internal_controller /source/active: get: 
tags: @@ -295,7 +295,7 @@ x-content-type: application/json "400": description: bad input parameter - x-openapi-router-controller: src.rest.controllers.internal_controller + x-openapi-router-controller: aura_engine_api.rest.controllers.internal_controller /source/active/{number}: put: tags: @@ -320,7 +320,7 @@ description: status updated "400": description: bad input parameter - x-openapi-router-controller: src.rest.controllers.internal_controller + x-openapi-router-controller: aura_engine_api.rest.controllers.internal_controller /source/health/{number}: get: tags: @@ -349,7 +349,7 @@ $ref: '#/components/schemas/HealthLog' "400": description: bad input parameter - x-openapi-router-controller: src.rest.controllers.internal_controller + x-openapi-router-controller: aura_engine_api.rest.controllers.internal_controller post: tags: - internal @@ -378,7 +378,7 @@ description: health info logged "400": description: bad input parameter - x-openapi-router-controller: src.rest.controllers.internal_controller + x-openapi-router-controller: aura_engine_api.rest.controllers.internal_controller components: schemas: Track: diff --git a/src/rest/test/__init__.py b/src/aura_engine_api/rest/test/__init__.py similarity index 55% rename from src/rest/test/__init__.py rename to src/aura_engine_api/rest/test/__init__.py index 85b82f6cf739817aea1b4d49b228eaad99522b55..66b15d5ddf6f313ac73c456ab0b91626be636e3f 100644 --- a/src/rest/test/__init__.py +++ b/src/aura_engine_api/rest/test/__init__.py @@ -3,13 +3,15 @@ import logging import connexion from flask_testing import TestCase -from src.rest.encoder import JSONEncoder +from aura_engine_api.rest.encoder import JSONEncoder class BaseTestCase(TestCase): def create_app(self): - logging.getLogger('connexion.operation').setLevel('ERROR') + # silence the noisy debug logs of the relevant modules + for log_name in ("connexion", "openapi_spec_validator"): + logging.getLogger(log_name).setLevel('ERROR') app = connexion.App(__name__, specification_dir='../swagger/') app.app.json_encoder = JSONEncoder app.add_api('swagger.yaml') diff --git a/src/rest/test/test_internal_controller.py b/src/aura_engine_api/rest/test/test_internal_controller.py similarity index 86% rename from src/rest/test/test_internal_controller.py rename to src/aura_engine_api/rest/test/test_internal_controller.py index 5c5c129acc2739074e574d8971d5cddecb8462e3..0c5558cf5ec7c9561a6640a92a206d8be24e3795 100644 --- a/src/rest/test/test_internal_controller.py +++ b/src/aura_engine_api/rest/test/test_internal_controller.py @@ -17,17 +17,18 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>.
- from __future__ import absolute_import from flask import json from six import BytesIO -from src.rest.models.clock_info import ClockInfo # noqa: E501 -from src.rest.models.health_log import HealthLog # noqa: E501 -from src.rest.models.inline_response400 import InlineResponse400 # noqa: E501 -from src.rest.models.play_log import PlayLog # noqa: E501 -from src.rest.test import BaseTestCase +from aura_engine_api.rest.models.clock_info import ClockInfo # noqa: E501 +from aura_engine_api.rest.models.health_log import HealthLog # noqa: E501 +from aura_engine_api.rest.models.inline_response400 import ( # noqa: E501 + InlineResponse400, +) +from aura_engine_api.rest.models.play_log import PlayLog # noqa: E501 +from aura_engine_api.rest.test import BaseTestCase class TestInternalController(BaseTestCase): @@ -38,7 +39,7 @@ class TestInternalController(BaseTestCase): Adds an entry to the playlog """ - body = PlayLog() + body = {"track_start": "2018-08-12 16:41+01:00"} response = self.client.open( '/api/v1/playlog', method='POST', @@ -100,7 +101,7 @@ class TestInternalController(BaseTestCase): ('to_date', '2013-10-20T19:20:30+01:00'), ('page', 56), ('limit', 200), - ('skip_synced', true)] + ('skip_synced', True)] response = self.client.open( '/api/v1/playlog/', method='GET', @@ -113,7 +114,11 @@ class TestInternalController(BaseTestCase): Log health info """ - body = HealthLog() + body = { + "details": {}, + "is_healthy": True, + "log_time": "2014-11-21T17:16:23+01:00", + } response = self.client.open( 'api/v1/source/health/{number}'.format(number=2), method='POST', @@ -136,7 +141,8 @@ class TestInternalController(BaseTestCase): def test_set_clock_info(self): """Test case for set_clock_info - Set current studio clock information such as timeslot info and track-list for engine 1 or 2 within the Engine API database. + Set current studio clock information such as timeslot info and track-list for engine 1 or 2 + within the Engine API database. 
""" body = ClockInfo() response = self.client.open( @@ -150,4 +156,4 @@ class TestInternalController(BaseTestCase): if __name__ == '__main__': import unittest - unittest.main() \ No newline at end of file + unittest.main() diff --git a/src/rest/test/test_public_controller.py b/src/aura_engine_api/rest/test/test_public_controller.py similarity index 94% rename from src/rest/test/test_public_controller.py rename to src/aura_engine_api/rest/test/test_public_controller.py index 1022a0a326aa86b1060e82429c486e4526bcbee5..de9603bf513725fa4a41d5a1059e5aae95ee35bf 100644 --- a/src/rest/test/test_public_controller.py +++ b/src/aura_engine_api/rest/test/test_public_controller.py @@ -22,8 +22,8 @@ from __future__ import absolute_import from flask import json from six import BytesIO -from src.rest.models.track import Track # noqa: E501 -from src.rest.test import BaseTestCase +from aura_engine_api.rest.models.track import Track # noqa: E501 +from aura_engine_api.rest.test import BaseTestCase class TestPublicController(BaseTestCase): diff --git a/src/rest/util.py b/src/aura_engine_api/rest/util.py similarity index 98% rename from src/rest/util.py rename to src/aura_engine_api/rest/util.py index ae481251bd02adeacc7374943b0897831cf87cd9..05878d324fecd1c1e8ecad65eaa9c466bfa78b0a 100644 --- a/src/rest/util.py +++ b/src/aura_engine_api/rest/util.py @@ -24,7 +24,7 @@ def _deserialize(data, klass): elif klass == datetime.datetime: return deserialize_datetime(data) - # NOTE: Due to Pyton 3.7 not providing `typing.GenericMeta` anymore, + # NOTE: Due to Python 3.7 not providing `typing.GenericMeta` anymore, # this workaround is needed (See https://github.com/swagger-api/swagger-codegen/issues/8921) # As soon this has been fixed in SwaggerHub Codegen this class can # be replaced by the generated one again. diff --git a/src/service.py b/src/aura_engine_api/service.py similarity index 68% rename from src/service.py rename to src/aura_engine_api/service.py index f343932805196d04e78f0cd0165aadcefc237408..efdaf237fdde1d531b24ca1ca4e972f73fa39b51 100644 --- a/src/service.py +++ b/src/aura_engine_api/service.py @@ -18,21 +18,26 @@ import datetime + import requests -import json import sqlalchemy - from dateutil.parser import parse -from base.node import NodeType -from rest import util -from models import \ - PlayLog, PlayLogSchema, TrackSchema, ActivityLog, \ - ClockInfo, ClockInfoSchema, HealthHistory, HealthHistorySchema - - - -class ApiService(): +from aura_engine_api.base.node import NodeType +from aura_engine_api.models import ( + ActivityLog, + ClockInfo, + ClockInfoSchema, + HealthHistory, + HealthHistorySchema, + PlayLog, + PlayLogSchema, + TrackSchema, +) +from aura_engine_api.rest import util + + +class ApiService: """ Service handling for API actions. """ @@ -47,7 +52,6 @@ class ApiService(): api_healthlog = None api_clockinfo = None - def __init__(self, config, logger, node_type): """ Initialize Service. @@ -58,9 +62,11 @@ class ApiService(): # Configured as Sync Node if not node_type == NodeType.MAIN: self.node_type = NodeType.SYNC - self.main_hosts = [ config.get("main_host_1"), config.get("main_host_2") ] + self.main_hosts = [config.get("main_host_1"), config.get("main_host_2")] if not self.main_hosts[0] and not self.main_hosts[1]: - self.logger.warn("Not a single main host defined. Be aware what you are doing.") + self.logger.warn( + "Not a single main host defined. Be aware what you are doing." 
+                )
                 msg = "No sync possible as no host nodes are configured"
             else:
                 msg = "Syncing data of hosts '%s'" % (self.main_hosts)
@@ -74,12 +80,12 @@ class ApiService():
                 msg = "No child node for synchronization defined"
             else:
                 if not self.config.get("enable_federation") == "false":
-                    msg = "Pushing data to '%s'" % (self.sync_host)
-
+                    msg = "Pushing data to '%s'" % (self.sync_host)

         # Set active source
         source = ActivityLog.get_active_source()
-        if source: self.active_source = source.source_number
+        if source:
+            self.active_source = source.source_number
         if not self.active_source:
             if self.node_type == NodeType.MAIN:
                 source_number = self.config.get("host_id")
@@ -95,10 +101,9 @@ class ApiService():

         self.logger.info("Running in '%s' mode. %s." % (self.node_type, msg))

-
-    def current_track(self):
+    def current_track(self):
         """
-        Retrieves the currently playing track.
+        Retrieves the currently playing track.

         Returns:
             (JSON)
@@ -107,46 +112,52 @@ class ApiService():
         track_schema = TrackSchema()
         return track_schema.dump(track)

-
-    def list_tracks(self, page=None, size=None, from_time=None, to_time=None):
+    def list_tracks(self, page=None, size=None, from_time=None, to_time=None):
         """
         Lists track-service entries with pagination.

         Args:
             page (Integer): The number of the page to return
             size (Integer): The numbers of items to return
-            from_time (datetime): Optionally, get entries after this timestamp (e.g. "2020-08-29T09:12:33.001Z")
-            to_time (datetime): Optionally, get entries before this timestamp (e.g. "2020-08-29T09:12:33.001Z")
+            from_time (datetime): Optionally, get entries after this timestamp
+                (e.g. "2020-08-29T09:12:33.001Z")
+            to_time (datetime): Optionally, get entries before this timestamp
+                (e.g. "2020-08-29T09:12:33.001Z")

         Returns:
             (JSON)
         """
-        if not size or size > 50 or size < 1: size = 20
+        if not size or size > 50 or size < 1:
+            size = 20
         tracklist = PlayLog.paginate(page, size, from_time, to_time, False)
         tracklist_schema = TrackSchema(many=True)
         return tracklist_schema.dump(tracklist)

-
-    def list_playlog(self, page=None, size=None, from_time=None, to_time=None, skip_synced=False):
+    def list_playlog(
+        self, page=None, size=None, from_time=None, to_time=None, skip_synced=False
+    ):
         """
-        Get paginated playlog entries for since the given timestamp.
+        Get paginated playlog entries since the given timestamp.

         Args:
             page (Integer): The number of items to skip before starting to collect the result set
             size (Integer): The numbers of items to return per page
-            from_time (datetime): Optionally, get entries after this timestamp (e.g. "2020-08-29T09:12:33.001Z")
-            to_time (datetime): Optionally, get entries before this timestamp (e.g. "2020-08-29T09:12:33.001Z")
-            skip_synced (Boolean): Optionally, don't return entries which have been posted directly before (sync node only)
+            from_time (datetime): Optionally, get entries after this timestamp
+                (e.g. "2020-08-29T09:12:33.001Z")
+            to_time (datetime): Optionally, get entries before this timestamp
+                (e.g. "2020-08-29T09:12:33.001Z")
+            skip_synced (Boolean): Optionally, don't return entries which have been posted directly
+                before (sync node only)

         Returns:
             (JSON)
         """
-        if not page: page = 0
+        if not page:
+            page = 0
         tracklist = PlayLog.paginate(page, size, from_time, to_time, skip_synced)
         tracklist_schema = PlayLogSchema(many=True)
         return tracklist_schema.dump(tracklist)

-
-    def store_playlog(self, data, plain_json):
+    def store_playlog(self, data, plain_json):
         """
         Stores the passed playlog entry.
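
For illustration, not part of this patch: the `list_playlog()` hunk above defines the paging contract that the sync node consumes over HTTP. A minimal client sketch, assuming a local instance at a placeholder base URL; `fetch_playlogs` is a hypothetical helper, not a function in this codebase.

import requests

BASE_URL = "http://localhost:8008"  # placeholder host; not defined by this patch


def fetch_playlogs(from_date, to_date, limit=100):
    # Hypothetical helper: page through /api/v1/playlog/ until an empty
    # batch signals that the whole time window has been fetched.
    page = 0
    while True:
        params = {
            "page": page,
            "limit": limit,
            "skip_synced": "true",  # sync scenario: omit already-pushed entries
            "from_date": from_date,
            "to_date": to_date,
        }
        batch = requests.get(BASE_URL + "/api/v1/playlog/", params=params).json()
        if not batch:
            return
        yield from batch
        page += 1
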
@@ -156,70 +167,87 @@ class ApiService():
         if not data.log_source:
             data.log_source = self.config.get("host_id")

-        # Main Node: Alway log entry, independed of the source
+        # Main Node: Always log entry, independent of the source
         # Sync Node: Only log entry when it's coming from an active source
-        if self.node_type == NodeType.MAIN or \
-            (self.node_type == NodeType.SYNC and data.log_source == self.active_source):
+        if self.node_type == NodeType.MAIN or (
+            self.node_type == NodeType.SYNC and data.log_source == self.active_source
+        ):
             try:
                 playlog = PlayLog(data)
                 playlog.save()
                 self.logger.debug("Stored playlog for '%s'" % data.track_start)
-            except sqlalchemy.exc.IntegrityError as e:
-                self.logger.info("Playlog for '%s' is already existing in local database. Skipping..." % data.track_start)
+            except sqlalchemy.exc.IntegrityError:
+                self.logger.info(
+                    "Playlog for '%s' is already existing in local database. Skipping..."
+                    % data.track_start
+                )

             if self.config.get("enable_federation") == "false":
                 return
-
+
             # Main Node: Push to Sync Node, if enabled
             if self.node_type == NodeType.MAIN and self.sync_host and self.api_playlog:
                 try:
                     api_url = self.sync_host + self.api_playlog
                     r = requests.post(api_url, json=plain_json)
                     if r.status_code == 204:
-                        self.logger.info("Successfully pushed playlog for '%s' to '%s'" % (playlog.track_start, self.sync_host))
+                        self.logger.info(
+                            "Successfully pushed playlog for '%s' to '%s'"
+                            % (playlog.track_start, self.sync_host)
+                        )
                         playlog.is_synced = True
                         playlog.save()
                     else:
-                        self.logger.error("Error while pushing playlog to sync-node: " + str(r.json()))
+                        self.logger.error(
+                            "Error while pushing playlog to sync-node: " + str(r.json())
+                        )
                 except Exception as e:
-                    self.logger.error("Error while posting to sync-node API '%s'!\n%s" % (api_url, str(e)))
+                    self.logger.error(
+                        "Error while posting to sync-node API '%s'!\n%s"
+                        % (api_url, str(e))
+                    )
         else:
             self.logger.info("Ditching playlog sent from an inactive source")

-
     def get_clock_info(self):
         """
-        Retrieves the dataset required to render the studio clock.
+        Retrieves the dataset required to render the studio clock.
         """
         info = ClockInfo.get_info(self.get_active_source())
         now = datetime.datetime.now()

         if "current_timeslot" in info and info["current_timeslot"]["timeslot_end"]:
-            timeslot_end = util.deserialize_datetime(info["current_timeslot"]["timeslot_end"])
+            timeslot_end = util.deserialize_datetime(
+                info["current_timeslot"]["timeslot_end"]
+            )
             if timeslot_end < now:
                 info["current_timeslot"] = None
-
+
         clockinfo_schema = ClockInfoSchema()
         return clockinfo_schema.dump(info)

-
     def set_clock_info(self, data, plain_json):
         """
         Sets the clock info for the given source (engine1, engine2, other).
""" if data.engine_source <= 0: return - + is_existing = False - clock_info = ClockInfo.get(self.get_active_source()) - if clock_info: + clock_info = ClockInfo.get(self.get_active_source()) + if clock_info: is_existing = True else: clock_info = ClockInfo() - clock_info.set_info(data.engine_source, data.planned_playlist, data.current_timeslot, data.next_timeslot) - + clock_info.set_info( + data.engine_source, + data.planned_playlist, + data.current_timeslot, + data.next_timeslot, + ) + if is_existing: clock_info.update() else: @@ -234,16 +262,26 @@ class ApiService(): api_url = self.sync_host + self.api_clockinfo r = requests.put(api_url, json=plain_json) if r.status_code == 204: - self.logger.info("Successfully pushed clock info for '%s' to '%s'" % (clock_info.log_time, self.sync_host)) + self.logger.info( + "Successfully pushed clock info for '%s' to '%s'" + % (clock_info.log_time, self.sync_host) + ) else: - self.logger.error("HTTP %s | Error while pushing clock info to sync-node: " % (r.status_code, str(r.json()))) + self.logger.error( + "HTTP %s | Error while pushing clock info to sync-node: %s", + r.status_code, + str(r.json()), + ) except Exception as e: - self.logger.error("Error while putting clock info to sync-node API '%s'!\n%s" % (api_url, str(e))) - + self.logger.error( + "Error while putting clock info to sync-node API '%s'!\n%s" + % (api_url, str(e)) + ) def set_default_source(self, source_number): """ - Create initial source (API Epoch) in the ActivityLog being able to sync old entries upon first start + Create initial source (API Epoch) in the ActivityLog being able to sync old entries upon + first start. Args: source_number (Integer): Number of the default engine @@ -253,11 +291,13 @@ class ApiService(): epoch_source = ActivityLog(default_source) epoch_source.log_time = parse("2020-02-22 02:20:20") epoch_source.save() - self.logger.info("Created API epoch source %s:%s" % (epoch_source.source_number, epoch_source.log_time)) + self.logger.info( + "Created API epoch source %s:%s" + % (epoch_source.source_number, epoch_source.log_time) + ) else: self.set_active_source(source_number) - def set_active_source(self, source_number): """ Sets the active source (engine1, engine2, other) identified by its source number. 
@@ -271,7 +311,6 @@ class ApiService():
         activity_log = ActivityLog(source_number)
         activity_log.save()

-
     def get_active_source(self):
         """
         Retrieves number of the currently active source (engine1, engine2, other)
@@ -281,7 +320,6 @@ class ApiService():
         """
         return self.active_source

-
     def get_source_health(self, source_number):
         """
         Retrieves the most recent health info of the requested source
@@ -297,7 +335,6 @@ class ApiService():
         health_schema = HealthHistorySchema()
         return health_schema.dump(health)

-
     def log_source_health(self, source_number, data, plain_json):
         """
         Logs an health entry for the given source
@@ -310,12 +347,15 @@ class ApiService():
         if not source_number:
             source_number = self.host_id

-        # Main Node: Alway log entry, independed of the source
+        # Main Node: Always log entry, independent of the source
         # Sync Node: Only log entry when it's coming from an active source
-        if self.node_type == NodeType.MAIN or \
-            (self.node_type == NodeType.SYNC and source_number == self.active_source):
+        if self.node_type == NodeType.MAIN or (
+            self.node_type == NodeType.SYNC and source_number == self.active_source
+        ):

-            healthlog = HealthHistory(source_number, data.log_time, data.is_healthy, data.details)
+            healthlog = HealthHistory(
+                source_number, data.log_time, data.is_healthy, data.details
+            )
             healthlog.save()
             self.logger.debug("Stored health info for '%s'" % str(source_number))

@@ -323,22 +363,33 @@ class ApiService():
                 return

             # Main Node: Push to Sync Node, if enabled
-            if self.node_type == NodeType.MAIN and self.sync_host and self.api_healthlog:
+            if (
+                self.node_type == NodeType.MAIN
+                and self.sync_host
+                and self.api_healthlog
+            ):
                 # health_schema = HealthHistorySchema()
                 # json_healthlog = health_schema.dump(healthlog)
                 api_url = self.sync_host + self.api_healthlog + "/" + str(source_number)
-                try:
+                try:
                     r = requests.post(api_url, json=plain_json)
                     if r.status_code == 200:
-                        self.logger.info("Successfully pushed healthlog for source '%s' to '%s'" % (str(source_number), self.sync_host))
+                        self.logger.info(
+                            "Successfully pushed healthlog for source '%s' to '%s'"
+                            % (str(source_number), self.sync_host)
+                        )
                         healthlog.is_synced = True
                         healthlog.save()
                     else:
-                        self.logger.error("Error while pushing healthlog to sync-node: " + str(r.json()))
+                        self.logger.error(
+                            "Error while pushing healthlog to sync-node: "
+                            + str(r.json())
+                        )
                 except Exception as e:
-                    self.logger.error("Error while posting to sync-node API '%s'!" % (api_url), e)
+                    self.logger.error(
+                        "Error while posting to sync-node API '%s'!" % (api_url), e
+                    )
         else:
             self.logger.info("Ditching healthlog sent from an inactive source")
-
diff --git a/src/sync.py b/src/aura_engine_api/sync.py
similarity index 63%
rename from src/sync.py
rename to src/aura_engine_api/sync.py
index 52b5224312ff806a6346c01a36825ec66d4e4dc7..bde729b6375756d26dd6ddee68536cae0597154c 100644
--- a/src/sync.py
+++ b/src/aura_engine_api/sync.py
@@ -18,18 +18,15 @@

 import threading
-import requests
 import time

-from sqlalchemy import create_engine
-from sqlalchemy.orm import scoped_session
-from sqlalchemy.orm import sessionmaker
-from sqlalchemy.exc import IntegrityError, InvalidRequestError
-
-from rest.models.play_log import PlayLog as PlayLogAPI
-from models import PlayLog, PlayLogSchema, ActivityLog
-
+import requests
+from sqlalchemy import create_engine
+from sqlalchemy.exc import IntegrityError, InvalidRequestError
+from sqlalchemy.orm import scoped_session, sessionmaker
+from aura_engine_api.models import ActivityLog, PlayLog
+from aura_engine_api.rest.models.play_log import PlayLog as PlayLogAPI


 class SyncJob(threading.Thread):
@@ -48,7 +45,6 @@ class SyncJob(threading.Thread):

     Session = None

-
     def __init__(self, config, logger, app):
         """
         Initialize Job.
@@ -61,27 +57,32 @@ class SyncJob(threading.Thread):
         self.exit_event = threading.Event()
         self.sync_interval = self.config.get("sync_interval")
         self.sync_batch_size = self.config.get("sync_batch_size")
-        self.logger.info("Initialized Sync Job - Synchronizing API Nodes every %s seconds and with a max batch-size of %s." % (self.sync_interval, self.sync_batch_size))
+        self.logger.info(
+            "Initialized Sync Job - Synchronizing API Nodes every %s seconds and with a max"
+            " batch-size of %s.",
+            self.sync_interval,
+            self.sync_batch_size,
+        )

         # Create a scoped local database session to be thread safe
         engine = create_engine(self.config.get_database_uri())
         session_factory = sessionmaker(autoflush=True, autocommit=False, bind=engine)
         self.Session = scoped_session(session_factory)

-
-
     def run(self):
         """
         Starts the Job.
-        """
+        """
         self.synchronize()

         while not self.exit_event.wait(self.sync_interval):
-            try:
+            try:
                 self.synchronize()
                 self.logger.info("Sync cycle done.\n\n")
             except Exception:
-                self.logger.info("Error while syncing entries. Maybe there's some connectivity issue. Aborting cycle ...")
-
+                self.logger.info(
+                    "Error while syncing entries. Maybe there's some connectivity issue."
+                    " Aborting cycle ..."
+                )

     def synchronize(self):
         """
@@ -90,16 +91,22 @@ class SyncJob(threading.Thread):
         entries = None
         synced_entries = 0
         unsynced_sources = self.get_unsynced_history()
-        self.logger.info("Synchronization of API Nodes: There are %s sources open to be synced." % len(unsynced_sources))
+        self.logger.info(
+            "Synchronization of API Nodes: There are %s sources open to be synced."
+            % len(unsynced_sources)
+        )

         for i in range(len(unsynced_sources)):
             source = unsynced_sources[i]
-            self.logger.info("Syncing source %s which is unsynced since %s" % (source.source_number, source.log_time))
+            self.logger.info(
+                "Syncing source %s which is unsynced since %s"
+                % (source.source_number, source.log_time)
+            )

             # Store the next source to build a datetime range
             next_source = None
-            if i+1 < len(unsynced_sources):
-                next_source = unsynced_sources[i+1]
+            if i + 1 < len(unsynced_sources):
+                next_source = unsynced_sources[i + 1]
             else:
                 next_source = self.create_next_source_log(source)

@@ -108,24 +115,32 @@ class SyncJob(threading.Thread):
             entries = self.get_unsynced_entries(source, next_source, page)

             while entries and len(entries) > 0:
-                if not entries: self.logger.info("Retrieved no entries to be synced")
-                else: self.logger.info("Retrieved %s playlogs to be synced" % len(entries))
+                if not entries:
+                    self.logger.info("Retrieved no entries to be synced")
+                else:
+                    self.logger.info(
+                        "Retrieved %s playlogs to be synced" % len(entries)
+                    )

                 # Store unsynced entries locally
                 for entry in entries:
                     try:
                         self.Session.begin_nested()
-                        entry = PlayLogAPI.from_dict(entry)
+                        entry = PlayLogAPI.from_dict(entry)
                         playlog = PlayLog(entry)
                         playlog.is_synced = True
                         self.db_save(playlog)
-                        self.logger.info("Stored synced playlog for '%s'" % playlog.track_start)
+                        self.logger.info(
+                            "Stored synced playlog for '%s'" % playlog.track_start
+                        )
                         synced_entries += 1
-                    except (IntegrityError, InvalidRequestError) as e:
+                    except (IntegrityError, InvalidRequestError):
                         self.Session.rollback()
-                        self.logger.info("Playlog for '%s' is already existing in local database. Skipping..." % playlog.track_start)
-
+                        self.logger.info(
+                            "Playlog for '%s' is already existing in local database. Skipping..."
+                            % playlog.track_start
+                        )

                 # Sleep a little to keep the effective load down low
                 time.sleep(self.config.get("sync_step_sleep"))
@@ -138,26 +153,31 @@ class SyncJob(threading.Thread):
             try:
                 source.is_synced = True
                 self.db_save(source)
-                self.logger.info("Sync for source %s:%s finalized!" % (source.source_number, source.log_time))
+                self.logger.info(
+                    "Sync for source %s:%s finalized!"
+                    % (source.source_number, source.log_time)
+                )
             except Exception as e:
-                self.logger.error("Cannot finalize sync state for source=%s:%s - Reason: %s" % (source.source_number, source.log_time, str(e)))
+                self.logger.error(
+                    "Cannot finalize sync state for source=%s:%s - Reason: %s"
+                    % (source.source_number, source.log_time, str(e))
+                )
                 # For now, let it be ... if there's more to sync let's do it in the next cycle

-        self.logger.info("... successfully synchronized %s playlogs!" % synced_entries)
-
-
+        self.logger.info(
+            "... successfully synchronized %s playlogs!" % synced_entries
+        )

     def create_next_source_log(self, current_source):
         """
         Create and store the next source in the ActivityLog. It's actually the same,
-        as the current, but acts as a references for the current sync and as a marker
-        for the entrypoint of the next sync -> to avoid unneccessary sync cycles.
+        as the current, but acts as a reference for the current sync and as a marker
+        for the entrypoint of the next sync -> to avoid unnecessary sync cycles.
         """
         next_source = ActivityLog(current_source.source_number)
         self.Session.add(next_source)
         return next_source

-
     def db_save(self, model):
         """
         Store some object to the database using the local, scoped session.
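
For illustration, not part of this patch: `SyncJob.__init__()` above wires up a thread-local database session. The same pattern in isolation, with a hypothetical SQLite URI standing in for `config.get_database_uri()`:

from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker

engine = create_engine("sqlite:///sync.db")  # hypothetical URI, for illustration
session_factory = sessionmaker(autoflush=True, autocommit=False, bind=engine)
Session = scoped_session(session_factory)

# scoped_session hands each thread its own session object, so the sync
# thread's transactions stay isolated from the web request handlers.
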
@@ -165,19 +185,19 @@ class SyncJob(threading.Thread):
         self.Session.add(model)
         self.Session.flush()
         self.Session.commit()

-
-
     def get_unsynced_history(self):
         """
         Retrieves all sources with un-synced states
-        """
+        """
         self.Session.commit()
-        unsynced = self.Session.query(ActivityLog).filter(ActivityLog.is_synced == False).\
-            order_by(ActivityLog.log_time.asc())
+        unsynced = (
+            self.Session.query(ActivityLog)
+            .filter(ActivityLog.is_synced.is_(False))
+            .order_by(ActivityLog.log_time.asc())
+        )
         return unsynced.all()

-
     def get_unsynced_entries(self, source, next_source, page):
         """
         Retrieve unsynced entries from main node
@@ -189,21 +209,32 @@ class SyncJob(threading.Thread):
             to_time = next_source.log_time

         try:
-            params = { "page": page, "limit": self.sync_batch_size, "skip_synced": "true", "from_date": source.log_time, "to_date": to_time}
+            params = {
+                "page": page,
+                "limit": self.sync_batch_size,
+                "skip_synced": "true",
+                "from_date": source.log_time,
+                "to_date": to_time,
+            }
             url = self.get_url(source.source_number)
             response = requests.get(url, params=params)
             if response.status_code == 200:
                 self.logger.info("Response from '%s' OK (200)" % url)
                 return response.json()
             else:
-                msg = "Invalid status code while getting unsynced entries from remote API: " + str(response.status_code)
+                msg = (
+                    "Invalid status code while getting unsynced entries from remote API: "
+                    + str(response.status_code)
+                )
                 self.logger.warn(msg)
                 raise Exception(msg)
         except Exception as e:
-            self.logger.warn("Error while getting unsynced entries from remote API '%s'!\n%s" % (url, str(e)))
+            self.logger.warn(
+                "Error while getting unsynced entries from remote API '%s'!\n%s"
+                % (url, str(e))
+            )
             raise e

-
     def get_url(self, source_number):
         """
         Builds an URL for the remote API.
@@ -212,13 +243,8 @@ class SyncJob(threading.Thread):
         url += self.config.get("sync_api_get_playlog")
         return url

-
     def exit(self):
         """
         Called when the application shuts down.
         """
         self.exit_event.set()
-
-
-
-
diff --git a/src/rest/models/__init__.py b/src/rest/models/__init__.py
deleted file mode 100644
index 7330bd7883d892b1eb9832aade077286d2001028..0000000000000000000000000000000000000000
--- a/src/rest/models/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# coding: utf-8
-
-# flake8: noqa
-from __future__ import absolute_import
-# import models into model package
-from src.rest.models.clock_info import ClockInfo
-from src.rest.models.health_log import HealthLog
-from src.rest.models.inline_response400 import InlineResponse400
-from src.rest.models.play_log import PlayLog
-from src.rest.models.playlist import Playlist
-from src.rest.models.playlist_entry import PlaylistEntry
-from src.rest.models.timeslot import Timeslot
-from src.rest.models.track import Track
diff --git a/tox.ini b/tox.ini
index 7dd565f9910b4af794efd2834ecfbc66aa795dc9..2751b218c1d2385c081c114c9094fc5e388126dc 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
 [tox]
-envlist = py37
+envlist = py38

 [testenv]
 deps=-r{toxinidir}/requirements.txt
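
For illustration, not part of this patch: the duplicate handling in `synchronize()` above hinges on one savepoint per entry. The pattern reduced to a hypothetical helper (`save_once` is not in this codebase) that mirrors the `begin_nested()`/`rollback()` flow:

from sqlalchemy.exc import IntegrityError, InvalidRequestError


def save_once(session, model, logger):
    # Open a SAVEPOINT so a duplicate (unique-constraint) violation rolls
    # back only this insert, not the rest of the sync batch.
    try:
        session.begin_nested()
        session.add(model)
        session.flush()
        session.commit()
        return True
    except (IntegrityError, InvalidRequestError):
        session.rollback()
        logger.info("Entry already present in the local database. Skipping...")
        return False
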