...
 
Commits (2)
......@@ -26,23 +26,23 @@ from flask import Flask, g, request
# Flask application setup: load packaged defaults first, then allow an
# environment-pointed settings file to override them.
app = Flask(__name__)
app.config.from_object("assurancetourix.defaults")
if "FLASK_SETTINGS" in os.environ:
    app.config.from_envvar("FLASK_SETTINGS")
# Logging: outside debug mode, send WARNING and above to stderr so they
# are visible in the server logs.
if not app.debug:
    from logging import StreamHandler

    handler = StreamHandler()
    handler.setLevel(logging.WARNING)
    app.logger.addHandler(handler)
# Multiple template folders: user-configured TEMPLATE_DIRS take precedence
# over the application's bundled templates.
if app.config["TEMPLATE_DIRS"]:
    app.jinja_loader = jinja2.ChoiceLoader(
        [jinja2.FileSystemLoader(app.config["TEMPLATE_DIRS"]), app.jinja_loader]
    )
#
......@@ -50,41 +50,45 @@ if app.config["TEMPLATE_DIRS"]:
#
from flask_babel import Babel, gettext  # noqa

babel = Babel(app)
# Available UI languages: every translation found on disk, plus English
# (the source language), sorted by language code.
LANGUAGES = [
    (locale.language, locale.display_name.capitalize())
    for locale in babel.list_translations()
]
LANGUAGES.insert(0, ("en", "English"))
LANGUAGES.sort(key=lambda lang: lang[0])
@babel.localeselector
def get_locale():
    """Pick the best supported language from the request's Accept-Language."""
    return request.accept_languages.best_match([lang[0] for lang in LANGUAGES])
@app.before_request
def store_locale():
    """Expose the negotiated locale to templates and views via ``g.locale``."""
    from flask_babel import get_locale as babel_get_locale  # noqa

    g.locale = babel_get_locale()
@app.context_processor
def inject_covers_dir():
    """Make the cover-art directory and size available to every template."""
    return dict(
        covers_dir=app.config["COVERS_DIR"], covers_size=app.config["COVERS_SIZE"]
    )
#
# Database
#
@app.before_request
def db_connect():
    """Open a per-request database session on ``g.db``."""
    from assurancetourix.database import Database

    g.db = Database(app).get_session()
......@@ -106,13 +110,14 @@ def set_sqlite_pragma(dbapi_connection, connection_record):
cursor.execute("PRAGMA foreign_keys=ON")
cursor.close()
#
# REST API
#
from assurancetourix.api import api_bp  # noqa

# Mount the REST API blueprint under /api.
app.register_blueprint(api_bp, url_prefix="/api")
#
......@@ -122,5 +127,5 @@ app.register_blueprint(api_bp, url_prefix='/api')
from assurancetourix.views import * # noqa
if __name__ == "__main__":
    app.run()
......@@ -5,18 +5,18 @@ from .songs import SongV1, SongListV1, SongSearch
from .genres import GenreV1, GenreListV1, GenreSearch
api_bp = Blueprint("api", __name__)
api = Api(api_bp)

# Artist endpoints
api.add_resource(ArtistListV1, "/v1/artists/")
api.add_resource(ArtistV1, "/v1/artists/<int:artist_id>")
api.add_resource(ArtistSongsListV1, "/v1/artists/<int:artist_id>/songs/")
api.add_resource(ArtistSearch, "/v1/artists/search")

# Song endpoints
api.add_resource(SongListV1, "/v1/songs/")
api.add_resource(SongV1, "/v1/songs/<int:song_id>")
api.add_resource(SongSearch, "/v1/songs/search")

# Genre endpoints
api.add_resource(GenreListV1, "/v1/genres/")
api.add_resource(GenreV1, "/v1/genres/<int:genre_id>")
api.add_resource(GenreSearch, "/v1/genres/search")
......@@ -6,15 +6,15 @@ from .serializers import artist_fields, song_fields
from .utils import paginate_query, token_auth
# Shared request parser for artist create/update payloads.
parser = reqparse.RequestParser()
parser.add_argument("name", required=True)
class ArtistV1(Resource):
method_decorators = {
'put': [token_auth],
'patch': [token_auth],
'delete': [token_auth],
"put": [token_auth],
"patch": [token_auth],
"delete": [token_auth],
}
@marshal_with(artist_fields)
......@@ -25,7 +25,7 @@ class ArtistV1(Resource):
def delete(self, artist_id):
artist = g.db.query(Artist).get(artist_id)
artist.delete()
return '', 204
return "", 204
@marshal_with(artist_fields)
def put(self, artist_id):
......@@ -50,9 +50,7 @@ class ArtistV1(Resource):
class ArtistListV1(Resource):
method_decorators = {
'post': [token_auth],
}
method_decorators = {"post": [token_auth]}
@paginate_query(artist_fields)
def get(self):
......@@ -76,19 +74,17 @@ class ArtistSongsListV1(Resource):
class ArtistSearch(Resource):
    """Search artists by exact name or case-insensitive substring."""

    @paginate_query(artist_fields)
    def get(self):
        parser = reqparse.RequestParser()
        parser.add_argument("name")
        parser.add_argument("contains")
        args = parser.parse_args()
        artists = g.db.query(Artist)
        if args.get("name"):
            artists = artists.filter_by(name=args["name"])
        if args.get("contains"):
            # SQL LIKE on the lowered search term; distinct avoids duplicate
            # rows from the join.
            artists = artists.filter(
                Artist.name.like("%{}%".format(args["contains"].lower()))
            ).distinct()
        return artists
......@@ -7,15 +7,12 @@ from .utils import paginate_query, token_auth
# Shared request parser for genre payloads.
parser = reqparse.RequestParser()
parser.add_argument("name")
class GenreV1(Resource):
method_decorators = {
'patch': [token_auth],
'delete': [token_auth],
}
method_decorators = {"patch": [token_auth], "delete": [token_auth]}
@marshal_with(genre_fields)
def get(self, genre_id):
......@@ -25,7 +22,7 @@ class GenreV1(Resource):
def delete(self, genre_id):
genre = g.db.query(Genre).get(genre_id)
genre.delete()
return '', 204
return "", 204
@marshal_with(genre_fields)
def patch(self, genre_id):
......@@ -39,9 +36,7 @@ class GenreV1(Resource):
class GenreListV1(Resource):
method_decorators = {
'post': [token_auth],
}
method_decorators = {"post": [token_auth]}
@paginate_query(genre_fields)
def get(self):
......@@ -58,11 +53,10 @@ class GenreListV1(Resource):
class GenreSearch(Resource):
@paginate_query(genre_fields)
def get(self):
parser = reqparse.RequestParser()
parser.add_argument('name')
parser.add_argument("name")
args = parser.parse_args()
genres = g.db.query(Genre)
if args.get("name"):
......
......@@ -3,7 +3,6 @@ from flask_restful import fields
class StaticUrl(fields.Url):
def __init__(self, field_name, **kwargs):
super().__init__(endpoint="static", **kwargs)
self.field_name = field_name
......@@ -17,28 +16,28 @@ class StaticUrl(fields.Url):
# flask-restful marshalling schemas describing the JSON representation of
# each resource type.
artist_fields = {
    "id": fields.Integer(attribute="artist_id"),
    "name": fields.String,
    "uri": fields.Url(".artistv1"),
    "songs": fields.Url(".artistsongslistv1"),
}

song_fields = {
    "uri": fields.Url(".songv1"),
    "id": fields.Integer(attribute="song_id"),
    "artist": fields.Nested(artist_fields),
    "title": fields.String,
    "genre": fields.String(attribute="genre.name"),
    "language": fields.String,
    "cover": fields.String,
    "date_added": fields.DateTime,
    "date_updated": fields.DateTime,
    "duo": fields.Boolean,
    "has_karaoke": fields.Boolean,
}

genre_fields = {
    "id": fields.Integer(attribute="genre_id"),
    "name": fields.String,
    "uri": fields.Url(".genrev1"),
}
......@@ -30,15 +30,15 @@ def convert_to_bool(value):
# Shared request parser for song create/update payloads; the convert_to_*
# helpers turn raw form values into model instances / typed values.
parser = reqparse.RequestParser()
parser.add_argument("artist", type=convert_to_artist)
parser.add_argument("title")
parser.add_argument("genre", type=convert_to_genre)
parser.add_argument("language")
parser.add_argument("date_added", type=convert_to_date)
parser.add_argument("date_updated", type=convert_to_date)
parser.add_argument("duo", type=convert_to_bool)
parser.add_argument("has_karaoke", type=convert_to_bool)
parser.add_argument("cover", type=FileStorage, location="files")
def update_song(song, args):
......@@ -56,10 +56,7 @@ def update_song(song, args):
class SongV1(Resource):
method_decorators = {
'patch': [token_auth],
'delete': [token_auth],
}
method_decorators = {"patch": [token_auth], "delete": [token_auth]}
@marshal_with(song_fields)
def get(self, song_id):
......@@ -69,7 +66,7 @@ class SongV1(Resource):
def delete(self, song_id):
song = g.db.query(Song).get(song_id)
if song is None:
return 'No such song', 404
return "No such song", 404
artist = song.artist
genre = song.genre
g.db.delete(song)
......@@ -81,7 +78,7 @@ class SongV1(Resource):
if song.genre is not None and len(genre.songs) == 0:
g.db.delete(genre)
g.db.commit()
return '', 204
return "", 204
@marshal_with(song_fields)
def patch(self, song_id):
......@@ -95,10 +92,7 @@ class SongV1(Resource):
class SongListV1(Resource):
method_decorators = {
'post': [token_auth],
'delete': [token_auth],
}
method_decorators = {"post": [token_auth], "delete": [token_auth]}
@paginate_query(song_fields)
def get(self):
......@@ -121,20 +115,19 @@ class SongListV1(Resource):
def delete(self):
clear_all()
g.db.commit()
return '', 204
return "", 204
# Query-string parser for the song search endpoint.
search_parser = reqparse.RequestParser()
search_parser.add_argument("artist")
search_parser.add_argument("title")
search_parser.add_argument("contains")
search_parser.add_argument("genre")
search_parser.add_argument("duo", type=bool)
search_parser.add_argument("karaoke", type=bool)
class SongSearch(Resource):
@paginate_query(song_fields)
def get(self):
args = search_parser.parse_args()
......@@ -144,10 +137,11 @@ class SongSearch(Resource):
if args.get("title"):
songs = songs.filter_by(title=args["title"])
if args.get("contains"):
songs = songs.join(SearchTerm).filter(
SearchTerm.terms.like("%{}%".format(
args["contains"].lower()
))).distinct()
songs = (
songs.join(SearchTerm)
.filter(SearchTerm.terms.like("%{}%".format(args["contains"].lower())))
.distinct()
)
if args.get("duo", False):
songs = songs.filter(Song.duo.is_(True))
if args.get("karaoke", False):
......
......@@ -11,7 +11,6 @@ from assurancetourix.utils import get_cover_folder
class paginate_query:
def __init__(self, fields=None):
self.fields = fields
......@@ -20,6 +19,7 @@ class paginate_query:
def wrapper(*args, **kwargs):
query = f(*args, **kwargs)
return pagination(query, self.fields)
return wrapper
......@@ -78,4 +78,5 @@ def token_auth(func):
if token != app.config.get("API_TOKEN"):
abort(401)
return func(*args, **kwargs)
return wrapper
......@@ -29,13 +29,13 @@ from sqlalchemy.orm.exc import NoResultFound
class Database:
def __init__(self, app):
    """Store the Flask app and prepare the Alembic migration configuration."""
    self._app = app
    self._engine = None  # created lazily by the `engine` property
    self.alembic_cfg = AlembicConfig()
    self.alembic_cfg.set_main_option(
        "script_location", "assurancetourix:migrations"
    )
@property
def engine(self):
......@@ -47,9 +47,11 @@ class Database:
return self._engine
def get_session(self):
    """Create a scoped session bound to the engine and wire up ``Base.query``."""
    session = scoped_session(
        sessionmaker(autocommit=False, autoflush=False, bind=self.engine)
    )
    from .models import Base

    Base.query = session.query_property()
    return session
......@@ -68,6 +70,7 @@ class Database:
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
from .models import Base
Base.metadata.create_all(bind=self.engine)
command.stamp(self.alembic_cfg, "head")
......@@ -87,8 +90,9 @@ def get_or_create(model, **attrs):
# Migration helpers
def is_sqlite(bind):
    """Return True when the given bind/connection uses the SQLite dialect."""
    return bind.dialect.name == "sqlite"
def exists_in_db(bind, tablename, columnname=None):
......@@ -97,7 +101,6 @@ def exists_in_db(bind, tablename, columnname=None):
if columnname is None:
return tablename in md.tables
else:
return (
tablename in md.tables and
columnname in [c.name for c in md.tables[tablename].columns]
)
return tablename in md.tables and columnname in [
c.name for c in md.tables[tablename].columns
]
# -*- coding: utf-8 -*-
"""Default configuration values; override with a FLASK_SETTINGS env file."""
import os

basedir = os.path.abspath(os.path.dirname(__file__))

DEBUG = True
SQLALCHEMY_DATABASE_URI = "sqlite:///" + os.path.join(basedir, "..", "asstx.db")
BROWSERID_AUDIENCE = ["http://127.0.0.1:5000", "http://127.0.0.1:5001"]
SECRET_KEY = "SomethingVerySecretThatYouMustChange"
ADMINS = ("you@example.com",)
TEMPLATE_DIRS = []
MAX_CONTENT_LENGTH = 16 * 1024 * 1024  # Don't upload more than 16MB
......
......@@ -20,16 +20,18 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
from alembic import context
from sqlalchemy import create_engine, pool
#from logging.config import fileConfig
# from logging.config import fileConfig
# Interpret the config file for Python logging.
# This line sets up loggers basically.
#fileConfig(config.config_file_name)
# fileConfig(config.config_file_name)
try:
import assurancetourix
except ImportError:
import os, sys
if "assurancetourix" not in os.listdir("."):
raise
sys.path.append(os.getcwd())
......@@ -52,8 +54,7 @@ def run_migrations_offline():
script output.
"""
context.configure(
url=url, target_metadata=target_metadata, literal_binds=True)
context.configure(url=url, target_metadata=target_metadata, literal_binds=True)
with context.begin_transaction():
context.run_migrations()
......@@ -68,14 +69,12 @@ def run_migrations_online():
"""
connectable = create_engine(url, poolclass=pool.NullPool)
with connectable.connect() as connection:
context.configure(
connection=connection,
target_metadata=target_metadata
)
context.configure(connection=connection, target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
......
......@@ -9,8 +9,8 @@ Create Date: 2015-10-03 16:27:08.778320
from __future__ import absolute_import, unicode_literals, print_function
# revision identifiers, used by Alembic.
revision = "3b77e67ebe60"
down_revision = "4459c8f139bb"
branch_labels = None
depends_on = None
......@@ -21,28 +21,38 @@ from assurancetourix.database import is_sqlite, exists_in_db
def upgrade():
conn = op.get_bind()
genres_table = op.create_table('genres',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.Unicode(length=254), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_genres'))
genres_table = op.create_table(
"genres",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("name", sa.Unicode(length=254), nullable=False),
sa.PrimaryKeyConstraint("id", name=op.f("pk_genres")),
)
op.create_index(op.f('ix_genres_name'), 'genres', ['name'], unique=True)
op.add_column(u'songs', sa.Column('genre_id', sa.Integer(), nullable=True))
op.drop_index('ix_songs_genre', table_name='songs')
op.create_index(op.f("ix_genres_name"), "genres", ["name"], unique=True)
op.add_column("songs", sa.Column("genre_id", sa.Integer(), nullable=True))
op.drop_index("ix_songs_genre", table_name="songs")
if not is_sqlite(conn):
op.create_foreign_key(
op.f('fk_songs_genre_id_genres'), 'songs', 'genres', ['genre_id'], ['id'],
onupdate=u'cascade', ondelete=u'set null')
# Data migration
songs_table = sa.sql.table('songs',
sa.sql.column('id', sa.Integer),
sa.sql.column('genre', sa.Unicode),
sa.sql.column('genre_id', sa.Integer),
op.f("fk_songs_genre_id_genres"),
"songs",
"genres",
["genre_id"],
["id"],
onupdate="cascade",
ondelete="set null",
)
# Data migration
songs_table = sa.sql.table(
"songs",
sa.sql.column("id", sa.Integer),
sa.sql.column("genre", sa.Unicode),
sa.sql.column("genre_id", sa.Integer),
)
for song in conn.execute(songs_table.select()).fetchall():
if not song["genre"]:
continue
result = conn.execute(genres_table.select().where(genres_table.c.name == song["genre"])).fetchone()
result = conn.execute(
genres_table.select().where(genres_table.c.name == song["genre"])
).fetchone()
if result is None:
result = conn.execute(genres_table.insert().values(name=song["genre"]))
genre_id = result.inserted_primary_key[0]
......@@ -50,33 +60,40 @@ def upgrade():
genre_id = result["id"]
conn.execute(songs_table.update().values(genre_id=genre_id))
if not is_sqlite(conn):
op.drop_column('songs', 'genre')
op.drop_column("songs", "genre")
def downgrade():
conn = op.get_bind()
if not exists_in_db(conn, "songs", "genre"):
op.add_column('songs', sa.Column('genre', sa.VARCHAR(length=254), nullable=True))
op.create_index('ix_songs_genre', 'songs', ['genre'], unique=False)
songs_table = sa.sql.table('songs',
sa.sql.column('id', sa.Integer),
sa.sql.column('genre', sa.Unicode),
sa.sql.column('genre_id', sa.Integer),
op.add_column(
"songs", sa.Column("genre", sa.VARCHAR(length=254), nullable=True)
)
genres_table = sa.sql.table('genres',
sa.sql.column('id', sa.Integer),
sa.sql.column('name', sa.Unicode),
op.create_index("ix_songs_genre", "songs", ["genre"], unique=False)
songs_table = sa.sql.table(
"songs",
sa.sql.column("id", sa.Integer),
sa.sql.column("genre", sa.Unicode),
sa.sql.column("genre_id", sa.Integer),
)
genres_table = sa.sql.table(
"genres", sa.sql.column("id", sa.Integer), sa.sql.column("name", sa.Unicode)
)
for song in conn.execute(
sa.select([songs_table.c.id, genres_table.c.name]).select_from(
songs_table.join(genres_table, songs_table.c.genre_id == genres_table.c.id)
)
).fetchall():
conn.execute(
songs_table.update()
.values(genre=song["name"])
.where(songs_table.c.id == song["id"])
)
for song in conn.execute(sa.select(
[songs_table.c.id, genres_table.c.name]
).select_from(songs_table.join(
genres_table, songs_table.c.genre_id == genres_table.c.id))
).fetchall():
conn.execute(songs_table.update().values(genre=song["name"]
).where(songs_table.c.id == song["id"]))
if not is_sqlite(conn):
op.drop_constraint(op.f('fk_songs_genre_id_genres'), 'songs', type_='foreignkey')
op.drop_column('songs', 'genre_id')
op.drop_index(op.f('ix_genres_name'), table_name='genres')
op.drop_table('genres')
op.drop_constraint(
op.f("fk_songs_genre_id_genres"), "songs", type_="foreignkey"
)
op.drop_column("songs", "genre_id")
op.drop_index(op.f("ix_genres_name"), table_name="genres")
op.drop_table("genres")
### end Alembic commands ###
......@@ -9,7 +9,7 @@ Create Date: 2015-09-30 09:26:51.856727
from __future__ import absolute_import, unicode_literals, print_function
# revision identifiers, used by Alembic.
revision = "438fcca3ba4a"
down_revision = None  # initial migration
branch_labels = None
depends_on = None
......@@ -23,45 +23,50 @@ from assurancetourix.models import get_sort_string
def upgrade():
conn = op.get_bind()
if not is_sqlite(conn):
op.alter_column('songs', 'artist',
existing_type=sa.VARCHAR(length=254),
nullable=False)
op.alter_column('songs', 'title',
existing_type=sa.VARCHAR(length=254),
nullable=False)
op.alter_column(
"songs", "artist", existing_type=sa.VARCHAR(length=254), nullable=False
)
op.alter_column(
"songs", "title", existing_type=sa.VARCHAR(length=254), nullable=False
)
# don't import the table definition from the models, it may break this
# migration when the model is updated in the future (see the Alembic doc)
op.add_column('songs', sa.Column('sort_as', sa.Unicode(length=254), nullable=True))
op.add_column("songs", sa.Column("sort_as", sa.Unicode(length=254), nullable=True))
if not is_sqlite(conn):
songs_table = sa.sql.table('songs',
sa.sql.column('id', sa.Integer),
sa.sql.column('artist', sa.Unicode),
sa.sql.column('title', sa.Unicode),
sa.sql.column('sort_as', sa.Unicode),
)
songs_table = sa.sql.table(
"songs",
sa.sql.column("id", sa.Integer),
sa.sql.column("artist", sa.Unicode),
sa.sql.column("title", sa.Unicode),
sa.sql.column("sort_as", sa.Unicode),
)
for song in conn.execute(songs_table.select()).fetchall():
sort_as = get_sort_string(song["artist"], song["title"])
conn.execute(songs_table.update().where(
songs_table.c.id == song["id"]).values(
sort_as=sort_as))
op.alter_column('songs', 'sort_as',
existing_type=sa.VARCHAR(length=254),
nullable=False)
op.create_index(op.f('ix_songs_sort_as'), 'songs', ['sort_as'], unique=False)
conn.execute(
songs_table.update()
.where(songs_table.c.id == song["id"])
.values(sort_as=sort_as)
)
op.alter_column(
"songs", "sort_as", existing_type=sa.VARCHAR(length=254), nullable=False
)
op.create_index(op.f("ix_songs_sort_as"), "songs", ["sort_as"], unique=False)
if not is_sqlite(conn):
op.create_unique_constraint(op.f('uq_songs_artist'), 'songs', ['artist', 'title'])
op.create_unique_constraint(
op.f("uq_songs_artist"), "songs", ["artist", "title"]
)
def downgrade():
conn = op.get_bind()
if not is_sqlite(conn):
op.drop_constraint(op.f('uq_songs_artist'), 'songs', type_='unique')
op.alter_column('songs', 'title',
existing_type=sa.VARCHAR(length=254),
nullable=True)
op.alter_column('songs', 'artist',
existing_type=sa.VARCHAR(length=254),
nullable=True)
op.drop_index(op.f('ix_songs_sort_as'), table_name='songs')
op.drop_constraint(op.f("uq_songs_artist"), "songs", type_="unique")
op.alter_column(
"songs", "title", existing_type=sa.VARCHAR(length=254), nullable=True
)
op.alter_column(
"songs", "artist", existing_type=sa.VARCHAR(length=254), nullable=True
)
op.drop_index(op.f("ix_songs_sort_as"), table_name="songs")
if not is_sqlite(conn):
op.drop_column('songs', 'sort_as')
op.drop_column("songs", "sort_as")
......@@ -9,8 +9,8 @@ Create Date: 2015-10-03 10:38:38.640553
from __future__ import absolute_import, unicode_literals, print_function
# revision identifiers, used by Alembic.
revision = "4459c8f139bb"
down_revision = "4ee4a2824244"
branch_labels = None
depends_on = None
......@@ -20,10 +20,18 @@ from assurancetourix.database import is_sqlite, exists_in_db
def upgrade():
    """Add the boolean ``duo`` and ``has_karaoke`` flags to songs."""
    op.add_column(
        "songs",
        sa.Column("duo", sa.Boolean(), server_default=sa.text("0"), nullable=False),
    )
    op.add_column(
        "songs",
        sa.Column(
            "has_karaoke", sa.Boolean(), server_default=sa.text("0"), nullable=False
        ),
    )


def downgrade():
    """Drop the ``duo`` and ``has_karaoke`` columns."""
    op.drop_column("songs", "has_karaoke")
    op.drop_column("songs", "duo")
......@@ -9,8 +9,8 @@ Create Date: 2015-10-01 16:04:05.406991
from __future__ import absolute_import, unicode_literals, print_function
# revision identifiers, used by Alembic.
revision = "491bc3d020f3"
down_revision = "438fcca3ba4a"
branch_labels = None
depends_on = None
......@@ -21,42 +21,53 @@ from assurancetourix.database import is_sqlite, exists_in_db
def upgrade():
conn = op.get_bind()
op.create_table('artists',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.Unicode(length=254), nullable=False),
sa.Column('sort_as', sa.Unicode(length=254), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_artists'))
op.create_table(
"artists",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("name", sa.Unicode(length=254), nullable=False),
sa.Column("sort_as", sa.Unicode(length=254), nullable=False),
sa.PrimaryKeyConstraint("id", name=op.f("pk_artists")),
)
op.create_index(op.f("ix_artists_name"), "artists", ["name"], unique=True)
op.create_index(op.f("ix_artists_sort_as"), "artists", ["sort_as"], unique=False)
op.add_column("songs", sa.Column("artist_id", sa.Integer(), nullable=False))
op.create_foreign_key(
op.f("fk_songs_artist_id_artists"),
"songs",
"artists",
["artist_id"],
["id"],
onupdate="cascade",
ondelete="cascade",
)
op.create_unique_constraint(
op.f("uq_songs_artist_id"), "songs", ["artist_id", "title"]
)
op.create_index(op.f('ix_artists_name'), 'artists', ['name'], unique=True)
op.create_index(op.f('ix_artists_sort_as'), 'artists', ['sort_as'], unique=False)
op.add_column(u'songs', sa.Column('artist_id', sa.Integer(), nullable=False))
op.create_foreign_key(op.f('fk_songs_artist_id_artists'), 'songs', 'artists', ['artist_id'], ['id'], onupdate=u'cascade', ondelete=u'cascade')
op.create_unique_constraint(op.f('uq_songs_artist_id'), 'songs', ['artist_id', 'title'])
# Data migration not supported, just recreate the library
if not is_sqlite(conn):
op.alter_column(u'songs', 'title',
existing_type=sa.VARCHAR(length=254),
nullable=False)
op.drop_index('ix_songs_artist', table_name='songs')
op.drop_index('ix_songs_sort_as', table_name='songs')
op.drop_column(u'songs', 'sort_as')
op.drop_column(u'songs', 'artist')
op.alter_column(
"songs", "title", existing_type=sa.VARCHAR(length=254), nullable=False
)
op.drop_index("ix_songs_artist", table_name="songs")
op.drop_index("ix_songs_sort_as", table_name="songs")
op.drop_column("songs", "sort_as")
op.drop_column("songs", "artist")
def downgrade():
    """Refuse to downgrade: the artist-table split loses data irreversibly."""
    raise RuntimeError("Downgrade not supported")
    # The auto-generated commands below are kept for reference only; they
    # are unreachable because of the raise above.
    #### commands auto generated by Alembic - please adjust! ###
    # op.add_column(u'songs', sa.Column('artist', sa.VARCHAR(length=254), nullable=True))
    # op.add_column(u'songs', sa.Column('sort_as', sa.VARCHAR(length=254), nullable=True))
    # op.drop_constraint(op.f('fk_songs_artist_id_artists'), 'songs', type_='foreignkey')
    # op.create_index('ix_songs_sort_as', 'songs', ['sort_as'], unique=False)
    # op.create_index('ix_songs_artist', 'songs', ['artist'], unique=False)
    # op.drop_constraint(op.f('uq_songs_artist_id'), 'songs', type_='unique')
    # op.alter_column(u'songs', 'title',
    #                existing_type=sa.VARCHAR(length=254),
    #                nullable=True)
    # op.drop_column(u'songs', 'artist_id')
    # op.drop_index(op.f('ix_artists_sort_as'), table_name='artists')
    # op.drop_index(op.f('ix_artists_name'), table_name='artists')
    # op.drop_table('artists')
    #### end Alembic commands ###
......@@ -9,8 +9,8 @@ Create Date: 2015-10-02 15:42:38.547326
from __future__ import absolute_import, unicode_literals, print_function
# revision identifiers, used by Alembic.
revision = "4ee4a2824244"
down_revision = "491bc3d020f3"
branch_labels = None
depends_on = None
......@@ -20,8 +20,8 @@ from assurancetourix.database import is_sqlite, exists_in_db
def upgrade():
    """Index songs.date_added for fast 'recently added' queries."""
    op.create_index(op.f("ix_songs_date_added"), "songs", ["date_added"], unique=False)


def downgrade():
    """Drop the songs.date_added index."""
    op.drop_index(op.f("ix_songs_date_added"), table_name="songs")
......@@ -9,8 +9,8 @@ Create Date: 2017-01-12 23:16:52.964405
from __future__ import absolute_import, unicode_literals, print_function
# revision identifiers, used by Alembic.
revision = "6c8fa4d92c86"
down_revision = "3b77e67ebe60"
branch_labels = None
depends_on = None
......@@ -20,16 +20,25 @@ from assurancetourix.database import is_sqlite, exists_in_db
def upgrade():
    """Create the search_terms table (one row of terms per song) and its index."""
    op.create_table(
        "search_terms",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("song_id", sa.Integer(), nullable=False),
        sa.Column("terms", sa.Unicode(length=254), nullable=False),
        sa.ForeignKeyConstraint(
            ["song_id"],
            ["songs.id"],
            name=op.f("fk_search_terms_song_id_songs"),
            onupdate="cascade",
            ondelete="cascade",
        ),
        sa.PrimaryKeyConstraint("id", name=op.f("pk_search_terms")),
    )
    op.create_index(
        op.f("ix_search_terms_terms"), "search_terms", ["terms"], unique=False
    )


def downgrade():
    """Drop the search_terms index and table."""
    op.drop_index(op.f("ix_search_terms_terms"), table_name="search_terms")
    op.drop_table("search_terms")
......@@ -9,8 +9,8 @@ Create Date: 2017-10-30 12:34:53.824746
from __future__ import absolute_import, unicode_literals, print_function
# revision identifiers, used by Alembic.
revision = "dbf847d5ee78"
down_revision = "6c8fa4d92c86"
branch_labels = None
depends_on = None
......@@ -20,24 +20,26 @@ from assurancetourix.database import is_sqlite, exists_in_db
def upgrade():
    """Add songs.date_updated, backfill it from date_added, then index it."""
    # Lightweight table stub so the data backfill doesn't depend on the
    # (possibly newer) model definitions.
    songs_table = sa.sql.table(
        "songs",
        sa.sql.column("date_added", sa.DateTime),
        sa.sql.column("date_updated", sa.DateTime),
    )
    # Added nullable first so the UPDATE below can populate existing rows.
    op.add_column("songs", sa.Column("date_updated", sa.DateTime(), nullable=True))
    # op.add_column('songs', sa.Column(
    #              'date_updated', sa.DateTime(), nullable=False,
    #              default=songs_table.c.date_added))
    # conn = op.get_bind()
    # conn.execute(songs_table.update().values(date_updated=songs_table.c.date_added))
    op.execute(songs_table.update().values(date_updated=songs_table.c.date_added))
    # batch_alter_table makes the NOT NULL change work on SQLite too.
    with op.batch_alter_table("songs") as batch_op:
        batch_op.alter_column("date_updated", existing_type=sa.DateTime, nullable=False)
    op.create_index(
        op.f("ix_songs_date_updated"), "songs", ["date_updated"], unique=False
    )


def downgrade():
    """Drop the songs.date_updated index and column."""
    op.drop_index(op.f("ix_songs_date_updated"), table_name="songs")
    with op.batch_alter_table("songs") as batch_op:
        batch_op.drop_column("date_updated")
......@@ -27,8 +27,14 @@ import flask
import PIL
import PIL.Image
from sqlalchemy import (
Column, Integer, Unicode, Boolean, DateTime, ForeignKey,
UniqueConstraint)
Column,
Integer,
Unicode,
Boolean,
DateTime,
ForeignKey,
UniqueConstraint,
)
from sqlalchemy import event
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
......@@ -40,82 +46,87 @@ from assurancetourix.utils import get_cover_folder
Base = declarative_base()
# Deterministic names for indexes and constraints, so migrations can
# refer to them by name (required e.g. when dropping constraints).
Base.metadata.naming_convention = {
    "ix": "ix_%(column_0_label)s",
    "uq": "uq_%(table_name)s_%(column_0_name)s",
    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    "pk": "pk_%(table_name)s",
}

# Characters allowed in sort keys: word characters and spaces only.
SORT_ALLOWED = re.compile(r"[^\w ]")
def get_sort_string(value):
    """Return a normalized sort key for *value*.

    Lowercases, strips punctuation (everything outside ``SORT_ALLOWED``),
    and removes a leading English/French article ("the ", "les ").
    """
    value = SORT_ALLOWED.sub("", value.strip().lower())
    for ignore in ["the ", "les "]:
        if value.startswith(ignore):
            value = value[len(ignore) :]
    return value
def strip_accents(s):
    """Return *s* with accents removed (NFD-decompose, drop combining marks).

    http://stackoverflow.com/questions/517923/what-is-the-best-way-to-remove-accents-in-a-python-unicode-string
    """
    return "".join(
        c for c in unicodedata.normalize("NFD", s) if unicodedata.category(c) != "Mn"
    )
class Artist(Base):
    """ORM model for the ``artists`` table."""

    __tablename__ = "artists"

    artist_id = Column("id", Integer, primary_key=True)
    name = Column(Unicode(254), index=True, unique=True, nullable=False)
    # Sort key derived from the name at insert time (lowercased,
    # punctuation stripped, leading article removed).
    sort_as = Column(
        Unicode(254),
        index=True,
        nullable=False,
        default=lambda c: get_sort_string(c.current_parameters["name"]),
    )
class Song(Base):
    """ORM model for the ``songs`` table.

    A song belongs to exactly one artist (cascade on delete/update) and
    optionally to a genre; (artist_id, title) pairs are unique.
    """

    __tablename__ = "songs"
    __table_args__ = (UniqueConstraint("artist_id", "title"),)

    song_id = Column("id", Integer, primary_key=True)
    artist_id = Column(
        Integer,
        ForeignKey("artists.id", ondelete="cascade", onupdate="cascade"),
        nullable=False,
    )
    artist = relationship("Artist", backref="songs", lazy="joined")
    title = Column(Unicode(254), index=True, nullable=False)
    genre_id = Column(
        Integer,
        ForeignKey("genres.id", ondelete="set null", onupdate="cascade"),
        nullable=True,
    )
    genre = relationship("Genre", backref="songs", lazy="joined")
    language = Column(Unicode(254), index=True, nullable=True)
    # Cover image path relative to the covers folder ("/"-separated).
    image = Column(Unicode(254), index=True, nullable=True)
    active = Column(Boolean, server_default=true(), nullable=False)
    date_added = Column(
        DateTime, default=datetime.datetime.utcnow, index=True, nullable=False
    )
    date_updated = Column(
        DateTime, default=datetime.datetime.utcnow, index=True, nullable=False
    )
    duo = Column(Boolean, server_default=false(), nullable=False)
    has_karaoke = Column(Boolean, server_default=false(), nullable=False)
    search_terms = relationship(
        "SearchTerm", backref="song", cascade="all, delete-orphan"
    )

    @property
    def cover(self):
        """URL of the song's cover image, or None if unset or missing on disk."""
        cover_folder = get_cover_folder()
        if not self.image or not os.path.exists(
            os.path.join(cover_folder, self.image)
        ):
            return None
        return flask.url_for(
            "static", filename="/".join([app.config["COVERS_DIR"], self.image])
        )
@cover.setter
def cover(self, fileobj):
......@@ -138,11 +149,14 @@ class Song(Base):
os.makedirs(image_dir)
img = PIL.Image.open(fileobj.stream)
img_width, img_height = img.size
if (img_width > app.config["COVERS_SIZE"]
or img_height > app.config["COVERS_SIZE"]):
if (
img_width > app.config["COVERS_SIZE"]
or img_height > app.config["COVERS_SIZE"]
):
img.thumbnail(
(app.config["COVERS_SIZE"], app.config["COVERS_SIZE"]),
PIL.Image.ANTIALIAS)
PIL.Image.ANTIALIAS,
)
with open(image_path, "w") as fh:
img.save(fh, "JPEG")
# Don't use os.path.join below, it's an URL
......@@ -152,8 +166,7 @@ class Song(Base):
if not self.image:
return
cover_folder = get_cover_folder()
image_path = os.path.join(
cover_folder, self.image.replace("/", os.sep))
image_path = os.path.join(cover_folder, self.image.replace("/", os.sep))
try:
os.remove(image_path)
except OSError:
......@@ -181,29 +194,30 @@ class Song(Base):
terms.append(strip_accents(self.language.lower()))
self.search_terms = [
SearchTerm(song_id=self.song_id, terms=term) for term in terms
]
]
@event.listens_for(Song, "after_delete")
def receive_after_delete(mapper, connection, target):
    """Remove the deleted song's cover image file from disk."""
    target._delete_cover()
class SearchTerm(Base):
    """ORM model for the ``search_terms`` table: one search term per row,
    linked to its song (cascade on delete/update)."""

    __tablename__ = "search_terms"

    id = Column(Integer, primary_key=True)
    song_id = Column(
        Integer,
        ForeignKey("songs.id", ondelete="cascade", onupdate="cascade"),
        nullable=False,
    )
    terms = Column(Unicode(254), index=True, nullable=False)
class Genre(Base):
    """ORM model for the ``genres`` table."""

    __tablename__ = "genres"

    genre_id = Column("id", Integer, primary_key=True)
    name = Column(Unicode(254), index=True, unique=True, nullable=False)
......@@ -76,7 +76,7 @@ class Run(Command):
def main():
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--config", help="Load this configuration file")
subparsers = parser.add_subparsers(help='sub-command help')
subparsers = parser.add_subparsers(help="sub-command help")
for action_class in Command.__subclasses__():
action_class().make_parser(subparsers)
args = parser.parse_args()
......@@ -89,5 +89,5 @@ def main():
sys.exit(args.func(args))
if __name__ == "__main__":
    main()
......@@ -26,11 +26,7 @@ from . import app
from .models import Song, Artist, SearchTerm
# Public API of this views module.
__all__ = ("index", "latest", "search")
def limit_results(results):
......@@ -47,25 +43,21 @@ def limit_results(results):
return results[start:end], next_start
@app.route("/", methods=["GET"])
def index():
    """Render the main page; its content is loaded from the by-name data URL."""
    return render_template(
        "index.html", page_id="index", load_url=url_for("data_by_name")
    )
@app.route("/latest", methods=["GET"])
def latest():
    """Render the latest-songs page; content is loaded from the latest data URL."""
    return render_template(
        "index.html", page_id="latest", load_url=url_for("data_latest")
    )
@app.route('/search', methods=["GET"])
@app.route("/search", methods=["GET"])
def search():
q = request.args.get("q", "")
duo = request.args.get("duo", type=bool)
......@@ -77,8 +69,7 @@ def search():
qs["duo"] = "on"
if karaoke:
qs["karaoke"] = "on"
load_url = "{}?{}".format(
url_for("data_search"), urlencode(qs))
load_url = "{}?{}".format(url_for("data_search"), urlencode(qs))
return render_template(
"index.html",
page_id="search",
......@@ -86,34 +77,28 @@ def search():
duo=duo,
karaoke=karaoke,
load_url=load_url,
)
)
@app.route("/data/by-name", methods=["GET"])
def data_by_name():
    """Return one page of artists (ordered by sort key) as JSON.

    The payload carries the next page's start offset and the rendered
    HTML fragment for the artist list.
    """
    artists = g.db.query(Artist).order_by(Artist.sort_as)
    artists, next_start = limit_results(artists)
    return json.jsonify(
        next=next_start, html=render_template("_artist_list.html", artists=artists)
    )
@app.route("/data/latest", methods=["GET"])
def data_latest():
    """Return one page of songs, newest first, as JSON.

    The payload carries the next page's start offset and the rendered
    HTML fragment for the song list.
    """
    songs = g.db.query(Song).order_by(desc(Song.date_added))
    songs, next_start = limit_results(songs)
    return json.jsonify(
        next=next_start, html=render_template("_song_list.html", songs=songs)
    )
@app.route('/data/search', methods=["GET"])
@app.route("/data/search", methods=["GET"])
def data_search():
q = request.args.get("q", "").strip()
duo = request.args.get("duo", type=bool)
......@@ -122,9 +107,11 @@ def data_search():
if not q and not duo and not karaoke:
return json.jsonify(html="")
if q:
songs = songs.join(SearchTerm).filter(
SearchTerm.terms.like("%{}%".format(q.lower()))
).distinct()
songs = (
songs.join(SearchTerm)
.filter(SearchTerm.terms.like("%{}%".format(q.lower())))
.distinct()
)
if duo:
songs = songs.filter(Song.duo.is_(True))
if karaoke:
......@@ -132,8 +119,5 @@ def data_search():
songs = songs.order_by(Artist.sort_as, Song.title)
songs, next_start = limit_results(songs)
return json.jsonify(
next=next_start,
html=render_template(
"_song_list.html",
songs=songs),
)
next=next_start, html=render_template("_song_list.html", songs=songs)
)
......@@ -5,14 +5,17 @@ try:
import setuptools
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
def reqfile(filepath):
"""Turns a text file into a list (one element per line)"""
result = []
import re
url_re = re.compile(".+:.+#egg=(.+)")
with open(filepath, "r") as f:
for line in f:
......@@ -30,24 +33,22 @@ setup(
name="assurancetourix",
version="0.3",
description="Organize your karaoke parties",
long_description=open('README.rst').read(),
author='Aurelien Bompard',
author_email='aurelien@bompard.org',
long_description=open("README.rst").read(),
author="Aurelien Bompard",
author_email="aurelien@bompard.org",
url="https://gitlab.com/abompard/assurancetourix",
license="AGPLv3+",
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
"Programming Language :: Python :: 3",
],
],
packages=find_packages(),
#include_package_data=True,
# include_package_data=True,
install_requires=reqfile("requirements.txt"),
tests_requires=["nose2"],
test_suite='nose2.collector.collector',
test_suite="nose2.collector.collector",
entry_points={
'console_scripts': [
'assurancetourix = assurancetourix.scripts:main',
],
},
)
"console_scripts": ["assurancetourix = assurancetourix.scripts:main"]
},
)