I've been struggling with Alembic lately: I can't figure out why it puts the upgrade/downgrade changes the wrong way around.
I added two new fields (about_me, last_seen) to the User model and ran 'flask db migrate', but looking at the generated version file I can see that the migration I expected in upgrade() is in downgrade() instead.
I've tried running db.create_all() and db.drop_all(), and dropping the alembic_version table, with no success. Every time I get the same result shown below, when the create_table commands should be in upgrade() and the drop_table commands in downgrade().
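Roughly the reset steps I tried, from a flask shell, using the db instance from manage.py (the exact statement I used to drop alembic_version may have differed):

from manage import db

db.drop_all()    # drop all tables known to this db instance
db.create_all()  # recreate them from the models
# and in psql: DROP TABLE alembic_version;  -- forget the migration history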
The only way I can currently make it work is to drop the required tables and then manually copy and paste the code from downgrade() into upgrade().
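For reference, with the tables already in place, what I expected flask db migrate to generate for the two new columns is something along these lines (a sketch based on my model definitions, not actual output):

def upgrade():
    op.add_column('users', sa.Column('about_me', sa.String(length=140), nullable=True))
    op.add_column('users', sa.Column('last_seen', sa.DateTime(), nullable=True))


def downgrade():
    op.drop_column('users', 'last_seen')
    op.drop_column('users', 'about_me')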
The migration file that actually gets generated:
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('tutor_category')
    op.drop_table('tutors')
    op.drop_index('ix_categories_name', table_name='categories')
    op.drop_table('categories')
    op.drop_index('ix_users_email', table_name='users')
    op.drop_index('ix_users_first_name', table_name='users')
    op.drop_index('ix_users_last_name', table_name='users')
    op.drop_index('ix_users_timestamp_joined', table_name='users')
    op.drop_table('users')
    op.drop_index('ix_reviews_rating', table_name='reviews')
    op.drop_index('ix_reviews_timestamp', table_name='reviews')
    op.drop_table('reviews')
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('reviews',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('tutor_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('rating', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('timestamp', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['tutor_id'], ['tutors.id'], name='reviews_tutor_id_fkey'),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], name='reviews_user_id_fkey'),
        sa.PrimaryKeyConstraint('id', name='reviews_pkey')
    )
    op.create_index('ix_reviews_timestamp', 'reviews', ['timestamp'], unique=False)
    op.create_index('ix_reviews_rating', 'reviews', ['rating'], unique=False)
    op.create_table('users',
        sa.Column('id', sa.INTEGER(), server_default=sa.text("nextval('users_id_seq'::regclass)"), autoincrement=True, nullable=False),
        sa.Column('first_name', sa.VARCHAR(length=64), autoincrement=False, nullable=False),
        sa.Column('last_name', sa.VARCHAR(length=64), autoincrement=False, nullable=False),
        sa.Column('email', sa.VARCHAR(length=120), autoincrement=False, nullable=False),
        sa.Column('profile_img', sa.VARCHAR(length=120), autoincrement=False, nullable=True),
        sa.Column('password_hash', sa.VARCHAR(length=128), autoincrement=False, nullable=False),
        sa.Column('timestamp_joined', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
        sa.Column('about_me', sa.VARCHAR(length=140), autoincrement=False, nullable=True),
        sa.Column('last_seen', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name='users_pkey'),
        postgresql_ignore_search_path=False
    )
    op.create_index('ix_users_timestamp_joined', 'users', ['timestamp_joined'], unique=False)
    op.create_index('ix_users_last_name', 'users', ['last_name'], unique=False)
    op.create_index('ix_users_first_name', 'users', ['first_name'], unique=False)
    op.create_index('ix_users_email', 'users', ['email'], unique=True)
    op.create_table('categories',
        sa.Column('id', sa.INTEGER(), server_default=sa.text("nextval('categories_id_seq'::regclass)"), autoincrement=True, nullable=False),
        sa.Column('name', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name='categories_pkey'),
        postgresql_ignore_search_path=False
    )
    op.create_index('ix_categories_name', 'categories', ['name'], unique=False)
    op.create_table('tutors',
        sa.Column('id', sa.INTEGER(), server_default=sa.text("nextval('tutors_id_seq'::regclass)"), autoincrement=True, nullable=False),
        sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('price', postgresql.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], name='tutors_user_id_fkey'),
        sa.PrimaryKeyConstraint('id', name='tutors_pkey'),
        postgresql_ignore_search_path=False
    )
    op.create_table('tutor_category',
        sa.Column('tutor_id', sa.INTEGER(), autoincrement=False, nullable=False),
        sa.Column('category_id', sa.INTEGER(), autoincrement=False, nullable=False),
        sa.ForeignKeyConstraint(['category_id'], ['categories.id'], name='tutor_category_category_id_fkey'),
        sa.ForeignKeyConstraint(['tutor_id'], ['tutors.id'], name='tutor_category_tutor_id_fkey'),
        sa.PrimaryKeyConstraint('tutor_id', 'category_id', name='tutor_category_pkey')
    )
    # ### end Alembic commands ###
Models.py
class Users(UserMixin, db.Model):
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    first_name = db.Column(db.String(64), index=True, nullable=False)
    last_name = db.Column(db.String(64), index=True, nullable=False)
    email = db.Column(db.String(120), index=True, unique=True, nullable=False)
    profile_img = db.Column(db.String(120), nullable=True)
    password_hash = db.Column(db.String(128), nullable=False)
    timestamp_joined = db.Column(db.DateTime, index=True, default=datetime.utcnow)
    about_me = db.Column(db.String(140), nullable=True)
    last_seen = db.Column(db.DateTime, default=datetime.utcnow)

    def set_password(self, password):
        self.password_hash = generate_password_hash(password)

    def check_password(self, password):
        return check_password_hash(self.password_hash, password)

    def avatar(self):
        digest = md5(self.email.lower().encode('utf-8')).hexdigest()
        return 'https://www.gravatar.com/avatar/{}?d=identicon'.format(digest)
    def __repr__(self):
        return '<User {}>'.format(self.email)
class Tutors(db.Model):
    __tablename__ = 'tutors'
    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    price = db.Column(db.Float)
    # Connecting this field to the association table.
    category = db.relationship("Categories", secondary="tutor_category")

    def __repr__(self):
        return '<Tutor id {} , price {}>'.format(self.user_id, self.price)


class Reviews(db.Model):
    __tablename__ = 'reviews'
    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    tutor_id = db.Column(db.Integer, db.ForeignKey('tutors.id'))
    rating = db.Column(db.Integer, index=True)
    timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)

    def __repr__(self):
        return '<Review User: {} Tutor: {} Rating: {}>'.format(self.user_id, self.tutor_id, self.rating)


class Categories(db.Model):
    __tablename__ = 'categories'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), index=True)
    # Connecting this field to the association table.
    tutor = db.relationship('Tutors', secondary="tutor_category")

    def __repr__(self):
        return '<Category {}>'.format(self.name)


# Declaring the association table
tutor_category = db.Table('tutor_category',
    db.Column('tutor_id', db.Integer, db.ForeignKey('tutors.id'), primary_key=True),
    db.Column('category_id', db.Integer, db.ForeignKey('categories.id'), primary_key=True)
)


@login.user_loader
def load_user(id):
    return Users.query.get(int(id))
Manage.py
from flask import Flask
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from flask_sqlalchemy import SQLAlchemy
from app import app
from app import models

db = SQLAlchemy(app)

POSTGRES = {
    'user': 'richard',
    'pw': 'richard2906',
    'db': 'codetutors',
    'host': 'localhost',
    'port': '5432',
}

app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://%(user)s:%(pw)s@%(host)s:%(port)s/%(db)s' % POSTGRES
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False

migrate = Migrate(app, db)
manager = Manager(app)
db.init_app(app)
manager.add_command('db', MigrateCommand)


def create_app():
    app = Flask(__name__)
    app.config['DEBUG'] = True
    app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://%(user)s:%(pw)s@%(host)s:%(port)s/%(db)s' % POSTGRES
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    db = SQLAlchemy(app)
    db.init_app(app)
    return app


if __name__ == '__main__':
    manager.run()
env.py
from __future__ import with_statement

import logging
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

from manage import db

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')

# add your model's MetaData object here
# for 'autogenerate' support
# from app.models import Users, Tutors, Categories, Reviews
target_metadata = db.metadata
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata

from flask import current_app
config.set_main_option(
    'sqlalchemy.url',
    str(current_app.extensions['migrate'].db.engine.url).replace('%', '%%'))
target_metadata = current_app.extensions['migrate'].db.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url, target_metadata=target_metadata, literal_binds=True
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """

    # this callback is used to prevent an auto-migration from being generated
    # when there are no changes to the schema
    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
    def process_revision_directives(context, revision, directives):
        if getattr(config.cmd_opts, 'autogenerate', False):
            script = directives[0]
            if script.upgrade_ops.is_empty():
                directives[:] = []
                logger.info('No changes in schema detected.')

    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            process_revision_directives=process_revision_directives,
            **current_app.extensions['migrate'].configure_args
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
I appreciate the help