diff --git a/app/__init__.py b/app/__init__.py
index 3c581ceeb..2ea2db1a0 100644
--- a/app/__init__.py
+++ b/app/__init__.py
@@ -1,4 +1,6 @@
 from flask import Flask
+from .routes.task_routes import bp as task_bp
+from .routes.goal_routes import bp as goal_bp
 from .db import db, migrate
 from .models import task, goal
 import os
@@ -6,9 +8,12 @@
 def create_app(config=None):
     app = Flask(__name__)
 
+    # Register Blueprints here
+    app.register_blueprint(task_bp)
+    app.register_blueprint(goal_bp)
+
     app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
     app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('SQLALCHEMY_DATABASE_URI')
-
     if config:
         # Merge `config` into the app's configuration
         # to override the app's default settings for testing
@@ -16,7 +21,5 @@ def create_app(config=None):
     db.init_app(app)
     migrate.init_app(app, db)
 
-
-    # Register Blueprints here
-
     return app
+
diff --git a/app/models/goal.py b/app/models/goal.py
index 44282656b..1385b9279 100644
--- a/app/models/goal.py
+++ b/app/models/goal.py
@@ -1,5 +1,28 @@
-from sqlalchemy.orm import Mapped, mapped_column
+from sqlalchemy.orm import Mapped, mapped_column, relationship
 from ..db import db
 
 class Goal(db.Model):
     id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
+    title: Mapped[str]
+    tasks: Mapped[list["Task"]] = relationship(back_populates="goal")
+
+    def to_dict(self):
+        return {
+            'id': self.id,
+            'title': self.title,
+            'task_ids': [task.id for task in self.tasks],
+            'tasks': [task.to_dict() for task in self.tasks]
+        }
+
+    def to_summary_dict(self):
+        dictionary = {"id": self.id, "title": self.title}
+        return dictionary
+
+    @classmethod
+    def from_dict(cls, dict_data: dict):
+        goal = Goal(
+            id=dict_data.get("id"),
+            title=dict_data["title"],
+        )
+        return goal
+
diff --git a/app/models/task.py b/app/models/task.py
index 5d99666a4..8216b58a0 100644
--- a/app/models/task.py
+++ b/app/models/task.py
@@ -1,5 +1,34 @@
-from sqlalchemy.orm import Mapped, mapped_column
+from sqlalchemy.orm import Mapped, mapped_column, relationship
+from sqlalchemy import ForeignKey
 from ..db import db
 
 class Task(db.Model):
     id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
+    title: Mapped[str]
+    description: Mapped[str]
+    completed_at: Mapped[str | None]
+    goal_id: Mapped[int | None] = mapped_column(ForeignKey("goal.id"))
+    goal: Mapped["Goal"] = relationship(back_populates="tasks")
+
+    def to_dict(self):
+        data = {
+            'id': self.id,
+            'title': self.title,
+            'description': self.description,
+            'is_complete': self.completed_at is not None
+        }
+        if self.goal_id:
+            data['goal_id'] = self.goal_id
+        return data
+
+    @classmethod
+    def from_dict(cls, dict_data: dict):
+        task = Task(
+            id=dict_data.get("id"),
+            title=dict_data["title"],
+            description=dict_data["description"],
+            completed_at=dict_data.get("completed_at")
+        )
+        return task
+
+
\ No newline at end of file
diff --git a/app/routes/goal_routes.py b/app/routes/goal_routes.py
index 3aae38d49..7d1c6915e 100644
--- a/app/routes/goal_routes.py
+++ b/app/routes/goal_routes.py
@@ -1 +1,65 @@
-from flask import Blueprint
\ No newline at end of file
+from flask import Blueprint, make_response, request, Response
+from .route_utilities import validate_model
+from app.models.goal import Goal
+from app.models.task import Task
+from ..db import db
+
+bp = Blueprint("goal_bp", __name__, url_prefix="/goals")
+
+@bp.post("")
+def create_goal():
+    request_body = request.get_json()
+    # Create goal here so we can shape the response as the tests expect
+    try:
+        new_goal = Goal.from_dict(request_body)
+    except KeyError:
+        # Match test expectation for invalid create payload
+        return make_response({"details": "Invalid data"}, 400)
+
+    db.session.add(new_goal)
+    db.session.commit()
+
+    return make_response({"id": new_goal.id, "title": new_goal.title}, 201)
+
+@bp.get("")
+def get_all_goals():
+    query = db.select(Goal)
+    goals = db.session.scalars(query.order_by(Goal.id))
+    return [g.to_summary_dict() for g in goals]
+
+@bp.get("/<id>")
+def get_one_goal(id):
+    goal = validate_model(Goal, id)
+    return goal.to_summary_dict()
+
+@bp.put("/<id>")
+def update_goal(id):
+    goal = validate_model(Goal, id)
+    request_body = request.get_json()
+    goal.title = request_body.get("title")
+    db.session.commit()
+    return Response(status=204, mimetype="application/json")
+
+@bp.delete("/<id>")
+def delete_goal(id):
+    goal = validate_model(Goal, id)
+    db.session.delete(goal)
+    db.session.commit()
+    return Response(status=204, mimetype="application/json")
+
+@bp.post("/<goal_id>/tasks")
+def add_task_to_goal(goal_id):
+    goal = validate_model(Goal, goal_id)
+    request_body = request.get_json()
+    tasks = []
+    for task_id in request_body['task_ids']:
+        task = validate_model(Task, task_id)
+        tasks.append(task)
+    goal.tasks = tasks
+    db.session.commit()
+    return {"id": goal.id, "task_ids": [task.id for task in goal.tasks]}
+
+@bp.get("/<goal_id>/tasks")
+def get_tasks_by_goal(goal_id):
+    goal = validate_model(Goal, goal_id)
+    return {"id": goal.id, "title": goal.title, "tasks": [task.to_dict() for task in goal.tasks]}
diff --git a/app/routes/route_utilities.py b/app/routes/route_utilities.py
new file mode 100644
index 000000000..29132d3f0
--- /dev/null
+++ b/app/routes/route_utilities.py
@@ -0,0 +1,49 @@
+from flask import abort, make_response
+from ..db import db
+
+def validate_model(cls, model_id):
+    try:
+        model_id = int(model_id)
+    except ValueError:
+        response = {"message": f"{cls.__name__} {model_id} invalid"}
+        abort(make_response(response, 400))
+
+    query = db.select(cls).where(cls.id == model_id)
+    model = db.session.scalar(query)
+
+    if not model:
+        response = {"message": f"{cls.__name__} {model_id} not found"}
+        abort(make_response(response, 404))
+
+    return model
+
+
+def create_model(cls, model_data):
+    try:
+        new_model = cls.from_dict(model_data)
+    except KeyError:
+        response = {"details": "Invalid data"}
+        abort(make_response(response, 400))
+
+    db.session.add(new_model)
+    db.session.commit()
+
+    return new_model.to_dict(), 201
+
+
+def build_query_with_filters(cls, filters=None):
+    query = db.select(cls)
+    if filters:
+        for attribute, value in filters.items():
+            if hasattr(cls, attribute) and value is not None:
+                query = query.where(getattr(cls, attribute).ilike(f"%{value}%"))
+    return query
+
+
+def get_models_with_filters(cls, filters=None):
+    query = build_query_with_filters(cls, filters)
+
+    models = db.session.scalars(query.order_by(cls.id))
+    models_response = [model.to_dict() for model in models]
+
+    return models_response
diff --git a/app/routes/task_routes.py b/app/routes/task_routes.py
index 3aae38d49..09c2f1307 100644
--- a/app/routes/task_routes.py
+++ b/app/routes/task_routes.py
@@ -1 +1,78 @@
-from flask import Blueprint
\ No newline at end of file
+from flask import Blueprint, request, Response
+from ..slack_api.post_message import post_message_with_slack_bot
+from .route_utilities import create_model, validate_model, build_query_with_filters
+from app.models.task import Task
+from datetime import datetime, timezone
+from ..db import db
+
+bp = Blueprint("task_bp", __name__, url_prefix="/tasks")
+
+@bp.post("")
+def create_task():
+    request_body = request.get_json()
+    return create_model(Task, request_body)
+
+@bp.get("")
+def get_all_tasks():
+    title_param = request.args.get("title")
+    description_param = request.args.get("description")
+
+    query = build_query_with_filters(Task, {
+        "title": title_param,
+        "description": description_param
+    })
+
+    # Sorting logic
+    sort_param = request.args.get("sort")
+    if sort_param == "asc":
+        query = query.order_by(Task.title.asc())
+    elif sort_param == "desc":
+        query = query.order_by(Task.title.desc())
+    else:
+        query = query.order_by(Task.id)
+
+    tasks = db.session.scalars(query)
+
+    response = []
+    for task in tasks:
+        response.append(task.to_dict())
+
+    return response
+
+@bp.put("/<id>")
+def update_task(id):
+    task = validate_model(Task, id)
+    request_body = request.get_json()
+    task.title = request_body.get("title")
+    task.description = request_body.get("description")
+    task.completed_at = request_body.get("completed_at")
+    db.session.commit()
+
+    return Response(status=204, mimetype="application/json")
+
+@bp.delete("/<id>")
+def delete_task(id):
+    task = validate_model(Task, id)
+    db.session.delete(task)
+    db.session.commit()
+    return Response(status=204, mimetype="application/json")
+
+@bp.patch("/<id>/mark_complete")
+def mark_complete(id):
+    task = validate_model(Task, id)
+    task.completed_at = datetime.now(timezone.utc)
+    db.session.commit()
+    post_message_with_slack_bot(f"Someone just completed the task {task.title}")
+    return Response(status=204, mimetype="application/json")
+
+@bp.patch("/<id>/mark_incomplete")
+def mark_incomplete(id):
+    task = validate_model(Task, id)
+    task.completed_at = None
+    db.session.commit()
+    return Response(status=204, mimetype="application/json")
+
+@bp.get("/<id>")
+def get_tasks_by_id(id):
+    task = validate_model(Task, id)
+    return task.to_dict()
diff --git a/app/slack_api/post_message.py b/app/slack_api/post_message.py
new file mode 100644
index 000000000..485fb092b
--- /dev/null
+++ b/app/slack_api/post_message.py
@@ -0,0 +1,22 @@
+import os
+import requests
+
+# Slack configuration is read from the environment at import time
+token = os.environ.get("SLACK_BOT_TOKEN")
+url = os.environ.get("SLACK_API_URL")
+
+
+def post_message_with_slack_bot(text: str) -> dict:
+    # Skip the API call when Slack is not configured (e.g. during local test runs)
+    if not token or not url:
+        return {}
+
+    headers = {
+        "Authorization": f"Bearer {token}",
+        "Content-Type": "application/json",
+    }
+    payload = {"channel": "C09Q6QRAJN6", "text": text}
+
+    resp = requests.post(url, headers=headers, json=payload, timeout=10)
+
+    return resp.json()
diff --git a/migrations/README b/migrations/README
new file mode 100644
index 000000000..0e0484415
--- /dev/null
+++ b/migrations/README
@@ -0,0 +1 @@
+Single-database configuration for Flask.
diff --git a/migrations/alembic.ini b/migrations/alembic.ini
new file mode 100644
index 000000000..ec9d45c26
--- /dev/null
+++ b/migrations/alembic.ini
@@ -0,0 +1,50 @@
+# A generic, single database configuration.
+ +[alembic] +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic,flask_migrate + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[logger_flask_migrate] +level = INFO +handlers = +qualname = flask_migrate + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/migrations/env.py b/migrations/env.py new file mode 100644 index 000000000..4c9709271 --- /dev/null +++ b/migrations/env.py @@ -0,0 +1,113 @@ +import logging +from logging.config import fileConfig + +from flask import current_app + +from alembic import context + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +fileConfig(config.config_file_name) +logger = logging.getLogger('alembic.env') + + +def get_engine(): + try: + # this works with Flask-SQLAlchemy<3 and Alchemical + return current_app.extensions['migrate'].db.get_engine() + except (TypeError, AttributeError): + # this works with Flask-SQLAlchemy>=3 + return current_app.extensions['migrate'].db.engine + + +def get_engine_url(): + try: + return get_engine().url.render_as_string(hide_password=False).replace( + '%', '%%') + except AttributeError: + return str(get_engine().url).replace('%', '%%') + + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +config.set_main_option('sqlalchemy.url', get_engine_url()) +target_db = current_app.extensions['migrate'].db + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def get_metadata(): + if hasattr(target_db, 'metadatas'): + return target_db.metadatas[None] + return target_db.metadata + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, target_metadata=get_metadata(), literal_binds=True + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + + # this callback is used to prevent an auto-migration from being generated + # when there are no changes to the schema + # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html + def process_revision_directives(context, revision, directives): + if getattr(config.cmd_opts, 'autogenerate', False): + script = directives[0] + if script.upgrade_ops.is_empty(): + directives[:] = [] + logger.info('No changes in schema detected.') + + conf_args = current_app.extensions['migrate'].configure_args + if conf_args.get("process_revision_directives") is None: + conf_args["process_revision_directives"] = process_revision_directives + + connectable = get_engine() + + with connectable.connect() as connection: + context.configure( + connection=connection, + target_metadata=get_metadata(), + **conf_args + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/migrations/script.py.mako b/migrations/script.py.mako new file mode 100644 index 000000000..2c0156303 --- /dev/null +++ b/migrations/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/migrations/versions/1337c763f5d2_add_goal_to_task.py b/migrations/versions/1337c763f5d2_add_goal_to_task.py new file mode 100644 index 000000000..3c9bcf074 --- /dev/null +++ b/migrations/versions/1337c763f5d2_add_goal_to_task.py @@ -0,0 +1,32 @@ +"""add goal to task + +Revision ID: 1337c763f5d2 +Revises: e549ce44660b +Create Date: 2025-11-05 01:20:04.108772 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '1337c763f5d2' +down_revision = 'e549ce44660b' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('goal', schema=None) as batch_op: + batch_op.drop_column('completed_at') + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('goal', schema=None) as batch_op: + batch_op.add_column(sa.Column('completed_at', sa.VARCHAR(), autoincrement=False, nullable=False)) + + # ### end Alembic commands ### diff --git a/migrations/versions/2d5cb0a14623_updated_and_add_model_goal.py b/migrations/versions/2d5cb0a14623_updated_and_add_model_goal.py new file mode 100644 index 000000000..05c77e2cb --- /dev/null +++ b/migrations/versions/2d5cb0a14623_updated_and_add_model_goal.py @@ -0,0 +1,34 @@ +"""updated and add model Goal + +Revision ID: 2d5cb0a14623 +Revises: e8d26c4e9d93 +Create Date: 2025-11-05 22:05:57.578417 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '2d5cb0a14623' +down_revision = 'e8d26c4e9d93' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('task', schema=None) as batch_op: + batch_op.add_column(sa.Column('goal_id', sa.Integer(), nullable=True)) + batch_op.create_foreign_key(None, 'goal', ['goal_id'], ['id']) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('task', schema=None) as batch_op: + batch_op.drop_constraint(None, type_='foreignkey') + batch_op.drop_column('goal_id') + + # ### end Alembic commands ### diff --git a/migrations/versions/9aa90a30ad5e_initial_migration.py b/migrations/versions/9aa90a30ad5e_initial_migration.py new file mode 100644 index 000000000..0f710cdb9 --- /dev/null +++ b/migrations/versions/9aa90a30ad5e_initial_migration.py @@ -0,0 +1,42 @@ +"""Initial migration. + +Revision ID: 9aa90a30ad5e +Revises: +Create Date: 2025-11-01 21:18:11.585357 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '9aa90a30ad5e' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('goal', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('title', sa.String(), nullable=False), + sa.Column('description', sa.String(), nullable=False), + sa.Column('completed_at', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('task', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('title', sa.String(), nullable=False), + sa.Column('description', sa.String(), nullable=False), + sa.Column('completed_at', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('task') + op.drop_table('goal') + # ### end Alembic commands ### diff --git a/migrations/versions/e549ce44660b_recreate_model_migrations.py b/migrations/versions/e549ce44660b_recreate_model_migrations.py new file mode 100644 index 000000000..060a68ef1 --- /dev/null +++ b/migrations/versions/e549ce44660b_recreate_model_migrations.py @@ -0,0 +1,36 @@ +"""Recreate model migrations + +Revision ID: e549ce44660b +Revises: 9aa90a30ad5e +Create Date: 2025-11-01 23:19:53.745986 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'e549ce44660b' +down_revision = '9aa90a30ad5e' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('task', schema=None) as batch_op: + batch_op.alter_column('completed_at', + existing_type=sa.VARCHAR(), + nullable=True) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('task', schema=None) as batch_op: + batch_op.alter_column('completed_at', + existing_type=sa.VARCHAR(), + nullable=False) + + # ### end Alembic commands ### diff --git a/migrations/versions/e8d26c4e9d93_recreate_model_migrations.py b/migrations/versions/e8d26c4e9d93_recreate_model_migrations.py new file mode 100644 index 000000000..12a0b2e0f --- /dev/null +++ b/migrations/versions/e8d26c4e9d93_recreate_model_migrations.py @@ -0,0 +1,36 @@ +"""Recreate model migrations + +Revision ID: e8d26c4e9d93 +Revises: 1337c763f5d2 +Create Date: 2025-11-05 03:01:20.540989 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'e8d26c4e9d93' +down_revision = '1337c763f5d2' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('goal', schema=None) as batch_op: + batch_op.alter_column('description', + existing_type=sa.VARCHAR(), + nullable=True) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('goal', schema=None) as batch_op: + batch_op.alter_column('description', + existing_type=sa.VARCHAR(), + nullable=False) + + # ### end Alembic commands ### diff --git a/tests/conftest.py b/tests/conftest.py index a01499583..4762cef2b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -27,7 +27,7 @@ def expire_session(sender, response, **extra): db.create_all() yield app - # close and remove the temporary database + with app.app_context(): db.drop_all() diff --git a/tests/test_wave_01.py b/tests/test_wave_01.py index fac95a0a3..b994830cb 100644 --- a/tests/test_wave_01.py +++ b/tests/test_wave_01.py @@ -2,7 +2,7 @@ from app.db import db import pytest -@pytest.mark.skip(reason="No way to test this feature yet") + def test_task_to_dict(): #Arrange new_task = Task(id = 1, title="Make My Bed", @@ -19,7 +19,7 @@ def test_task_to_dict(): assert task_dict["description"] == "Start the day off right!" assert task_dict["is_complete"] == False -@pytest.mark.skip(reason="No way to test this feature yet") + def test_task_to_dict_missing_id(): #Arrange new_task = Task(title="Make My Bed", @@ -36,7 +36,7 @@ def test_task_to_dict_missing_id(): assert task_dict["description"] == "Start the day off right!" assert task_dict["is_complete"] == False -@pytest.mark.skip(reason="No way to test this feature yet") + def test_task_to_dict_missing_title(): #Arrange new_task = Task(id = 1, @@ -53,7 +53,8 @@ def test_task_to_dict_missing_title(): assert task_dict["description"] == "Start the day off right!" assert task_dict["is_complete"] == False -@pytest.mark.skip(reason="No way to test this feature yet") + + def test_task_from_dict(): #Arrange task_dict = { @@ -70,7 +71,7 @@ def test_task_from_dict(): assert task_obj.description == "Start the day off right!" 
assert task_obj.completed_at is None -@pytest.mark.skip(reason="No way to test this feature yet") + def test_task_from_dict_no_title(): #Arrange task_dict = { @@ -82,7 +83,7 @@ def test_task_from_dict_no_title(): with pytest.raises(KeyError, match = 'title'): Task.from_dict(task_dict) -@pytest.mark.skip(reason="No way to test this feature yet") + def test_task_from_dict_no_description(): #Arrange task_dict = { @@ -94,7 +95,7 @@ def test_task_from_dict_no_description(): with pytest.raises(KeyError, match = 'description'): Task.from_dict(task_dict) -@pytest.mark.skip(reason="No way to test this feature yet") + def test_get_tasks_no_saved_tasks(client): # Act response = client.get("/tasks") @@ -105,7 +106,7 @@ def test_get_tasks_no_saved_tasks(client): assert response_body == [] -@pytest.mark.skip(reason="No way to test this feature yet") + def test_get_tasks_one_saved_tasks(client, one_task): # Act response = client.get("/tasks") @@ -124,7 +125,7 @@ def test_get_tasks_one_saved_tasks(client, one_task): ] -@pytest.mark.skip(reason="No way to test this feature yet") + def test_get_task(client, one_task): # Act response = client.get("/tasks/1") @@ -140,7 +141,7 @@ def test_get_task(client, one_task): } -@pytest.mark.skip(reason="No way to test this feature yet") + def test_get_task_not_found(client): # Act response = client.get("/tasks/1") @@ -148,14 +149,13 @@ def test_get_task_not_found(client): # Assert assert response.status_code == 404 + assert response_body == { + "message": "Task 1 not found" + } + - raise Exception("Complete test with assertion about response body") - # ***************************************************************** - # **Complete test with assertion about response body*************** - # ***************************************************************** -@pytest.mark.skip(reason="No way to test this feature yet") def test_create_task(client): # Act response = client.post("/tasks", json={ @@ -181,7 +181,7 @@ def test_create_task(client): assert new_task.description == "Test Description" assert new_task.completed_at == None -@pytest.mark.skip(reason="No way to test this feature yet") + def test_update_task(client, one_task): # Act response = client.put("/tasks/1", json={ @@ -200,8 +200,6 @@ def test_update_task(client, one_task): assert task.completed_at == None - -@pytest.mark.skip(reason="No way to test this feature yet") def test_update_task_not_found(client): # Act response = client.put("/tasks/1", json={ @@ -212,14 +210,13 @@ def test_update_task_not_found(client): # Assert assert response.status_code == 404 + assert response_body == { + "message": "Task 1 not found" + } + - raise Exception("Complete test with assertion about response body") - # ***************************************************************** - # **Complete test with assertion about response body*************** - # ***************************************************************** -@pytest.mark.skip(reason="No way to test this feature yet") def test_delete_task(client, one_task): # Act response = client.delete("/tasks/1") @@ -230,7 +227,7 @@ def test_delete_task(client, one_task): query = db.select(Task).where(Task.id == 1) assert db.session.scalar(query) == None -@pytest.mark.skip(reason="No way to test this feature yet") + def test_delete_task_not_found(client): # Act response = client.delete("/tasks/1") @@ -238,16 +235,15 @@ def test_delete_task_not_found(client): # Assert assert response.status_code == 404 + assert response_body == { + "message": "Task 1 not found" + } - raise 
Exception("Complete test with assertion about response body") - # ***************************************************************** - # **Complete test with assertion about response body*************** - # ***************************************************************** assert db.session.scalars(db.select(Task)).all() == [] -@pytest.mark.skip(reason="No way to test this feature yet") + def test_create_task_must_contain_title(client): # Act response = client.post("/tasks", json={ @@ -264,7 +260,7 @@ def test_create_task_must_contain_title(client): assert db.session.scalars(db.select(Task)).all() == [] -@pytest.mark.skip(reason="No way to test this feature yet") + def test_create_task_must_contain_description(client): # Act response = client.post("/tasks", json={ diff --git a/tests/test_wave_02.py b/tests/test_wave_02.py index a087e0909..41f3eccde 100644 --- a/tests/test_wave_02.py +++ b/tests/test_wave_02.py @@ -1,7 +1,7 @@ import pytest -@pytest.mark.skip(reason="No way to test this feature yet") + def test_get_tasks_sorted_asc(client, three_tasks): # Act response = client.get("/tasks?sort=asc") @@ -29,7 +29,7 @@ def test_get_tasks_sorted_asc(client, three_tasks): ] -@pytest.mark.skip(reason="No way to test this feature yet") + def test_get_tasks_sorted_desc(client, three_tasks): # Act response = client.get("/tasks?sort=desc") diff --git a/tests/test_wave_03.py b/tests/test_wave_03.py index d7d441695..3e58c5af2 100644 --- a/tests/test_wave_03.py +++ b/tests/test_wave_03.py @@ -6,7 +6,7 @@ import pytest -@pytest.mark.skip(reason="No way to test this feature yet") + def test_mark_complete_on_incomplete_task(client, one_task): # Arrange """ @@ -34,7 +34,7 @@ def test_mark_complete_on_incomplete_task(client, one_task): assert db.session.scalar(query).completed_at -@pytest.mark.skip(reason="No way to test this feature yet") + def test_mark_incomplete_on_complete_task(client, completed_task): # Act response = client.patch("/tasks/1/mark_incomplete") @@ -46,7 +46,8 @@ def test_mark_incomplete_on_complete_task(client, completed_task): assert db.session.scalar(query).completed_at == None -@pytest.mark.skip(reason="No way to test this feature yet") + + def test_mark_complete_on_completed_task(client, completed_task): # Arrange """ @@ -74,7 +75,8 @@ def test_mark_complete_on_completed_task(client, completed_task): query = db.select(Task).where(Task.id == 1) assert db.session.scalar(query).completed_at -@pytest.mark.skip(reason="No way to test this feature yet") + + def test_mark_incomplete_on_incomplete_task(client, one_task): # Act response = client.patch("/tasks/1/mark_incomplete") @@ -86,7 +88,8 @@ def test_mark_incomplete_on_incomplete_task(client, one_task): assert db.session.scalar(query).completed_at == None -@pytest.mark.skip(reason="No way to test this feature yet") + + def test_mark_complete_missing_task(client): # Act response = client.patch("/tasks/1/mark_complete") @@ -94,14 +97,13 @@ def test_mark_complete_missing_task(client): # Assert assert response.status_code == 404 + assert response_body == { + "message": "Task 1 not found" + } + - raise Exception("Complete test with assertion about response body") - # ***************************************************************** - # **Complete test with assertion about response body*************** - # ***************************************************************** -@pytest.mark.skip(reason="No way to test this feature yet") def test_mark_incomplete_missing_task(client): # Act response = client.patch("/tasks/1/mark_incomplete") @@ -109,8 
+111,7 @@ def test_mark_incomplete_missing_task(client): # Assert assert response.status_code == 404 + assert response_body == { + "message": "Task 1 not found" + } - raise Exception("Complete test with assertion about response body") - # ***************************************************************** - # **Complete test with assertion about response body*************** - # ***************************************************************** diff --git a/tests/test_wave_05.py b/tests/test_wave_05.py index b7cc330ae..8777597f7 100644 --- a/tests/test_wave_05.py +++ b/tests/test_wave_05.py @@ -1,7 +1,8 @@ from app.models.goal import Goal +from app.db import db import pytest -@pytest.mark.skip(reason="No way to test this feature yet") + def test_goal_to_dict(): #Arrange new_goal = Goal(id=1, title="Seize the Day!") @@ -13,7 +14,7 @@ def test_goal_to_dict(): assert goal_dict["id"] == 1 assert goal_dict["title"] == "Seize the Day!" -@pytest.mark.skip(reason="No way to test this feature yet") + def test_goal_to_dict_no_id(): #Arrange new_goal = Goal(title="Seize the Day!") @@ -25,7 +26,7 @@ def test_goal_to_dict_no_id(): assert goal_dict["id"] is None assert goal_dict["title"] == "Seize the Day!" -@pytest.mark.skip(reason="No way to test this feature yet") + def test_goal_to_dict_no_title(): #Arrange new_goal = Goal(id=1) @@ -39,7 +40,7 @@ def test_goal_to_dict_no_title(): -@pytest.mark.skip(reason="No way to test this feature yet") + def test_goal_from_dict(): #Arrange goal_dict = { @@ -52,7 +53,7 @@ def test_goal_from_dict(): #Assert assert goal_obj.title == "Seize the Day!" -@pytest.mark.skip(reason="No way to test this feature yet") + def test_goal_from_dict_no_title(): #Arrange goal_dict = { @@ -63,7 +64,7 @@ def test_goal_from_dict_no_title(): Goal.from_dict(goal_dict) -@pytest.mark.skip(reason="No way to test this feature yet") + def test_get_goals_no_saved_goals(client): # Act response = client.get("/goals") @@ -74,7 +75,7 @@ def test_get_goals_no_saved_goals(client): assert response_body == [] -@pytest.mark.skip(reason="No way to test this feature yet") + def test_get_goals_one_saved_goal(client, one_goal): # Act response = client.get("/goals") @@ -91,7 +92,7 @@ def test_get_goals_one_saved_goal(client, one_goal): ] -@pytest.mark.skip(reason="No way to test this feature yet") + def test_get_goal(client, one_goal): # Act response = client.get("/goals/1") @@ -105,22 +106,18 @@ def test_get_goal(client, one_goal): } -@pytest.mark.skip(reason="test to be completed by student") + def test_get_goal_not_found(client): - pass # Act response = client.get("/goals/1") response_body = response.get_json() - raise Exception("Complete test") # Assert - # ---- Complete Test ---- - # assertion 1 goes here - # assertion 2 goes here - # ---- Complete Test ---- + assert response.status_code == 404 + assert response_body == {"message": "Goal 1 not found"} + -@pytest.mark.skip(reason="No way to test this feature yet") def test_create_goal(client): # Act response = client.post("/goals", json={ @@ -136,34 +133,36 @@ def test_create_goal(client): } -@pytest.mark.skip(reason="test to be completed by student") + def test_update_goal(client, one_goal): - raise Exception("Complete test") # Act - # ---- Complete Act Here ---- + response = client.put("/goals/1", json={ + "title": "Updated Goal Title" + }) # Assert - # ---- Complete Assertions Here ---- - # assertion 1 goes here - # assertion 2 goes here - # assertion 3 goes here - # ---- Complete Assertions Here ---- + assert response.status_code == 204 + + query = 
db.select(Goal).where(Goal.id == 1) + goal = db.session.scalar(query) + + assert goal.title == "Updated Goal Title" + -@pytest.mark.skip(reason="test to be completed by student") def test_update_goal_not_found(client): - raise Exception("Complete test") # Act - # ---- Complete Act Here ---- + response = client.put("/goals/1", json={ + "title": "Updated Goal Title" + }) + response_body = response.get_json() # Assert - # ---- Complete Assertions Here ---- - # assertion 1 goes here - # assertion 2 goes here - # ---- Complete Assertions Here ---- + assert response.status_code == 404 + assert response_body == {"message": "Goal 1 not found"} + -@pytest.mark.skip(reason="No way to test this feature yet") def test_delete_goal(client, one_goal): # Act response = client.delete("/goals/1") @@ -177,28 +176,21 @@ def test_delete_goal(client, one_goal): response_body = response.get_json() assert "message" in response_body + assert response_body == {"message": "Goal 1 not found"} - raise Exception("Complete test with assertion about response body") - # ***************************************************************** - # **Complete test with assertion about response body*************** - # ***************************************************************** -@pytest.mark.skip(reason="test to be completed by student") def test_delete_goal_not_found(client): - raise Exception("Complete test") - # Act - # ---- Complete Act Here ---- + response = client.delete("/goals/1") + response_body = response.get_json() # Assert - # ---- Complete Assertions Here ---- - # assertion 1 goes here - # assertion 2 goes here - # ---- Complete Assertions Here ---- + assert response.status_code == 404 + assert response_body == {"message": "Goal 1 not found"} + -@pytest.mark.skip(reason="No way to test this feature yet") def test_create_goal_missing_title(client): # Act response = client.post("/goals", json={}) diff --git a/tests/test_wave_06.py b/tests/test_wave_06.py index 727fce93a..44af74164 100644 --- a/tests/test_wave_06.py +++ b/tests/test_wave_06.py @@ -3,7 +3,7 @@ import pytest -@pytest.mark.skip(reason="No way to test this feature yet") + def test_post_task_ids_to_goal(client, one_goal, three_tasks): # Act response = client.post("/goals/1/tasks", json={ @@ -25,7 +25,7 @@ def test_post_task_ids_to_goal(client, one_goal, three_tasks): assert len(db.session.scalar(query).tasks) == 3 -@pytest.mark.skip(reason="No way to test this feature yet") + def test_post_task_ids_to_goal_overwrites_existing_tasks(client, one_task_belongs_to_one_goal, three_tasks): # Act response = client.post("/goals/1/tasks", json={ @@ -45,7 +45,7 @@ def test_post_task_ids_to_goal_overwrites_existing_tasks(client, one_task_belong assert len(db.session.scalar(query).tasks) == 2 -@pytest.mark.skip(reason="No way to test this feature yet") + def test_get_tasks_for_specific_goal_no_goal(client): # Act response = client.get("/goals/1/tasks") @@ -53,14 +53,13 @@ def test_get_tasks_for_specific_goal_no_goal(client): # Assert assert response.status_code == 404 + assert response_body == { + "message": "Goal 1 not found" + } + - raise Exception("Complete test with assertion about response body") - # ***************************************************************** - # **Complete test with assertion about response body*************** - # ***************************************************************** -@pytest.mark.skip(reason="No way to test this feature yet") def test_get_tasks_for_specific_goal_no_tasks(client, one_goal): # Act response = 
client.get("/goals/1/tasks") @@ -77,7 +76,7 @@ def test_get_tasks_for_specific_goal_no_tasks(client, one_goal): } -@pytest.mark.skip(reason="No way to test this feature yet") + def test_get_tasks_for_specific_goal(client, one_task_belongs_to_one_goal): # Act response = client.get("/goals/1/tasks") @@ -102,7 +101,7 @@ def test_get_tasks_for_specific_goal(client, one_task_belongs_to_one_goal): } -@pytest.mark.skip(reason="No way to test this feature yet") + def test_get_task_includes_goal_id(client, one_task_belongs_to_one_goal): response = client.get("/tasks/1") response_body = response.get_json() diff --git a/tests/test_wave_07.py b/tests/test_wave_07.py index 7e7cef55a..562294830 100644 --- a/tests/test_wave_07.py +++ b/tests/test_wave_07.py @@ -4,7 +4,7 @@ from app.models.task import Task from app.routes.route_utilities import create_model, validate_model -@pytest.mark.skip(reason="No way to test this feature yet") + def test_route_utilities_validate_model_with_task(client, three_tasks): #Act task_1 = validate_model(Task, 1) @@ -24,7 +24,7 @@ def test_route_utilities_validate_model_with_task(client, three_tasks): assert task_3.title == "Pay my outstanding tickets 😭" -@pytest.mark.skip(reason="No way to test this feature yet") + def test_route_utilities_validate_model_with_task_invalid_id(client, three_tasks): #Act & Assert # Calling `validate_model` without being invoked by a route will @@ -35,25 +35,23 @@ def test_route_utilities_validate_model_with_task_invalid_id(client, three_tasks # Test that the correct status code and response message are returned response = e.value.get_response() assert response.status_code == 400 - - raise Exception("Complete test with an assertion about the response body") + assert response.get_json() == {"message": "Task One invalid"} # ***************************************************************************** # ** Complete test with an assertion about the response body **************** # ***************************************************************************** -@pytest.mark.skip(reason="No way to test this feature yet") + def test_route_utilities_validate_model_with_task_missing_id(client, three_tasks): #Act & Assert with pytest.raises(HTTPException) as e: result_task = validate_model(Task, 4) - - raise Exception("Complete test with assertion status code and response body") - # ***************************************************************************** - # **Complete test with assertion about status code response body*************** - # ***************************************************************************** + response = e.value.get_response() + assert response.status_code == 404 + assert response.get_json() == {"message": "Task 4 not found"} + -@pytest.mark.skip(reason="No way to test this feature yet") + def test_route_utilities_validate_model_with_goal(client, one_goal): #Act goal_1 = validate_model(Goal, 1) @@ -62,29 +60,27 @@ def test_route_utilities_validate_model_with_goal(client, one_goal): assert goal_1.id == 1 assert goal_1.title == "Build a habit of going outside daily" -@pytest.mark.skip(reason="No way to test this feature yet") + def test_route_utilities_validate_model_with_goal_invalid_id(client, one_goal): #Act & Assert with pytest.raises(HTTPException) as e: result_task = validate_model(Goal, "One") - - raise Exception("Complete test with assertion status code and response body") - # ***************************************************************************** - # **Complete test with assertion about status code response 
body*************** - # ***************************************************************************** + response = e.value.get_response() + assert response.status_code == 400 + assert response.get_json() == {"message": "Goal One invalid"} + + -@pytest.mark.skip(reason="No way to test this feature yet") def test_route_utilities_validate_model_with_goal_missing_id(client, one_goal): #Act & Assert with pytest.raises(HTTPException) as e: result_task = validate_model(Goal, 4) - - raise Exception("Complete test with assertion status code and response body") - # ***************************************************************************** - # **Complete test with assertion about status code response body*************** - # ***************************************************************************** + response = e.value.get_response() + assert response.status_code == 404 + assert response.get_json() == {"message": "Goal 4 not found"} + + -@pytest.mark.skip(reason="No way to test this feature yet") def test_route_utilities_create_model_with_task(client): #Arrange request_body = { @@ -103,7 +99,7 @@ def test_route_utilities_create_model_with_task(client): assert response[0]["is_complete"] == False assert response[1] == 201 -@pytest.mark.skip(reason="No way to test this feature yet") + def test_route_utilities_create_model_with_task_missing_title(client): #Arrange request_body = { @@ -120,7 +116,7 @@ def test_route_utilities_create_model_with_task_missing_title(client): assert response.get_json() == {"details": "Invalid data"} -@pytest.mark.skip(reason="No way to test this feature yet") + def test_route_utilities_create_model_with_goal(client): #Arrange request_body = { @@ -135,7 +131,7 @@ def test_route_utilities_create_model_with_goal(client): assert response[0]["title"] == "Seize the Day!" assert response[1] == 201 -@pytest.mark.skip(reason="No way to test this feature yet") + def test_route_utilities_create_model_with_goal_missing_title(client): #Arrange request_body = { @@ -144,8 +140,7 @@ def test_route_utilities_create_model_with_goal_missing_title(client): #Act with pytest.raises(HTTPException) as e: create_model(Goal, request_body) - - raise Exception("Complete test with assertion status code and response body") - # ***************************************************************************** - # **Complete test with assertion about status code response body*************** - # ***************************************************************************** + response = e.value.get_response() + assert response.status_code == 400 + assert response.get_json() == {"details": "Invalid data"} +
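Note: none of the unskipped test waves exercise the new title/description query filters that this patch wires into GET /tasks via build_query_with_filters. Below is a minimal sketch of such a test; it assumes the three_tasks fixture in tests/conftest.py creates the tasks whose titles are asserted in test_wave_07 ("Water the garden 🌷", "Answer forgotten email 📧", "Pay my outstanding tickets 😭"), and the test name and file placement are illustrative only.

def test_get_tasks_filtered_by_title(client, three_tasks):
    # Act: build_query_with_filters applies a case-insensitive ilike match on title
    response = client.get("/tasks?title=water")
    response_body = response.get_json()

    # Assert: only the matching task is returned
    assert response.status_code == 200
    assert len(response_body) == 1
    assert response_body[0]["title"] == "Water the garden 🌷"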