Merge pull request #238 from dod-ccpo/request-schema-#159719829
Request schema #159719829
commit 4b25cc19d9
alembic/versions/04fe150da553_add_request_revision.py (new file, 79 lines)
@@ -0,0 +1,79 @@
"""add request revision

Revision ID: 04fe150da553
Revises: 2c2a2af465d3
Create Date: 2018-08-30 13:28:16.928946

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import ARRAY as Array
from sqlalchemy.dialects import postgresql


# revision identifiers, used by Alembic.
revision = '04fe150da553'
down_revision = '2c2a2af465d3'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('request_revisions',
        sa.Column('time_created', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('time_updated', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('id', postgresql.UUID(as_uuid=True), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('request_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('am_poc', sa.Boolean(), nullable=True),
        sa.Column('dodid_poc', sa.String(), nullable=True),
        sa.Column('email_poc', sa.String(), nullable=True),
        sa.Column('fname_poc', sa.String(), nullable=True),
        sa.Column('lname_poc', sa.String(), nullable=True),
        sa.Column('jedi_usage', sa.String(), nullable=True),
        sa.Column('start_date', sa.Date(), nullable=True),
        sa.Column('cloud_native', sa.String(), nullable=True),
        sa.Column('dollar_value', sa.Integer(), nullable=True),
        sa.Column('dod_component', sa.String(), nullable=True),
        sa.Column('data_transfers', sa.String(), nullable=True),
        sa.Column('expected_completion_date', sa.String(), nullable=True),
        sa.Column('jedi_migration', sa.String(), nullable=True),
        sa.Column('num_software_systems', sa.Integer(), nullable=True),
        sa.Column('number_user_sessions', sa.Integer(), nullable=True),
        sa.Column('average_daily_traffic', sa.Integer(), nullable=True),
        sa.Column('engineering_assessment', sa.String(), nullable=True),
        sa.Column('technical_support_team', sa.String(), nullable=True),
        sa.Column('estimated_monthly_spend', sa.Integer(), nullable=True),
        sa.Column('average_daily_traffic_gb', sa.Integer(), nullable=True),
        sa.Column('rationalization_software_systems', sa.String(), nullable=True),
        sa.Column('organization_providing_assistance', sa.String(), nullable=True),
        sa.Column('citizenship', sa.String(), nullable=True),
        sa.Column('designation', sa.String(), nullable=True),
        sa.Column('phone_number', sa.String(), nullable=True),
        sa.Column('email_request', sa.String(), nullable=True),
        sa.Column('fname_request', sa.String(), nullable=True),
        sa.Column('lname_request', sa.String(), nullable=True),
        sa.Column('service_branch', sa.String(), nullable=True),
        sa.Column('date_latest_training', sa.Date(), nullable=True),
        sa.Column('pe_id', sa.String(), nullable=True),
        sa.Column('task_order_number', sa.String(), nullable=True),
        sa.Column('fname_co', sa.String(), nullable=True),
        sa.Column('lname_co', sa.String(), nullable=True),
        sa.Column('email_co', sa.String(), nullable=True),
        sa.Column('office_co', sa.String(), nullable=True),
        sa.Column('fname_cor', sa.String(), nullable=True),
        sa.Column('lname_cor', sa.String(), nullable=True),
        sa.Column('email_cor', sa.String(), nullable=True),
        sa.Column('office_cor', sa.String(), nullable=True),
        sa.Column('uii_ids', Array(sa.String()), nullable=True),
        sa.Column('treasury_code', sa.String(), nullable=True),
        sa.Column('ba_code', sa.String(), nullable=True),
        sa.ForeignKeyConstraint(['request_id'], ['requests.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('request_revisions')
    # ### end Alembic commands ###
@@ -0,0 +1,32 @@
"""make status event relation to revision non-nullable

Revision ID: 06aa23166ca9
Revises: e66a49285f23
Create Date: 2018-09-04 15:03:20.299607

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql


# revision identifiers, used by Alembic.
revision = '06aa23166ca9'
down_revision = 'e66a49285f23'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('request_status_events', 'request_revision_id',
                    existing_type=postgresql.UUID(),
                    nullable=False)
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('request_status_events', 'request_revision_id',
                    existing_type=postgresql.UUID(),
                    nullable=True)
    # ### end Alembic commands ###
@@ -0,0 +1,79 @@
"""remove revisions body column

Revision ID: 090e1bd0d7ce
Revises: a903ebe91ad5
Create Date: 2018-08-31 12:08:52.376027

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.orm import sessionmaker
from sqlalchemy.dialects import postgresql

from atst.models.request import Request
from atst.utils import deep_merge
from atst.domain.requests import create_revision_from_request_body
from atst.domain.task_orders import TaskOrders


# revision identifiers, used by Alembic.
revision = '090e1bd0d7ce'
down_revision = 'a903ebe91ad5'
branch_labels = None
depends_on = None


def delete_two_deep(body, key1, key2):
    result = body.get(key1, {}).get(key2)
    if result:
        del(body[key1][key2])

    return body


TASK_ORDER_DATA = TaskOrders.TASK_ORDER_DATA + ["task_order_id", "csrf_token"]

def create_revision(body):
    financials = body.get("financial_verification")
    if financials:
        for column in TASK_ORDER_DATA:
            if column in financials:
                del(financials[column])

    return create_revision_from_request_body(body)


def massaged_revision(body):
    try:
        return create_revision(body)
    except ValueError:
        # some of the data on staging has out-of-range dates like "02/29/2019";
        # we don't know how to coerce them to valid dates, so we remove those
        # fields.
        body = delete_two_deep(body, "details_of_use", "start_date")
        body = delete_two_deep(body, "information_about_you", "date_latest_training")

        return create_revision(body)


from uuid import UUID

def upgrade():
    Session = sessionmaker(bind=op.get_bind())
    session = Session()
    for request in session.query(Request).all():
        (body,) = session.execute("SELECT body from requests WHERE id='{}'".format(request.id)).fetchone()

        revision = massaged_revision(body)
        request.revisions.append(revision)

        session.add(revision)
        session.add(request)

    session.commit()

    op.drop_column('requests', 'body')


def downgrade():
    op.add_column('requests', sa.Column('body', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True))
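Illustrative sketch (not part of the diff) of how the data-migration helpers above behave on a body with an out-of-range date; it assumes delete_two_deep is importable from this migration module.

body = {"details_of_use": {"start_date": "02/29/2019", "jedi_usage": "testing"}}
body = delete_two_deep(body, "details_of_use", "start_date")
# the unparseable date is dropped, the rest of the section survives
assert body == {"details_of_use": {"jedi_usage": "testing"}}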
@@ -0,0 +1,28 @@
"""add sequence to request revision

Revision ID: a903ebe91ad5
Revises: 04fe150da553
Create Date: 2018-08-30 13:45:35.561657

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql


# revision identifiers, used by Alembic.
revision = 'a903ebe91ad5'
down_revision = '04fe150da553'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    db = op.get_bind()
    op.add_column('request_revisions', sa.Column('sequence', sa.BigInteger(), nullable=False))
    db.execute("CREATE SEQUENCE request_revisions_sequence_seq OWNED BY request_revisions.sequence;")
    # ### end Alembic commands ###


def downgrade():
    op.drop_column('request_revisions', 'sequence')
@@ -0,0 +1,42 @@
"""add relationship between revision and status event

Revision ID: e66a49285f23
Revises: 090e1bd0d7ce
Create Date: 2018-09-04 14:01:31.548665

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.orm import sessionmaker
from sqlalchemy.dialects import postgresql

from atst.models.request import Request


# revision identifiers, used by Alembic.
revision = 'e66a49285f23'
down_revision = '090e1bd0d7ce'
branch_labels = None
depends_on = None


from uuid import UUID


def upgrade():
    op.add_column('request_status_events', sa.Column('request_revision_id', postgresql.UUID(as_uuid=True)))
    op.create_foreign_key(None, 'request_status_events', 'request_revisions', ['request_revision_id'], ['id'])

    Session = sessionmaker(bind=op.get_bind())
    session = Session()
    for request in session.query(Request).all():
        for status in request.status_events:
            status.revision = request.latest_revision
            session.add(status)

    session.commit()


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(None, 'request_status_events', type_='foreignkey')
    op.drop_column('request_status_events', 'request_revision_id')
    # ### end Alembic commands ###
@@ -1,12 +0,0 @@
-import pendulum
-
-
-def parse_date(data):
-    date_formats = ["YYYY-MM-DD", "MM/DD/YYYY"]
-    for _format in date_formats:
-        try:
-            return pendulum.from_format(data, _format).date()
-        except (ValueError, pendulum.parsing.exceptions.ParserError):
-            pass
-
-    raise ValueError("Unable to parse string {}".format(data))
@@ -2,35 +2,31 @@ from enum import Enum
 from sqlalchemy import exists, and_, exc
 from sqlalchemy.sql import text
 from sqlalchemy.orm.exc import NoResultFound
-from sqlalchemy.orm.attributes import flag_modified
 from werkzeug.datastructures import FileStorage
+import dateutil
 
 from atst.database import db
 from atst.domain.authz import Authorization
 from atst.domain.task_orders import TaskOrders
 from atst.domain.workspaces import Workspaces
 from atst.models.request import Request
+from atst.models.request_revision import RequestRevision
 from atst.models.request_status_event import RequestStatusEvent, RequestStatus
+from atst.utils import deep_merge
 
 from .exceptions import NotFoundError, UnauthorizedError
 
 
-def deep_merge(source, destination: dict):
-    """
-    Merge source dict into destination dict recursively.
-    """
-
-    def _deep_merge(a, b):
-        for key, value in a.items():
-            if isinstance(value, dict):
-                node = b.setdefault(key, {})
-                _deep_merge(value, node)
-            else:
-                b[key] = value
-
-        return b
-
-    return _deep_merge(source, dict(destination))
+def create_revision_from_request_body(body):
+    body = {k: v for p in body.values() for k, v in p.items()}
+    DATES = ["start_date", "date_latest_training"]
+    coerced_timestamps = {
+        k: dateutil.parser.parse(v)
+        for k, v in body.items()
+        if k in DATES and isinstance(v, str)
+    }
+    body = {**body, **coerced_timestamps}
+    return RequestRevision(**body)
 
 
 class Requests(object):

@@ -39,7 +35,8 @@ class Requests(object):
 
     @classmethod
     def create(cls, creator, body):
-        request = Request(creator=creator, body=body)
+        revision = create_revision_from_request_body(body)
+        request = Request(creator=creator, revisions=[revision])
         request = Requests.set_status(request, RequestStatus.STARTED)
 
         db.session.add(request)

@@ -105,7 +102,10 @@ class Requests(object):
     @classmethod
     def update(cls, request_id, request_delta):
         request = Requests._get_with_lock(request_id)
-        request = Requests._merge_body(request, request_delta)
+
+        new_body = deep_merge(request_delta, request.body)
+        revision = create_revision_from_request_body(new_body)
+        request.revisions.append(revision)
 
         db.session.add(request)
         db.session.commit()

@@ -127,16 +127,6 @@ class Requests(object):
         except NoResultFound:
             raise NotFoundError()
 
-    @classmethod
-    def _merge_body(cls, request, request_delta):
-        request.body = deep_merge(request_delta, request.body)
-
-        # Without this, sqlalchemy won't notice the change to request.body,
-        # since it doesn't track dictionary mutations by default.
-        flag_modified(request, "body")
-
-        return request
-
     @classmethod
     def approve_and_create_workspace(cls, request):
         approved_request = Requests.set_status(request, RequestStatus.APPROVED)

@@ -149,7 +139,9 @@ class Requests(object):
 
     @classmethod
     def set_status(cls, request: Request, status: RequestStatus):
-        status_event = RequestStatusEvent(new_status=status)
+        status_event = RequestStatusEvent(
+            new_status=status, revision=request.latest_revision
+        )
         request.status_events.append(status_event)
         return request
 

@@ -256,12 +248,7 @@ WHERE requests_with_status.status = :status
         if task_order:
             request.task_order = task_order
 
-        request = Requests._merge_body(
-            request, {"financial_verification": request_data}
-        )
-
-        db.session.add(request)
-        db.session.commit()
+        request = Requests.update(request.id, {"financial_verification": request_data})
 
         return request
 
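An illustrative sketch (assumed usage, not code from the PR) of what create_revision_from_request_body does with a nested request body:

body = {
    "details_of_use": {"dollar_value": 999999, "start_date": "2018-08-08"},
    "information_about_you": {"citizenship": "United States"},
}
revision = create_revision_from_request_body(body)
# The nested sections are flattened into flat RequestRevision columns, and the
# "start_date" string is coerced with dateutil.parser.parse before the
# RequestRevision is constructed.
assert revision.citizenship == "United States"
assert revision.dollar_value == 999999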
@@ -1,23 +1,6 @@
-from wtforms.fields.html5 import DateField
 from wtforms.fields import Field, SelectField as SelectField_
 from wtforms.widgets import TextArea
 
-from atst.domain.date import parse_date
-
-
-class DateField(DateField):
-    def _value(self):
-        if self.data:
-            return parse_date(self.data)
-        else:
-            return None
-
-    def process_formdata(self, values):
-        if values:
-            self.data = values[0]
-        else:
-            self.data = []
-
-
 class NewlineListField(Field):
     widget = TextArea()
@@ -1,9 +1,9 @@
-from wtforms.fields.html5 import EmailField, TelField
+from wtforms.fields.html5 import DateField, EmailField, TelField
 from wtforms.fields import RadioField, StringField
 from wtforms.validators import Required, Email
 import pendulum
 
-from .fields import DateField, SelectField
+from .fields import SelectField
 from .forms import ValidatedForm
 from .validators import DateRange, PhoneNumber, Alphabet
 from .data import SERVICE_BRANCHES

@@ -60,4 +60,5 @@ class OrgForm(ValidatedForm):
                 message="Must be a date within the last year.",
             ),
         ],
+        format="%m/%d/%Y",
     )
@@ -1,8 +1,8 @@
-from wtforms.fields.html5 import IntegerField
+from wtforms.fields.html5 import DateField, IntegerField
 from wtforms.fields import RadioField, TextAreaField
 from wtforms.validators import Optional, Required
 
-from .fields import DateField, SelectField
+from .fields import SelectField
 from .forms import ValidatedForm
 from .data import (
     SERVICE_BRANCHES,

@@ -135,4 +135,5 @@ class RequestForm(ValidatedForm):
     start_date = DateField(
         description="When do you expect to start using the JEDI Cloud (not for billing purposes)?",
         validators=[Required()],
+        format="%m/%d/%Y",
     )
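For context, a hedged sketch of why the stock wtforms html5 DateField with an explicit format replaces the custom DateField: posted MM/DD/YYYY strings are parsed into real date objects before validators run. The form class below is hypothetical, for illustration only.

import datetime
from wtforms import Form
from wtforms.fields.html5 import DateField
from werkzeug.datastructures import ImmutableMultiDict


class ExampleForm(Form):  # hypothetical form, not part of the PR
    start_date = DateField(format="%m/%d/%Y")


# form data posted as a string is parsed into a datetime.date
form = ExampleForm(ImmutableMultiDict({"start_date": "08/08/2018"}))
assert form.start_date.data == datetime.date(2018, 8, 8)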
@@ -2,13 +2,11 @@ import re
 from wtforms.validators import ValidationError
 import pendulum
 
-from atst.domain.date import parse_date
-
 
 def DateRange(lower_bound=None, upper_bound=None, message=None):
     def _date_range(form, field):
         now = pendulum.now().date()
-        date = parse_date(field.data)
+        date = field.data
 
         if lower_bound is not None:
             if (now - lower_bound) > date:
@@ -14,3 +14,4 @@ from .workspace import Workspace
 from .project import Project
 from .environment import Environment
 from .attachment import Attachment
+from .request_revision import RequestRevision
@@ -1,6 +1,5 @@
 from sqlalchemy import Column, func, ForeignKey
 from sqlalchemy.types import DateTime
-from sqlalchemy.dialects.postgresql import JSONB
 from sqlalchemy.orm import relationship
 import pendulum
 

@@ -8,6 +7,23 @@ from atst.models import Base
 from atst.models.types import Id
 from atst.models.request_status_event import RequestStatus
 from atst.utils import first_or_none
+from atst.models.request_revision import RequestRevision
+
+
+def map_properties_to_dict(properties, instance):
+    return {
+        field: getattr(instance, field)
+        for field in properties
+        if getattr(instance, field) is not None
+    }
+
+
+def update_dict_with_properties(instance, body, top_level_key, properties):
+    new_properties = map_properties_to_dict(properties, instance)
+    if new_properties:
+        body[top_level_key] = new_properties
+
+    return body
 
 
 class Request(Base):

@@ -15,7 +31,6 @@ class Request(Base):
 
     id = Id()
     time_created = Column(DateTime(timezone=True), server_default=func.now())
-    body = Column(JSONB)
     status_events = relationship(
         "RequestStatusEvent", backref="request", order_by="RequestStatusEvent.sequence"
     )

@@ -28,6 +43,78 @@ class Request(Base):
     task_order_id = Column(ForeignKey("task_order.id"))
     task_order = relationship("TaskOrder")
 
+    revisions = relationship(
+        "RequestRevision", back_populates="request", order_by="RequestRevision.sequence"
+    )
+
+    @property
+    def latest_revision(self):
+        if self.revisions:
+            return self.revisions[-1]
+
+        else:
+            return RequestRevision(request=self)
+
+    PRIMARY_POC_FIELDS = ["am_poc", "dodid_poc", "email_poc", "fname_poc", "lname_poc"]
+    DETAILS_OF_USE_FIELDS = [
+        "jedi_usage",
+        "start_date",
+        "cloud_native",
+        "dollar_value",
+        "dod_component",
+        "data_transfers",
+        "expected_completion_date",
+        "jedi_migration",
+        "num_software_systems",
+        "number_user_sessions",
+        "average_daily_traffic",
+        "engineering_assessment",
+        "technical_support_team",
+        "estimated_monthly_spend",
+        "average_daily_traffic_gb",
+        "rationalization_software_systems",
+        "organization_providing_assistance",
+    ]
+    INFORMATION_ABOUT_YOU_FIELDS = [
+        "citizenship",
+        "designation",
+        "phone_number",
+        "email_request",
+        "fname_request",
+        "lname_request",
+        "service_branch",
+        "date_latest_training",
+    ]
+    FINANCIAL_VERIFICATION_FIELDS = [
+        "pe_id",
+        "task_order_number",
+        "fname_co",
+        "lname_co",
+        "email_co",
+        "office_co",
+        "fname_cor",
+        "lname_cor",
+        "email_cor",
+        "office_cor",
+        "uii_ids",
+        "treasury_code",
+        "ba_code",
+    ]
+
+    @property
+    def body(self):
+        current = self.latest_revision
+        body = {}
+        for top_level_key, properties in [
+            ("primary_poc", Request.PRIMARY_POC_FIELDS),
+            ("details_of_use", Request.DETAILS_OF_USE_FIELDS),
+            ("information_about_you", Request.INFORMATION_ABOUT_YOU_FIELDS),
+            ("financial_verification", Request.FINANCIAL_VERIFICATION_FIELDS),
+        ]:
+            body = update_dict_with_properties(current, body, top_level_key, properties)
+
+        return body
+
     @property
     def status(self):
         return self.status_events[-1].new_status

@@ -38,7 +125,7 @@ class Request(Base):
 
     @property
     def annual_spend(self):
-        monthly = self.body.get("details_of_use", {}).get("estimated_monthly_spend", 0)
+        monthly = self.latest_revision.estimated_monthly_spend or 0
         return monthly * 12
 
     @property
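A small sketch (assumption: objects constructed in memory, not yet flushed to the database) of how the new body property rebuilds the old nested dict from the flat revision columns, skipping sections whose fields are all None:

revision = RequestRevision(jedi_usage="testing", citizenship="United States")
request = Request(revisions=[revision])

# only the sections with populated fields come back
assert request.body == {
    "details_of_use": {"jedi_usage": "testing"},
    "information_about_you": {"citizenship": "United States"},
}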
atst/models/request_revision.py (new file, 78 lines)
@@ -0,0 +1,78 @@
from sqlalchemy import (
    Column,
    ForeignKey,
    String,
    Boolean,
    Integer,
    Date,
    BigInteger,
    Sequence,
)
from sqlalchemy.orm import relationship
from sqlalchemy.dialects.postgresql import ARRAY

from atst.models import Base
from atst.models.mixins import TimestampsMixin
from atst.models.types import Id


class RequestRevision(Base, TimestampsMixin):
    __tablename__ = "request_revisions"

    id = Id()
    request_id = Column(ForeignKey("requests.id"), nullable=False)
    request = relationship("Request", back_populates="revisions")
    sequence = Column(
        BigInteger, Sequence("request_revisions_sequence_seq"), nullable=False
    )

    # primary_poc
    am_poc = Column(Boolean, default=False)
    dodid_poc = Column(String)
    email_poc = Column(String)
    fname_poc = Column(String)
    lname_poc = Column(String)

    # details_of_use
    jedi_usage = Column(String)
    start_date = Column(Date())
    cloud_native = Column(String)
    dollar_value = Column(Integer)
    dod_component = Column(String)
    data_transfers = Column(String)
    expected_completion_date = Column(String)
    jedi_migration = Column(String)
    num_software_systems = Column(Integer)
    number_user_sessions = Column(Integer)
    average_daily_traffic = Column(Integer)
    engineering_assessment = Column(String)
    technical_support_team = Column(String)
    estimated_monthly_spend = Column(Integer)
    average_daily_traffic_gb = Column(Integer)
    rationalization_software_systems = Column(String)
    organization_providing_assistance = Column(String)

    # information_about_you
    citizenship = Column(String)
    designation = Column(String)
    phone_number = Column(String)
    email_request = Column(String)
    fname_request = Column(String)
    lname_request = Column(String)
    service_branch = Column(String)
    date_latest_training = Column(Date())

    # financial_verification
    pe_id = Column(String)
    task_order_number = Column(String)
    fname_co = Column(String)
    lname_co = Column(String)
    email_co = Column(String)
    office_co = Column(String)
    fname_cor = Column(String)
    lname_cor = Column(String)
    email_cor = Column(String)
    office_cor = Column(String)
    uii_ids = Column(ARRAY(String))
    treasury_code = Column(String)
    ba_code = Column(String)
@@ -1,5 +1,6 @@
 from enum import Enum
 from sqlalchemy import Column, func, ForeignKey, Enum as SQLAEnum
+from sqlalchemy.orm import relationship
 from sqlalchemy.types import DateTime, BigInteger
 from sqlalchemy.schema import Sequence
 from sqlalchemy.dialects.postgresql import UUID

@@ -31,6 +32,8 @@ class RequestStatusEvent(Base):
     sequence = Column(
         BigInteger, Sequence("request_status_events_sequence_seq"), nullable=False
     )
+    request_revision_id = Column(ForeignKey("request_revisions.id"), nullable=False)
+    revision = relationship("RequestRevision")
 
     @property
     def displayname(self):
@@ -1,2 +1,20 @@
 def first_or_none(predicate, lst):
     return next((x for x in lst if predicate(x)), None)
+
+
+def deep_merge(source, destination: dict):
+    """
+    Merge source dict into destination dict recursively.
+    """
+
+    def _deep_merge(a, b):
+        for key, value in a.items():
+            if isinstance(value, dict):
+                node = b.setdefault(key, {})
+                _deep_merge(value, node)
+            else:
+                b[key] = value
+
+        return b
+
+    return _deep_merge(source, dict(destination))
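A quick sketch of the deep_merge helper now living in atst.utils: keys from source are merged recursively into a shallow copy of destination, so untouched sections carry over.

destination = {"details_of_use": {"dollar_value": 100}, "primary_poc": {"am_poc": True}}
source = {"details_of_use": {"jedi_usage": "testing"}}

merged = deep_merge(source, destination)
assert merged == {
    "details_of_use": {"dollar_value": 100, "jedi_usage": "testing"},
    "primary_poc": {"am_poc": True},
}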
templates/components/date_input.html (new file, 10 lines)
@@ -0,0 +1,10 @@
{% from "components/text_input.html" import TextInput %}

{% macro DateInput(field, tooltip='', placeholder='', validation='anything', paragraph=False) -%}
  {% if field.data %}
    {% set input_value=field.data.strftime("%m/%d/%Y") %}
  {% else %}
    {% set input_value=None %}
  {% endif %}
  {{ TextInput(field, initial_value=input_value, tooltip=tooltip, placeholder=placeholder, validation=validation, paragraph=paragraph) }}
{% endmacro %}
@@ -1,12 +1,12 @@
 {% from "components/icon.html" import Icon %}
 {% from "components/tooltip.html" import Tooltip %}
 
-{% macro TextInput(field, tooltip='', placeholder='', validation='anything', paragraph=False) -%}
+{% macro TextInput(field, tooltip='', placeholder='', validation='anything', paragraph=False, initial_value='') -%}
 <textinput
   name='{{ field.name }}'
   validation='{{ validation }}'
   {% if paragraph %}paragraph='true'{% endif %}
-  {% if field.data %}initial-value='{{ field.data }}'{% endif %}
+  {% if initial_value or field.data %}initial-value='{{ initial_value or field.data }}'{% endif %}
   {% if field.errors %}v-bind:initial-errors='{{ field.errors }}'{% endif %}
   key='{{ field.name }}'
   inline-template>
@@ -3,6 +3,7 @@
 {% from "components/alert.html" import Alert %}
 {% from "components/text_input.html" import TextInput %}
 {% from "components/options_input.html" import OptionsInput %}
+{% from "components/date_input.html" import DateInput %}
 
 {% block subtitle %}
 <h2>Details of Use</h2>

@@ -76,7 +77,7 @@
     </transition>
 
     {{ TextInput(f.dollar_value, validation='dollars', placeholder='$0') }}
-    {{ TextInput(f.start_date, placeholder='MM / DD / YYYY', validation='date') }}
+    {{ DateInput(f.start_date, placeholder='MM / DD / YYYY', validation='date') }}
 
   </div>
 </details-of-use>
@@ -3,6 +3,7 @@
 {% from "components/alert.html" import Alert %}
 {% from "components/text_input.html" import TextInput %}
 {% from "components/options_input.html" import OptionsInput %}
+{% from "components/date_input.html" import DateInput %}
 
 {% block subtitle %}
 <h2>Information About You</h2>

@@ -29,5 +30,5 @@
 {{ OptionsInput(f.service_branch) }}
 {{ OptionsInput(f.citizenship) }}
 {{ OptionsInput(f.designation) }}
-{{ TextInput(f.date_latest_training,tooltip="When was the last time you completed the IA training? <br> Information Assurance (IA) training is an important step in cyber awareness.",placeholder="MM / DD / YYYY", validation="date") }}
+{{ DateInput(f.date_latest_training,tooltip="When was the last time you completed the IA training? <br> Information Assurance (IA) training is an important step in cyber awareness.",placeholder="MM / DD / YYYY", validation="date") }}
 {% endblock %}
@@ -1,20 +0,0 @@
-import pytest
-import pendulum
-
-from atst.domain.date import parse_date
-
-
-def test_date_with_slashes():
-    date_str = "1/2/2020"
-    assert parse_date(date_str) == pendulum.date(2020, 1, 2)
-
-
-def test_date_with_dashes():
-    date_str = "2020-1-2"
-    assert parse_date(date_str) == pendulum.date(2020, 1, 2)
-
-
-def test_invalid_date():
-    date_str = "This is not a valid data"
-    with pytest.raises(ValueError):
-        parse_date(date_str)
@@ -12,6 +12,7 @@ from tests.factories import (
     UserFactory,
     RequestStatusEventFactory,
     TaskOrderFactory,
+    RequestRevisionFactory,
 )
 
 

@@ -20,10 +21,11 @@ def new_request(session):
     return RequestFactory.create()
 
 
-def test_can_get_request(new_request):
-    request = Requests.get(new_request.creator, new_request.id)
+def test_can_get_request():
+    factory_req = RequestFactory.create()
+    request = Requests.get(factory_req.creator, factory_req.id)
 
-    assert request.id == new_request.id
+    assert request.id == factory_req.id
 
 
 def test_nonexistent_request_raises():

@@ -37,36 +39,41 @@ def test_new_request_has_started_status():
     assert request.status == RequestStatus.STARTED
 
 
-def test_auto_approve_less_than_1m(new_request):
-    new_request.body = {"details_of_use": {"dollar_value": 999999}}
+def test_auto_approve_less_than_1m():
+    new_request = RequestFactory.create(initial_revision={"dollar_value": 999999})
     request = Requests.submit(new_request)
 
     assert request.status == RequestStatus.PENDING_FINANCIAL_VERIFICATION
 
 
-def test_dont_auto_approve_if_dollar_value_is_1m_or_above(new_request):
-    new_request.body = {"details_of_use": {"dollar_value": 1000000}}
+def test_dont_auto_approve_if_dollar_value_is_1m_or_above():
+    new_request = RequestFactory.create(initial_revision={"dollar_value": 1000000})
     request = Requests.submit(new_request)
 
     assert request.status == RequestStatus.PENDING_CCPO_APPROVAL
 
 
-def test_dont_auto_approve_if_no_dollar_value_specified(new_request):
-    new_request.body = {"details_of_use": {}}
+def test_dont_auto_approve_if_no_dollar_value_specified():
+    new_request = RequestFactory.create(initial_revision={})
    request = Requests.submit(new_request)
 
     assert request.status == RequestStatus.PENDING_CCPO_APPROVAL
 
 
-def test_should_allow_submission(new_request):
+def test_should_allow_submission():
+    new_request = RequestFactory.create()
+
     assert Requests.should_allow_submission(new_request)
 
     RequestStatusEventFactory.create(
-        request=new_request, new_status=RequestStatus.CHANGES_REQUESTED
+        request=new_request,
+        new_status=RequestStatus.CHANGES_REQUESTED,
+        revision=new_request.latest_revision,
     )
     assert Requests.should_allow_submission(new_request)
 
-    del new_request.body["details_of_use"]
+    # new, blank revision
+    RequestRevisionFactory.create(request=new_request)
     assert not Requests.should_allow_submission(new_request)
 
 

@@ -96,6 +103,7 @@ def test_status_count(session):
     RequestStatusEventFactory.create(
         sequence=2,
         request_id=request2.id,
+        revision=request2.latest_revision,
         new_status=RequestStatus.PENDING_FINANCIAL_VERIFICATION,
     )
 

@@ -164,3 +172,9 @@ def test_update_financial_verification_with_invalid_task_order():
         request.body["financial_verification"]["task_order_number"]
         == request_financial_data["task_order_number"]
     )
+
+
+def test_set_status_sets_revision():
+    request = RequestFactory.create()
+    Requests.set_status(request, RequestStatus.APPROVED)
+    assert request.latest_revision == request.status_events[-1].revision
@@ -2,9 +2,11 @@ import random
 import string
 import factory
 from uuid import uuid4
+import datetime
 
 from atst.forms.data import SERVICE_BRANCHES
 from atst.models.request import Request
+from atst.models.request_revision import RequestRevision
 from atst.models.request_status_event import RequestStatusEvent, RequestStatus
 from atst.models.pe_number import PENumber
 from atst.models.task_order import TaskOrder

@@ -46,57 +48,83 @@ class RequestStatusEventFactory(factory.alchemy.SQLAlchemyModelFactory):
     sequence = 1
 
 
+class RequestRevisionFactory(factory.alchemy.SQLAlchemyModelFactory):
+    class Meta:
+        model = RequestRevision
+
+    id = factory.Sequence(lambda x: uuid4())
+
+
 class RequestFactory(factory.alchemy.SQLAlchemyModelFactory):
     class Meta:
         model = Request
 
     id = factory.Sequence(lambda x: uuid4())
-    status_events = factory.RelatedFactory(
-        RequestStatusEventFactory, "request", new_status=RequestStatus.STARTED
-    )
     creator = factory.SubFactory(UserFactory)
-    body = factory.LazyAttribute(lambda r: RequestFactory.build_request_body(r.creator))
+    revisions = factory.LazyAttribute(
+        lambda r: [RequestFactory.create_initial_revision(r)]
+    )
+    status_events = factory.RelatedFactory(
+        RequestStatusEventFactory,
+        "request",
+        new_status=RequestStatus.STARTED,
+        revision=factory.LazyAttribute(lambda se: se.factory_parent.revisions[-1]),
+    )
+
+    class Params:
+        initial_revision = None
 
     @classmethod
-    def build_request_body(cls, user, dollar_value=1000000):
-        return {
-            "primary_poc": {
-                "am_poc": False,
-                "dodid_poc": user.dod_id,
-                "email_poc": user.email,
-                "fname_poc": user.first_name,
-                "lname_poc": user.last_name,
-            },
-            "details_of_use": {
-                "jedi_usage": "adf",
-                "start_date": "2018-08-08",
-                "cloud_native": "yes",
-                "dollar_value": dollar_value,
-                "dod_component": SERVICE_BRANCHES[2][1],
-                "data_transfers": "Less than 100GB",
-                "expected_completion_date": "Less than 1 month",
-                "jedi_migration": "yes",
-                "num_software_systems": 1,
-                "number_user_sessions": 2,
-                "average_daily_traffic": 1,
-                "engineering_assessment": "yes",
-                "technical_support_team": "yes",
-                "estimated_monthly_spend": 100,
-                "average_daily_traffic_gb": 4,
-                "rationalization_software_systems": "yes",
-                "organization_providing_assistance": "In-house staff",
-            },
-            "information_about_you": {
-                "citizenship": "United States",
-                "designation": "military",
-                "phone_number": "1234567890",
-                "email_request": user.email,
-                "fname_request": user.first_name,
-                "lname_request": user.last_name,
-                "service_branch": SERVICE_BRANCHES[1][1],
-                "date_latest_training": "2018-08-06",
-            },
-        }
+    def create_initial_status_event(cls, request):
+        return RequestStatusEventFactory(
+            request=request,
+            new_status=RequestStatus.STARTED,
+            revision=request.revisions,
+        )
+
+    @classmethod
+    def create_initial_revision(cls, request, dollar_value=1000000):
+        user = request.creator
+        default_data = dict(
+            am_poc=False,
+            dodid_poc=user.dod_id,
+            email_poc=user.email,
+            fname_poc=user.first_name,
+            lname_poc=user.last_name,
+            jedi_usage="adf",
+            start_date=datetime.date(2018, 8, 8),
+            cloud_native="yes",
+            dollar_value=dollar_value,
+            dod_component=SERVICE_BRANCHES[2][1],
+            data_transfers="Less than 100GB",
+            expected_completion_date="Less than 1 month",
+            jedi_migration="yes",
+            num_software_systems=1,
+            number_user_sessions=2,
+            average_daily_traffic=1,
+            engineering_assessment="yes",
+            technical_support_team="yes",
+            estimated_monthly_spend=100,
+            average_daily_traffic_gb=4,
+            rationalization_software_systems="yes",
+            organization_providing_assistance="In-house staff",
+            citizenship="United States",
+            designation="military",
+            phone_number="1234567890",
+            email_request=user.email,
+            fname_request=user.first_name,
+            lname_request=user.last_name,
+            service_branch=SERVICE_BRANCHES[1][1],
+            date_latest_training=datetime.date(2018, 8, 6),
+        )
+
+        data = (
+            request.initial_revision
+            if request.initial_revision is not None
+            else default_data
+        )
+
+        return RequestRevisionFactory.build(**data)
 
 
 class PENumberFactory(factory.alchemy.SQLAlchemyModelFactory):
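A hedged usage sketch of the new factory Params hook: passing initial_revision overrides the default revision data, which is how the updated route and domain tests build sparse requests.

request = RequestFactory.create(initial_revision={"dollar_value": 999999})
assert request.latest_revision.dollar_value == 999999

# an empty dict yields a revision with no fields populated
blank = RequestFactory.create(initial_revision={})
assert blank.latest_revision.dollar_value is None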
@@ -3,33 +3,13 @@ from wtforms import Form
 import pendulum
 from werkzeug.datastructures import ImmutableMultiDict
 
-from atst.forms.fields import DateField, NewlineListField
-
-
-class DateForm(Form):
-    date = DateField()
+from atst.forms.fields import NewlineListField
 
 
 class NewlineListForm(Form):
     newline_list = NewlineListField()
 
 
-def test_date_ie_format():
-    form = DateForm(data={"date": "12/24/2018"})
-    assert form.date._value() == pendulum.date(2018, 12, 24)
-
-
-def test_date_sane_format():
-    form = DateForm(data={"date": "2018-12-24"})
-    assert form.date._value() == pendulum.date(2018, 12, 24)
-
-
-def test_date_insane_format():
-    form = DateForm(data={"date": "hello"})
-    with pytest.raises(ValueError):
-        form.date._value()
-
-
 @pytest.mark.parametrize(
     "input_,expected",
     [
@@ -2,9 +2,7 @@ from tests.factories import RequestFactory, UserFactory
 
 
 MOCK_USER = UserFactory.build()
-MOCK_REQUEST = RequestFactory.build(
-    creator=MOCK_USER.id, body={"financial_verification": {"pe_id": "0203752A"}}
-)
+MOCK_REQUEST = RequestFactory.build(creator=MOCK_USER)
 DOD_SDN_INFO = {"first_name": "ART", "last_name": "GARFUNKEL", "dod_id": "5892460358"}
 DOD_SDN = f"CN={DOD_SDN_INFO['last_name']}.{DOD_SDN_INFO['first_name']}.G.{DOD_SDN_INFO['dod_id']},OU=OTHER,OU=PKI,OU=DoD,O=U.S. Government,C=US"
 

@@ -35,7 +35,7 @@ class TestPENumberInForm:
         return user
 
     def submit_data(self, client, user, data, extended=False):
-        request = RequestFactory.create(creator=user, body=MOCK_REQUEST.body)
+        request = RequestFactory.create(creator=user)
         url_kwargs = {"request_id": request.id}
         if extended:
             url_kwargs["extended"] = True

@@ -58,7 +58,7 @@ class TestPENumberInForm:
         user = self._set_monkeypatches(monkeypatch)
 
         data = dict(self.required_data)
-        data["pe_id"] = MOCK_REQUEST.body["financial_verification"]["pe_id"]
+        data["pe_id"] = "0101110F"
 
         response = self.submit_data(client, user, data)
 

@@ -95,7 +95,7 @@ class TestPENumberInForm:
         user_session(user)
 
         data = dict(self.required_data)
-        data["pe_id"] = MOCK_REQUEST.body["financial_verification"]["pe_id"]
+        data["pe_id"] = "0101110F"
         data["task_order_number"] = "1234"
 
         response = self.submit_data(client, user, data)

@@ -112,7 +112,7 @@ class TestPENumberInForm:
         user_session(user)
 
         data = dict(self.required_data)
-        data["pe_id"] = MOCK_REQUEST.body["financial_verification"]["pe_id"]
+        data["pe_id"] = "0101110F"
         data["task_order_number"] = MockEDAClient.MOCK_CONTRACT_NUMBER
 
         response = self.submit_data(client, user, data)
@@ -1,5 +1,5 @@
 import re
-from tests.factories import RequestFactory, UserFactory
+from tests.factories import RequestFactory, UserFactory, RequestRevisionFactory
 from atst.domain.roles import Roles
 from atst.domain.requests import Requests
 from urllib.parse import urlencode

@@ -75,10 +75,12 @@ def test_nonexistent_request(client, user_session):
     assert response.status_code == 404
 
 
-def test_creator_info_is_autopopulated(monkeypatch, client, user_session):
+def test_creator_info_is_autopopulated_for_existing_request(
+    monkeypatch, client, user_session
+):
     user = UserFactory.create()
     user_session(user)
-    request = RequestFactory.create(creator=user, body={"information_about_you": {}})
+    request = RequestFactory.create(creator=user, initial_revision={})
 
     response = client.get("/requests/new/2/{}".format(request.id))
     body = response.data.decode()

@@ -104,7 +106,7 @@ def test_non_creator_info_is_not_autopopulated(monkeypatch, client, user_session):
     user = UserFactory.create()
     creator = UserFactory.create()
     user_session(user)
-    request = RequestFactory.create(creator=creator, body={"information_about_you": {}})
+    request = RequestFactory.create(creator=creator, initial_revision={})
 
     response = client.get("/requests/new/2/{}".format(request.id))
     body = response.data.decode()

@@ -116,7 +118,7 @@ def test_non_creator_info_is_not_autopopulated(monkeypatch, client, user_session):
 def test_am_poc_causes_poc_to_be_autopopulated(client, user_session):
     creator = UserFactory.create()
     user_session(creator)
-    request = RequestFactory.create(creator=creator, body={})
+    request = RequestFactory.create(creator=creator, initial_revision={})
     client.post(
         "/requests/new/3/{}".format(request.id),
         headers={"Content-Type": "application/x-www-form-urlencoded"},

@@ -129,7 +131,7 @@ def test_am_poc_causes_poc_to_be_autopopulated(client, user_session):
 def test_not_am_poc_requires_poc_info_to_be_completed(client, user_session):
     creator = UserFactory.create()
     user_session(creator)
-    request = RequestFactory.create(creator=creator, body={})
+    request = RequestFactory.create(creator=creator, initial_revision={})
     response = client.post(
         "/requests/new/3/{}".format(request.id),
         headers={"Content-Type": "application/x-www-form-urlencoded"},

@@ -142,7 +144,7 @@ def test_not_am_poc_requires_poc_info_to_be_completed(client, user_session):
 def test_not_am_poc_allows_user_to_fill_in_poc_info(client, user_session):
     creator = UserFactory.create()
     user_session(creator)
-    request = RequestFactory.create(creator=creator, body={})
+    request = RequestFactory.create(creator=creator, initial_revision={})
     poc_input = {
         "am_poc": "no",
         "fname_poc": "test",

@@ -177,13 +179,11 @@ def test_poc_autofill_checks_information_about_you_form_first(client, user_session):
     user_session(creator)
     request = RequestFactory.create(
         creator=creator,
-        body={
-            "information_about_you": {
-                "fname_request": "Alice",
-                "lname_request": "Adams",
-                "email_request": "alice.adams@mail.mil",
-            }
-        },
+        initial_revision=dict(
+            fname_request="Alice",
+            lname_request="Adams",
+            email_request="alice.adams@mail.mil",
+        ),
     )
     poc_input = {"am_poc": "yes"}
     client.post(
@@ -12,10 +12,25 @@ def screens(app):
     return JEDIRequestFlow(3).screens
 
 
+def serialize_dates(data):
+    if not data:
+        return data
+
+    dates = {
+        k: v.strftime("%m/%d/%Y") for k, v in data.items() if hasattr(v, "strftime")
+    }
+
+    new_data = data.copy()
+    new_data.update(dates)
+
+    return new_data
+
+
 def test_stepthrough_request_form(user_session, screens, client):
     user = UserFactory.create()
     user_session(user)
-    mock_request = RequestFactory.stub()
+    mock_request = RequestFactory.create()
+    mock_body = mock_request.body
 
     def post_form(url, redirects=False, data=""):
         return client.post(

@@ -33,6 +48,7 @@ def test_stepthrough_request_form(user_session, screens, client):
         # destination url
         prelim_resp = post_form(req_url, data=data)
         response = post_form(req_url, True, data=data)
+        assert prelim_resp.status_code == 302
         return (prelim_resp.headers.get("Location"), response)
 
     # GET the initial form

@@ -44,7 +60,8 @@ def test_stepthrough_request_form(user_session, screens, client):
     for i in range(1, len(screens)):
         # get appropriate form data to POST for this section
         section = screens[i - 1]["section"]
-        post_data = urlencode(mock_request.body[section])
+        massaged = serialize_dates(mock_body[section])
+        post_data = urlencode(massaged)
 
         effective_url, resp = take_a_step(i, req=req_id, data=post_data)
         req_id = effective_url.split("/")[-1]

@@ -55,7 +72,7 @@ def test_stepthrough_request_form(user_session, screens, client):
 
     # at this point, the real request we made and the mock_request bodies
     # should be equivalent
-    assert Requests.get(user, req_id).body == mock_request.body
+    assert Requests.get(user, req_id).body == mock_body
 
     # finish the review and submit step
     client.post(
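A short sketch of the serialize_dates helper added above: date objects are rendered back to the MM/DD/YYYY strings the form layer expects, while other values pass through unchanged.

import datetime

section = {"start_date": datetime.date(2018, 8, 8), "jedi_usage": "adf"}
assert serialize_dates(section) == {"start_date": "08/08/2018", "jedi_usage": "adf"}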