provision portfolio state machine

Philip Kalinsky 2020-01-08 11:01:55 -05:00 committed by tomdds
parent ad82706bd4
commit 69bd2f43a5
22 changed files with 1122 additions and 224 deletions


@ -9,6 +9,8 @@ Unipath = "*"
pendulum = "*"
redis = "*"
sqlalchemy = ">=1.3.12"
sqlalchemy-json = "*"
pydantic = "*"
alembic = "*"
"psycopg2-binary" = "*"
flask = "*"

192
Pipfile.lock generated

@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
"sha256": "1bf62937e2d8187deb11c56188ec763f56ec055d65c87773c945384ffff68dcc"
"sha256": "ec8e6c93c581a81539a773d6814e4b19f44eedc655c8cdd3043d1a6115b1c2d1"
},
"pipfile-spec": 6,
"requires": {
@ -257,11 +257,11 @@
},
"importlib-metadata": {
"hashes": [
"sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45",
"sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f"
"sha256:bdd9b7c397c273bcc9a11d6629a38487cd07154fa255a467bf704cd2c258e359",
"sha256:f17c015735e1a88296994c0697ecea7e11db24290941983b08c9feb30921e6d8"
],
"markers": "python_version < '3.8'",
"version": "==1.3.0"
"version": "==1.4.0"
},
"isodate": {
"hashes": [
@ -340,10 +340,10 @@
},
"more-itertools": {
"hashes": [
"sha256:b84b238cce0d9adad5ed87e745778d20a3f8487d0f0cb8b8a586816c7496458d",
"sha256:c833ef592a0324bcc6a60e48440da07645063c453880c9477ceb22490aec1564"
"sha256:1a2a32c72400d365000412fe08eb4a24ebee89997c18d3d147544f70f5403b39",
"sha256:c468adec578380b6281a114cb8a5db34eb1116277da92d7c46f904f0b52d3288"
],
"version": "==8.0.2"
"version": "==8.1.0"
},
"msrest": {
"hashes": [
@ -423,6 +423,26 @@
],
"version": "==2.19"
},
"pydantic": {
"hashes": [
"sha256:176885123dfdd8f7ab6e7ba1b66d4197de75ba830bb44d921af88b3d977b8aa5",
"sha256:2b32a5f14558c36e39aeefda0c550bfc0f47fc32b4ce16d80dc4df2b33838ed8",
"sha256:2eab7d548b0e530bf65bee7855ad8164c2f6a889975d5e9c4eefd1e7c98245dc",
"sha256:479ca8dc7cc41418751bf10302ee0a1b1f8eedb2de6c4f4c0f3cf8372b204f9a",
"sha256:59235324dd7dc5363a654cd14271ea8631f1a43de5d4fc29c782318fcc498002",
"sha256:87673d1de790c8d5282153cab0b09271be77c49aabcedf3ac5ab1a1fd4dcbac0",
"sha256:8a8e089aec18c26561e09ee6daf15a3cc06df05bdc67de60a8684535ef54562f",
"sha256:b60f2b3b0e0dd74f1800a57d1bbd597839d16faf267e45fa4a5407b15d311085",
"sha256:c0da48978382c83f9488c6bbe4350e065ea5c83e85ca5cfb8fa14ac11de3c296",
"sha256:cbe284bd5ad67333d49ecc0dc27fa52c25b4c2fe72802a5c060b5f922db58bef",
"sha256:d03df07b7611004140b0fef91548878c2b5f48c520a8cb76d11d20e9887a495e",
"sha256:d4bb6a75abc2f04f6993124f1ed4221724c9dc3bd9df5cb54132e0b68775d375",
"sha256:dacb79144bb3fdb57cf9435e1bd16c35586bc44256215cfaa33bf21565d926ae",
"sha256:dd9359db7644317898816f6142f378aa48848dcc5cf14a481236235fde11a148"
],
"index": "pypi",
"version": "==1.3"
},
"pyjwt": {
"hashes": [
"sha256:5c6eca3c2940464d106b99ba83b00c6add741c9becaec087fb7ccdefea71350e",
@ -521,6 +541,19 @@
"index": "pypi",
"version": "==1.3.12"
},
"sqlalchemy-json": {
"hashes": [
"sha256:d17952e771eecd9023c0f683d2a6aaa27ce1a6dbf57b0fe2bf4d5aef4c5dad1c"
],
"index": "pypi",
"version": "==0.2.3"
},
"sqlalchemy-utils": {
"hashes": [
"sha256:4e637c88bf3ac5f99b7d72342092a1f636bea1287b2e3e17d441b0413771f86e"
],
"version": "==0.36.1"
},
"unipath": {
"hashes": [
"sha256:09839adcc72e8a24d4f76d63656f30b5a1f721fc40c9bcd79d8c67bdd8b47dae",
@ -568,10 +601,10 @@
},
"zipp": {
"hashes": [
"sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e",
"sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"
"sha256:8dda78f06bd1674bd8720df8a50bb47b6e1233c503a4eed8e7810686bde37656",
"sha256:d38fbe01bbf7a3593a32bc35a9c4453c32bc42b98c377f9bff7e9f8da157786c"
],
"version": "==0.6.0"
"version": "==1.0.0"
}
},
"develop": {
@ -687,39 +720,39 @@
},
"coverage": {
"hashes": [
"sha256:189aac76d6e0d7af15572c51892e7326ee451c076c5a50a9d266406cd6c49708",
"sha256:1bf7ba2af1d373a1750888724f84cffdfc697738f29a353c98195f98fc011509",
"sha256:1f4ee8e2e4243971618bc16fcc4478317405205f135e95226c2496e2a3b8dbbf",
"sha256:225e79a5d485bc1642cb7ba02281419c633c216cdc6b26c26494ba959f09e69f",
"sha256:23688ff75adfa8bfa2a67254d889f9bdf9302c27241d746e17547c42c732d3f4",
"sha256:28f7f73b34a05e23758e860a89a7f649b85c6749e252eff60ebb05532d180e86",
"sha256:2d0cb9b1fe6ad0d915d45ad3d87f03a38e979093a98597e755930db1f897afae",
"sha256:47874b4711c5aeb295c31b228a758ce3d096be83dc37bd56da48ed99efb8813b",
"sha256:511ec0c00840e12fb4e852e4db58fa6a01ca4da72f36a9766fae344c3d502033",
"sha256:53e7438fef0c97bc248f88ba1edd10268cd94d5609970aaf87abbe493691af87",
"sha256:569f9ee3025682afda6e9b0f5bb14897c0db03f1a1dc088b083dd36e743f92bb",
"sha256:593853aa1ac6dcc6405324d877544c596c9d948ef20d2e9512a0f5d2d3202356",
"sha256:5b0a07158360d22492f9abd02a0f2ee7981b33f0646bf796598b7673f6bbab14",
"sha256:7ca3db38a61f3655a2613ee2c190d63639215a7a736d3c64cc7bbdb002ce6310",
"sha256:7d1cc7acc9ce55179616cf72154f9e648136ea55987edf84addbcd9886ffeba2",
"sha256:88b51153657612aea68fa684a5b88037597925260392b7bb4509d4f9b0bdd889",
"sha256:955ec084f549128fa2702f0b2dc696392001d986b71acd8fd47424f28289a9c3",
"sha256:b251c7092cbb6d789d62dc9c9e7c4fb448c9138b51285c36aeb72462cad3600e",
"sha256:bd82b684bb498c60ef47bb1541a50e6d006dde8579934dcbdbc61d67d1ea70d9",
"sha256:bfe102659e2ec13b86c7f3b1db6c9a4e7beea4255058d006351339e6b342d5d2",
"sha256:c1e4e39e43057396a5e9d069bfbb6ffeee892e40c5d2effbd8cd71f34ee66c4d",
"sha256:cb2b74c123f65e8166f7e1265829a6c8ed755c3cd16d7f50e75a83456a5f3fd7",
"sha256:cca38ded59105f7705ef6ffe1e960b8db6c7d8279c1e71654a4775ab4454ca15",
"sha256:cf908840896f7aa62d0ec693beb53264b154f972eb8226fb864ac38975590c4f",
"sha256:d095a7b473f8a95f7efe821f92058c8a2ecfb18f8db6677ae3819e15dc11aaae",
"sha256:d22b4297e7e4225ccf01f1aa55e7a96412ea0796b532dd614c3fcbafa341128e",
"sha256:d4a2b578a7a70e0c71f662705262f87a456f1e6c1e40ada7ea699abaf070a76d",
"sha256:ddeb42a3d5419434742bf4cc71c9eaa22df3b76808e23a82bd0b0bd360f1a9f1",
"sha256:e65a5aa1670db6263f19fdc03daee1d7dbbadb5cb67fd0a1f16033659db13c1d",
"sha256:eaad65bd20955131bcdb3967a4dea66b4e4d4ca488efed7c00d91ee0173387e8",
"sha256:f45fba420b94165c17896861bb0e8b27fb7abdcedfeb154895d8553df90b7b00"
"sha256:15cf13a6896048d6d947bf7d222f36e4809ab926894beb748fc9caa14605d9c3",
"sha256:1daa3eceed220f9fdb80d5ff950dd95112cd27f70d004c7918ca6dfc6c47054c",
"sha256:1e44a022500d944d42f94df76727ba3fc0a5c0b672c358b61067abb88caee7a0",
"sha256:25dbf1110d70bab68a74b4b9d74f30e99b177cde3388e07cc7272f2168bd1477",
"sha256:3230d1003eec018ad4a472d254991e34241e0bbd513e97a29727c7c2f637bd2a",
"sha256:3dbb72eaeea5763676a1a1efd9b427a048c97c39ed92e13336e726117d0b72bf",
"sha256:5012d3b8d5a500834783689a5d2292fe06ec75dc86ee1ccdad04b6f5bf231691",
"sha256:51bc7710b13a2ae0c726f69756cf7ffd4362f4ac36546e243136187cfcc8aa73",
"sha256:527b4f316e6bf7755082a783726da20671a0cc388b786a64417780b90565b987",
"sha256:722e4557c8039aad9592c6a4213db75da08c2cd9945320220634f637251c3894",
"sha256:76e2057e8ffba5472fd28a3a010431fd9e928885ff480cb278877c6e9943cc2e",
"sha256:77afca04240c40450c331fa796b3eab6f1e15c5ecf8bf2b8bee9706cd5452fef",
"sha256:7afad9835e7a651d3551eab18cbc0fdb888f0a6136169fbef0662d9cdc9987cf",
"sha256:9bea19ac2f08672636350f203db89382121c9c2ade85d945953ef3c8cf9d2a68",
"sha256:a8b8ac7876bc3598e43e2603f772d2353d9931709345ad6c1149009fd1bc81b8",
"sha256:b0840b45187699affd4c6588286d429cd79a99d509fe3de0f209594669bb0954",
"sha256:b26aaf69713e5674efbde4d728fb7124e429c9466aeaf5f4a7e9e699b12c9fe2",
"sha256:b63dd43f455ba878e5e9f80ba4f748c0a2156dde6e0e6e690310e24d6e8caf40",
"sha256:be18f4ae5a9e46edae3f329de2191747966a34a3d93046dbdf897319923923bc",
"sha256:c312e57847db2526bc92b9bfa78266bfbaabac3fdcd751df4d062cd4c23e46dc",
"sha256:c60097190fe9dc2b329a0eb03393e2e0829156a589bd732e70794c0dd804258e",
"sha256:c62a2143e1313944bf4a5ab34fd3b4be15367a02e9478b0ce800cb510e3bbb9d",
"sha256:cc1109f54a14d940b8512ee9f1c3975c181bbb200306c6d8b87d93376538782f",
"sha256:cd60f507c125ac0ad83f05803063bed27e50fa903b9c2cfee3f8a6867ca600fc",
"sha256:d513cc3db248e566e07a0da99c230aca3556d9b09ed02f420664e2da97eac301",
"sha256:d649dc0bcace6fcdb446ae02b98798a856593b19b637c1b9af8edadf2b150bea",
"sha256:d7008a6796095a79544f4da1ee49418901961c97ca9e9d44904205ff7d6aa8cb",
"sha256:da93027835164b8223e8e5af2cf902a4c80ed93cb0909417234f4a9df3bcd9af",
"sha256:e69215621707119c6baf99bda014a45b999d37602cb7043d943c76a59b05bf52",
"sha256:ea9525e0fef2de9208250d6c5aeeee0138921057cd67fcef90fbed49c4d62d37",
"sha256:fca1669d464f0c9831fd10be2eef6b86f5ebd76c724d1e0706ebdff86bb4adf0"
],
"version": "==5.0.2"
"version": "==5.0.3"
},
"decorator": {
"hashes": [
@ -752,10 +785,10 @@
},
"faker": {
"hashes": [
"sha256:202ad3b2ec16ae7c51c02904fb838831f8d2899e61bf18db1e91a5a582feab11",
"sha256:92c84a10bec81217d9cb554ee12b3838c8986ce0b5d45f72f769da22e4bb5432"
"sha256:047d4d1791bfb3756264da670d99df13d799bb36e7d88774b1585a82d05dbaec",
"sha256:1b1a58961683b30c574520d0c739c4443e0ef6a185c04382e8cc888273dbebed"
],
"version": "==3.0.0"
"version": "==4.0.0"
},
"flask": {
"hashes": [
@ -796,11 +829,11 @@
},
"importlib-metadata": {
"hashes": [
"sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45",
"sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f"
"sha256:bdd9b7c397c273bcc9a11d6629a38487cd07154fa255a467bf704cd2c258e359",
"sha256:f17c015735e1a88296994c0697ecea7e11db24290941983b08c9feb30921e6d8"
],
"markers": "python_version < '3.8'",
"version": "==1.3.0"
"version": "==1.4.0"
},
"ipdb": {
"hashes": [
@ -920,10 +953,10 @@
},
"more-itertools": {
"hashes": [
"sha256:b84b238cce0d9adad5ed87e745778d20a3f8487d0f0cb8b8a586816c7496458d",
"sha256:c833ef592a0324bcc6a60e48440da07645063c453880c9477ceb22490aec1564"
"sha256:1a2a32c72400d365000412fe08eb4a24ebee89997c18d3d147544f70f5403b39",
"sha256:c468adec578380b6281a114cb8a5db34eb1116277da92d7c46f904f0b52d3288"
],
"version": "==8.0.2"
"version": "==8.1.0"
},
"mypy": {
"hashes": [
@ -1143,12 +1176,12 @@
},
"rope": {
"hashes": [
"sha256:6b728fdc3e98a83446c27a91fc5d56808a004f8beab7a31ab1d7224cecc7d969",
"sha256:c5c5a6a87f7b1a2095fb311135e2a3d1f194f5ecb96900fdd0a9100881f48aaf",
"sha256:f0dcf719b63200d492b85535ebe5ea9b29e0d0b8aebeb87fe03fc1a65924fdaf"
"sha256:52423a7eebb5306a6d63bdc91a7c657db51ac9babfb8341c9a1440831ecf3203",
"sha256:ae1fa2fd56f64f4cc9be46493ce54bed0dd12dee03980c61a4393d89d84029ad",
"sha256:d2830142c2e046f5fc26a022fe680675b6f48f81c7fc1f03a950706e746e9dfe"
],
"index": "pypi",
"version": "==0.14.0"
"version": "==0.16.0"
},
"selenium": {
"hashes": [
@ -1209,29 +1242,30 @@
},
"typed-ast": {
"hashes": [
"sha256:1170afa46a3799e18b4c977777ce137bb53c7485379d9706af8a59f2ea1aa161",
"sha256:18511a0b3e7922276346bcb47e2ef9f38fb90fd31cb9223eed42c85d1312344e",
"sha256:262c247a82d005e43b5b7f69aff746370538e176131c32dda9cb0f324d27141e",
"sha256:2b907eb046d049bcd9892e3076c7a6456c93a25bebfe554e931620c90e6a25b0",
"sha256:354c16e5babd09f5cb0ee000d54cfa38401d8b8891eefa878ac772f827181a3c",
"sha256:48e5b1e71f25cfdef98b013263a88d7145879fbb2d5185f2a0c79fa7ebbeae47",
"sha256:4e0b70c6fc4d010f8107726af5fd37921b666f5b31d9331f0bd24ad9a088e631",
"sha256:630968c5cdee51a11c05a30453f8cd65e0cc1d2ad0d9192819df9978984529f4",
"sha256:66480f95b8167c9c5c5c87f32cf437d585937970f3fc24386f313a4c97b44e34",
"sha256:71211d26ffd12d63a83e079ff258ac9d56a1376a25bc80b1cdcdf601b855b90b",
"sha256:7954560051331d003b4e2b3eb822d9dd2e376fa4f6d98fee32f452f52dd6ebb2",
"sha256:838997f4310012cf2e1ad3803bce2f3402e9ffb71ded61b5ee22617b3a7f6b6e",
"sha256:95bd11af7eafc16e829af2d3df510cecfd4387f6453355188342c3e79a2ec87a",
"sha256:bc6c7d3fa1325a0c6613512a093bc2a2a15aeec350451cbdf9e1d4bffe3e3233",
"sha256:cc34a6f5b426748a507dd5d1de4c1978f2eb5626d51326e43280941206c209e1",
"sha256:d755f03c1e4a51e9b24d899561fec4ccaf51f210d52abdf8c07ee2849b212a36",
"sha256:d7c45933b1bdfaf9f36c579671fec15d25b06c8398f113dab64c18ed1adda01d",
"sha256:d896919306dd0aa22d0132f62a1b78d11aaf4c9fc5b3410d3c666b818191630a",
"sha256:fdc1c9bbf79510b76408840e009ed65958feba92a88833cdceecff93ae8fff66",
"sha256:ffde2fbfad571af120fcbfbbc61c72469e72f550d676c3342492a9dfdefb8f12"
"sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355",
"sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919",
"sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa",
"sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652",
"sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75",
"sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01",
"sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d",
"sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1",
"sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907",
"sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c",
"sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3",
"sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b",
"sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614",
"sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb",
"sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b",
"sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41",
"sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6",
"sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34",
"sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe",
"sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4",
"sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"
],
"markers": "implementation_name == 'cpython' and python_version < '3.8'",
"version": "==1.4.0"
"version": "==1.4.1"
},
"typing-extensions": {
"hashes": [
@ -1277,10 +1311,10 @@
},
"zipp": {
"hashes": [
"sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e",
"sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"
"sha256:8dda78f06bd1674bd8720df8a50bb47b6e1233c503a4eed8e7810686bde37656",
"sha256:d38fbe01bbf7a3593a32bc35a9c4453c32bc42b98c377f9bff7e9f8da157786c"
],
"version": "==0.6.0"
"version": "==1.0.0"
}
}
}

70
PortfolioProvision.md Normal file

@ -0,0 +1,70 @@
Each CSP has a set of "stages" that must be completed before the provisioning process can be considered complete.
Azure Stages:
tenant,
billing profile,
admin subscription
etc.
The `atst.models.mixins.state_machines` module contains Python Enum classes that define the stages for a CSP:
class AzureStages(Enum):
TENANT = "tenant"
BILLING_PROFILE = "billing profile"
ADMIN_SUBSCRIPTION = "admin subscription"
There are two types of pydantic model classes defined in the `atst.domain.csp.cloud` module.
One holds the data that is submitted to the CSP:
class TenantCSPPayload(BaseCSPPayload):
user_id: str
password: str
etc.
The other holds the results of the call to the CSP:
class TenantCSPResult(BaseModel):
user_id: str
tenant_id: str
user_object_id: str
etc.
A finite state machine, `atst.models.portfolio_state_machine.PortfolioStateMachine`, is created for each provisioning process and tied to an instance of the Portfolio class.
Each time the FSM is created/accessed it generates a list of states and the transitions between them.
There is a set of "system" states such as UNSTARTED, STARTING, STARTED, COMPLETED, FAILED, etc.
There is a set of CSP-specific states generated for each "stage" in the FSM:
TENANT_CREATED
TENANT_IN_PROGRESS
TENANT_FAILED
BILLING_PROFILE_CREATED
BILLING_PROFILE_IN_PROGRESS
BILLING_PROFILE_FAILED
etc.
There is a set of callbacks that are triggered as the process transitions between stages.
callback `PortfolioStateMachine.after_in_progress_callback`:
the CSP API call is made as the process transitions into the IN_PROGRESS state for each stage.
callback `PortfolioStateMachine.is_csp_data_valid`:
validates the collected data.
A transition into the next state can be triggered using `PortfolioStateMachine.trigger_next_transition`.
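A minimal usage sketch (assuming `portfolio` is an existing Portfolio instance; the trigger names come from the stage definitions above, and the exact chain of transitions is only roughly indicated):
from atst.domain.portfolios import Portfolios
fsm = Portfolios.get_or_create_state_machine(portfolio)
fsm.trigger_next_transition()  # UNSTARTED -> STARTING
fsm.trigger_next_transition()  # STARTING -> STARTED
fsm.trigger_next_transition()  # STARTED -> TENANT_IN_PROGRESS; after_in_progress_callback
                               # makes the CSP call and, if the data validates,
                               # finish_tenant advances the FSM to TENANT_CREATED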


@ -0,0 +1,62 @@
"""portfolio state machine table.
Revision ID: 59973fa17ded
Revises: 02ac8bdcf16f
Create Date: 2020-01-08 10:37:32.924245
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
import sqlalchemy_json
# revision identifiers, used by Alembic.
revision = '59973fa17ded' # pragma: allowlist secret
down_revision = '02ac8bdcf16f' # pragma: allowlist secret
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('portfolio_job_failures',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('task_id', sa.String(), nullable=False),
sa.Column('portfolio_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.ForeignKeyConstraint(['portfolio_id'], ['portfolios.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('portfolio_state_machines',
sa.Column('time_created', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('time_updated', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('deleted', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('id', postgresql.UUID(as_uuid=True), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('portfolio_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('state', sa.Enum('UNSTARTED', 'STARTING', 'STARTED', 'COMPLETED', 'FAILED', 'TENANT_CREATED', 'TENANT_IN_PROGRESS', 'TENANT_FAILED', 'BILLING_PROFILE_CREATED', 'BILLING_PROFILE_IN_PROGRESS', 'BILLING_PROFILE_FAILED', 'ADMIN_SUBSCRIPTION_CREATED', 'ADMIN_SUBSCRIPTION_IN_PROGRESS', 'ADMIN_SUBSCRIPTION_FAILED', name='fsmstates', native_enum=False), nullable=False),
sa.ForeignKeyConstraint(['portfolio_id'], ['portfolios.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.add_column('portfolios', sa.Column('app_migration', sa.String(), nullable=True))
op.add_column('portfolios', sa.Column('complexity', sa.ARRAY(sa.String()), nullable=True))
op.add_column('portfolios', sa.Column('complexity_other', sa.String(), nullable=True))
op.add_column('portfolios', sa.Column('csp_data', sqlalchemy_json.NestedMutableJson(), nullable=True))
op.add_column('portfolios', sa.Column('dev_team', sa.ARRAY(sa.String()), nullable=True))
op.add_column('portfolios', sa.Column('dev_team_other', sa.String(), nullable=True))
op.add_column('portfolios', sa.Column('native_apps', sa.String(), nullable=True))
op.add_column('portfolios', sa.Column('team_experience', sa.String(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('portfolios', 'team_experience')
op.drop_column('portfolios', 'native_apps')
op.drop_column('portfolios', 'dev_team_other')
op.drop_column('portfolios', 'dev_team')
op.drop_column('portfolios', 'csp_data')
op.drop_column('portfolios', 'complexity_other')
op.drop_column('portfolios', 'complexity')
op.drop_column('portfolios', 'app_migration')
op.drop_table('portfolio_state_machines')
op.drop_table('portfolio_job_failures')
# ### end Alembic commands ###


@ -1,3 +1,5 @@
import importlib
from .cloud import MockCloudProvider
from .file_uploads import AzureUploader, MockUploader
from .reports import MockReportingProvider
@ -29,3 +31,19 @@ def make_csp_provider(app, csp=None):
app.csp = MockCSP(app, test_mode=True)
else:
app.csp = MockCSP(app)
def _stage_to_classname(stage):
return "".join(map(lambda word: word.capitalize(), stage.replace('_', ' ').split(" ")))
def get_stage_csp_class(stage, class_type):
"""
given a stage name and class_type return the class
class_type is either 'payload' or 'result'
"""
cls_name = "".join([_stage_to_classname(stage), "CSP", class_type.capitalize()])
try:
return getattr(importlib.import_module("atst.domain.csp.cloud"), cls_name)
except AttributeError:
print("could not import CSP Result class <%s>" % cls_name)


@ -1,12 +1,12 @@
from typing import Dict
import re
from typing import Dict
from uuid import uuid4
from pydantic import BaseModel
from atst.models.user import User
from atst.models.application import Application
from atst.models.environment import Environment
from atst.models.environment_role import EnvironmentRole
from .policy import AzurePolicyManager
class GeneralCSPException(Exception):
@ -142,6 +142,92 @@ class BaselineProvisionException(GeneralCSPException):
)
class BaseCSPPayload(BaseModel):
#{"username": "mock-cloud", "pass": "shh"}
creds: Dict
class TenantCSPPayload(BaseCSPPayload):
user_id: str
password: str
domain_name: str
first_name: str
last_name: str
country_code: str
password_recovery_email_address: str
class TenantCSPResult(BaseModel):
user_id: str
tenant_id: str
user_object_id: str
class BillingProfileAddress(BaseModel):
address: Dict
"""
"address": {
"firstName": "string",
"lastName": "string",
"companyName": "string",
"addressLine1": "string",
"addressLine2": "string",
"addressLine3": "string",
"city": "string",
"region": "string",
"country": "string",
"postalCode": "string"
},
"""
class BillingProfileCLINBudget(BaseModel):
clinBudget: Dict
"""
"clinBudget": {
"amount": 0,
"startDate": "2019-12-18T16:47:40.909Z",
"endDate": "2019-12-18T16:47:40.909Z",
"externalReferenceId": "string"
}
"""
class BillingProfileCSPPayload(BaseCSPPayload, BillingProfileAddress, BillingProfileCLINBudget):
displayName: str
poNumber: str
invoiceEmailOptIn: str
"""
{
"displayName": "string",
"poNumber": "string",
"address": {
"firstName": "string",
"lastName": "string",
"companyName": "string",
"addressLine1": "string",
"addressLine2": "string",
"addressLine3": "string",
"city": "string",
"region": "string",
"country": "string",
"postalCode": "string"
},
"invoiceEmailOptIn": true,
Note: These last 2 are also the body for adding/updating new TOs/clins
"enabledAzurePlans": [
{
"skuId": "string"
}
],
"clinBudget": {
"amount": 0,
"startDate": "2019-12-18T16:47:40.909Z",
"endDate": "2019-12-18T16:47:40.909Z",
"externalReferenceId": "string"
}
}
"""
class CloudProviderInterface:
def root_creds(self) -> Dict:
raise NotImplementedError()
@ -325,6 +411,71 @@ class MockCloudProvider(CloudProviderInterface):
return {"id": self._id(), "credentials": self._auth_credentials}
def create_tenant(self, payload):
"""
payload is an instance of TenantCSPPayload data class
"""
self._authorize(payload.creds)
self._delay(1, 5)
self._maybe_raise(self.NETWORK_FAILURE_PCT, self.NETWORK_EXCEPTION)
self._maybe_raise(self.SERVER_FAILURE_PCT, self.SERVER_EXCEPTION)
self._maybe_raise(self.UNAUTHORIZED_RATE, self.AUTHORIZATION_EXCEPTION)
# return tenant id, tenant owner id and tenant owner object id from:
response = {"tenantId": "string", "userId": "string", "objectId": "string"}
return {
"tenant_id": response["tenantId"],
"user_id": response["userId"],
"user_object_id": response["objectId"],
}
def create_billing_profile(self, creds, tenant_admin_details, billing_owner_id):
# call billing profile creation endpoint, specifying owner
# Payload:
"""
{
"displayName": "string",
"poNumber": "string",
"address": {
"firstName": "string",
"lastName": "string",
"companyName": "string",
"addressLine1": "string",
"addressLine2": "string",
"addressLine3": "string",
"city": "string",
"region": "string",
"country": "string",
"postalCode": "string"
},
"invoiceEmailOptIn": true,
Note: These last 2 are also the body for adding/updating new TOs/clins
"enabledAzurePlans": [
{
"skuId": "string"
}
],
"clinBudget": {
"amount": 0,
"startDate": "2019-12-18T16:47:40.909Z",
"endDate": "2019-12-18T16:47:40.909Z",
"externalReferenceId": "string"
}
}
"""
# response will be mostly the same as the body, but we only really care about the id
self._maybe_raise(self.NETWORK_FAILURE_PCT, self.NETWORK_EXCEPTION)
self._maybe_raise(self.SERVER_FAILURE_PCT, self.SERVER_EXCEPTION)
self._maybe_raise(self.UNAUTHORIZED_RATE, self.AUTHORIZATION_EXCEPTION)
response = {"id": "string"}
return {"billing_profile_id": response["id"]}
def create_or_update_user(self, auth_credentials, user_info, csp_role_id):
self._authorize(auth_credentials)
@ -401,18 +552,15 @@ REMOTE_ROOT_ROLE_DEF_ID = "/providers/Microsoft.Authorization/roleDefinitions/00
class AzureSDKProvider(object):
def __init__(self):
from azure.mgmt import subscription, authorization, managementgroups
from azure.mgmt.resource import policy
from azure.mgmt import subscription, authorization
import azure.graphrbac as graphrbac
import azure.common.credentials as credentials
from msrestazure.azure_cloud import AZURE_PUBLIC_CLOUD
self.subscription = subscription
self.authorization = authorization
self.managementgroups = managementgroups
self.graphrbac = graphrbac
self.credentials = credentials
self.policy = policy
# may change to a JEDI cloud
self.cloud = AZURE_PUBLIC_CLOUD
@ -430,28 +578,45 @@ class AzureCloudProvider(CloudProviderInterface):
else:
self.sdk = azure_sdk_provider
self.policy_manager = AzurePolicyManager(config["AZURE_POLICY_LOCATION"])
def create_environment(
self, auth_credentials: Dict, user: User, environment: Environment
):
# since this operation would only occur within a tenant, should we source the tenant
# via lookup from environment once we've created the portfolio csp data schema
# something like this:
# environment_tenant = environment.application.portfolio.csp_data.get('tenant_id', None)
# though we'd probably source the whole credentials for these calls from the portfolio csp
# data, as it would have to be where we store the creds for the at-at user within the portfolio tenant
# credentials = self._get_credential_obj(environment.application.portfolio.csp_data.get_creds())
credentials = self._get_credential_obj(self._root_creds)
display_name = f"{environment.application.name}_{environment.name}_{environment.id}" # proposed format
management_group_id = "?" # management group id chained from environment
parent_id = "?" # from environment.application
sub_client = self.sdk.subscription.SubscriptionClient(credentials)
management_group = self._create_management_group(
credentials, management_group_id, display_name, parent_id,
display_name = f"{environment.application.name}_{environment.name}_{environment.id}" # proposed format
billing_profile_id = "?" # something chained from environment?
sku_id = AZURE_SKU_ID
# we want to set AT-AT as an owner here
# we could potentially associate subscriptions with "management groups" per DOD component
body = self.sdk.subscription.models.ModernSubscriptionCreationParameters(
display_name,
billing_profile_id,
sku_id,
# owner=<AdPrincipal: for AT-AT user>
)
return management_group
# These 2 seem like something that might be worthwhile to allow tiebacks to
# TOs filed for the environment
billing_account_name = "?"
invoice_section_name = "?"
# We may also want to create billing sections in the enrollment account
sub_creation_operation = sub_client.subscription_factory.create_subscription(
billing_account_name, invoice_section_name, body
)
# the resulting object from this process is a link to the new subscription
# not a subscription model, so we'll have to unpack the ID
new_sub = sub_creation_operation.result()
subscription_id = self._extract_subscription_id(new_sub.subscription_link)
if subscription_id:
return subscription_id
else:
# troublesome error, subscription should exist at this point
# but we just don't have a valid ID
pass
def create_atat_admin_user(
self, auth_credentials: Dict, csp_environment_id: str
@ -490,126 +655,136 @@ class AzureCloudProvider(CloudProviderInterface):
"role_name": role_assignment_id,
}
def _create_application(self, auth_credentials: Dict, application: Application):
management_group_name = str(uuid4()) # can be anything, not just uuid
display_name = application.name # Does this need to be unique?
credentials = self._get_credential_obj(auth_credentials)
parent_id = "?" # application.portfolio.csp_details.management_group_id
return self._create_management_group(
credentials, management_group_name, display_name, parent_id,
def create_tenant(self, payload):
# auth as SP that is allowed to create tenant? (tenant creation sp creds)
# create tenant with owner details (populated from portfolio point of contact, pw is generated)
# return tenant id, tenant owner id and tenant owner object id from:
response = {"tenantId": "string", "userId": "string", "objectId": "string"}
return self._ok(
{
"tenant_id": response["tenantId"],
"user_id": response["userId"],
"user_object_id": response["objectId"],
}
)
def _create_management_group(
self, credentials, management_group_id, display_name, parent_id=None,
):
mgmgt_group_client = self.sdk.managementgroups.ManagementGroupsAPI(credentials)
create_parent_grp_info = self.sdk.managementgroups.models.CreateParentGroupInfo(
id=parent_id
)
create_mgmt_grp_details = self.sdk.managementgroups.models.CreateManagementGroupDetails(
parent=create_parent_grp_info
)
mgmt_grp_create = self.sdk.managementgroups.models.CreateManagementGroupRequest(
name=management_group_id,
display_name=display_name,
details=create_mgmt_grp_details,
)
create_request = mgmgt_group_client.management_groups.create_or_update(
management_group_id, mgmt_grp_create
)
def create_billing_owner(self, creds, tenant_admin_details):
# authenticate as tenant_admin
# create billing owner identity
# result is a synchronous wait, might need to do a poll instead to handle first mgmt group create
# since we were told it could take 10+ minutes to complete, unless this handles that polling internally
return create_request.result()
# TODO: Lookup response format
# Managed service identity?
response = {"id": "string"}
return self._ok({"billing_owner_id": response["id"]})
def _create_subscription(
self,
credentials,
display_name,
billing_profile_id,
sku_id,
management_group_id,
billing_account_name,
invoice_section_name,
):
sub_client = self.sdk.subscription.SubscriptionClient(credentials)
billing_profile_id = "?" # where do we source this?
sku_id = AZURE_SKU_ID
# These 2 seem like something that might be worthwhile to allow tiebacks to
# TOs filed for the environment
billing_account_name = "?" # from TO?
invoice_section_name = "?" # from TO?
body = self.sdk.subscription.models.ModernSubscriptionCreationParameters(
display_name=display_name,
billing_profile_id=billing_profile_id,
sku_id=sku_id,
management_group_id=management_group_id,
)
# We may also want to create billing sections in the enrollment account
sub_creation_operation = sub_client.subscription_factory.create_subscription(
billing_account_name, invoice_section_name, body
)
# the resulting object from this process is a link to the new subscription
# not a subscription model, so we'll have to unpack the ID
new_sub = sub_creation_operation.result()
subscription_id = self._extract_subscription_id(new_sub.subscription_link)
if subscription_id:
return subscription_id
else:
# troublesome error, subscription should exist at this point
# but we just don't have a valid ID
pass
AZURE_MANAGEMENT_API = "https://management.azure.com"
def _create_policy_definition(
self, credentials, subscription_id, management_group_id, properties,
):
def assign_billing_owner(self, creds, billing_owner_id, tenant_id):
# TODO: Do we source role definition ID from config, api or self-defined?
# TODO: If from api,
"""
Requires credentials that have AZURE_MANAGEMENT_API
specified as the resource. The Service Principal
specified in the credentials must have the "Resource
Policy Contributor" role assigned with a scope at least
as high as the management group specified by
management_group_id.
Arguments:
credentials -- ServicePrincipalCredentials
subscription_id -- str, ID of the subscription (just the UUID, not the path)
management_group_id -- str, ID of the management group (just the UUID, not the path)
properties -- dictionary, the "properties" section of a valid Azure policy definition document
Returns:
azure.mgmt.resource.policy.[api version].models.PolicyDefinition: the PolicyDefinition object provided to Azure
Raises:
TBD
{
"principalId": "string",
"principalTenantId": "string",
"billingRoleDefinitionId": "string"
}
"""
# TODO: which subscription would this be?
client = self.sdk.policy.PolicyClient(credentials, subscription_id)
definition = client.policy_definitions.models.PolicyDefinition(
policy_type=properties.get("policyType"),
mode=properties.get("mode"),
display_name=properties.get("displayName"),
description=properties.get("description"),
policy_rule=properties.get("policyRule"),
parameters=properties.get("parameters"),
return self._ok()
def create_billing_profile(self, creds, tenant_admin_details, billing_owner_id):
# call billing profile creation endpoint, specifying owner
# Payload:
"""
{
"displayName": "string",
"poNumber": "string",
"address": {
"firstName": "string",
"lastName": "string",
"companyName": "string",
"addressLine1": "string",
"addressLine2": "string",
"addressLine3": "string",
"city": "string",
"region": "string",
"country": "string",
"postalCode": "string"
},
"invoiceEmailOptIn": true,
Note: These last 2 are also the body for adding/updating new TOs/clins
"enabledAzurePlans": [
{
"skuId": "string"
}
],
"clinBudget": {
"amount": 0,
"startDate": "2019-12-18T16:47:40.909Z",
"endDate": "2019-12-18T16:47:40.909Z",
"externalReferenceId": "string"
}
}
"""
# response will be mostly the same as the body, but we only really care about the id
response = {"id": "string"}
return self._ok({"billing_profile_id": response["id"]})
def report_clin(self, creds, clin_id, clin_amount, clin_start, clin_end, clin_to):
# should consumer be responsible for reporting each clin or
# should this take a list and manage the sequential reporting?
""" Payload
{
"enabledAzurePlans": [
{
"skuId": "string"
}
],
"clinBudget": {
"amount": 0,
"startDate": "2019-12-18T16:47:40.909Z",
"endDate": "2019-12-18T16:47:40.909Z",
"externalReferenceId": "string"
}
}
"""
# we don't need any of the returned info for this
return self._ok()
def create_remote_admin(self, creds, tenant_details):
# create app/service principal within tenant, with name constructed from tenant details
# assign principal global admin
# needs to call out to CLI with tenant owner username/password, prototyping for that underway
# return identifier and creds to consumer for storage
response = {"clientId": "string", "secretKey": "string", "tenantId": "string"}
return self._ok(
{
"client_id": response["clientId"],
"secret_key": response["secret_key"],
"tenant_id": response["tenantId"],
}
)
name = properties.get("displayName")
def force_tenant_admin_pw_update(self, creds, tenant_owner_id):
# use creds to update to force password recovery?
# not sure what the endpoint/method for this is, yet
return client.policy_definitions.create_or_update_at_management_group(
policy_definition_name=name,
parameters=definition,
management_group_id=management_group_id,
)
return self._ok()
def create_billing_alerts(self, TBD):
# TODO: Add azure-mgmt-consumption for Budget and Notification entities/operations
# TODO: Determine how to auth against that API using the SDK, doesn't seem possible at the moment
# TODO: billing alerts are registered as Notifications on Budget objects, which have start/end dates
# TODO: determine what the keys in the Notifications dict are supposed to be
# we may need to rotate budget objects when new TOs/CLINs are reported?
# we likely only want the budget ID, can be updated or replaced?
response = {"id": "id"}
return self._ok({"budget_id": response["id"]})
def _get_management_service_principal(self):
# we really should be using graph.microsoft.com, but i'm getting
@ -663,6 +838,7 @@ class AzureCloudProvider(CloudProviderInterface):
return sub_id_match.group(1)
def _get_credential_obj(self, creds, resource=None):
return self.sdk.credentials.ServicePrincipalCredentials(
client_id=creds.get("client_id"),
secret=creds.get("secret_key"),
@ -671,6 +847,27 @@ class AzureCloudProvider(CloudProviderInterface):
cloud_environment=self.sdk.cloud,
)
def _make_tenant_admin_cred_obj(self, username, password):
return self.sdk.credentials.UserPassCredentials(username, password)
def _ok(self, body=None):
return self._make_response("ok", body)
def _error(self, body=None):
return self._make_response("error", body)
def _make_response(self, status, body=dict()):
"""Create body for responses from API
Arguments:
status {string} -- "ok" or "error"
body {dict} -- dict containing details of response or error, if applicable
Returns:
dict -- status of call with body containing details
"""
return {"status": status, "body": body}
@property
def _root_creds(self):
return {


@ -2,4 +2,5 @@ from .portfolios import (
Portfolios,
PortfolioError,
PortfolioDeletionApplicationsExistError,
PortfolioStateMachines,
)


@ -1,11 +1,15 @@
from typing import List
from uuid import UUID
from atst.database import db
from atst.domain.permission_sets import PermissionSets
from atst.domain.authz import Authorization
from atst.domain.portfolio_roles import PortfolioRoles
from atst.domain.invitations import PortfolioInvitations
from atst.models import Permissions, PortfolioRole, PortfolioRoleStatus
from .query import PortfoliosQuery
from atst.domain.invitations import PortfolioInvitations
from atst.models import Portfolio, PortfolioStateMachine, FSMStates, Permissions, PortfolioRole, PortfolioRoleStatus
from .query import PortfoliosQuery, PortfolioStateMachinesQuery
from .scopes import ScopedPortfolio
@ -17,7 +21,24 @@ class PortfolioDeletionApplicationsExistError(Exception):
pass
class PortfolioStateMachines(object):
@classmethod
def create(cls, portfolio, **sm_attrs):
sm_attrs.update({'portfolio': portfolio})
sm = PortfolioStateMachinesQuery.create(**sm_attrs)
return sm
class Portfolios(object):
@classmethod
def get_or_create_state_machine(cls, portfolio):
"""
get or create Portfolio State Machine for a Portfolio
"""
return portfolio.state_machine or PortfolioStateMachines.create(portfolio)
@classmethod
def create(cls, user, portfolio_attrs):
portfolio = PortfoliosQuery.create(**portfolio_attrs)
@ -111,3 +132,40 @@ class Portfolios(object):
portfolio.description = new_data["description"]
PortfoliosQuery.add_and_commit(portfolio)
@classmethod
def base_provision_query(cls):
return (
db.session.query(Portfolio.id)
)
@classmethod
def get_portfolios_pending_provisioning(cls) -> List[UUID]:
"""
Any portfolio with a corresponding state machine that is either:
not started yet,
failed while creating a tenant, or
failed.
"""
results = (
cls.base_provision_query().\
join(PortfolioStateMachine).\
filter(
or_(
PortfolioStateMachine.state == FSMStates.UNSTARTED,
PortfolioStateMachine.state == FSMStates.FAILED,
PortfolioStateMachine.state == FSMStates.TENANT_FAILED,
)
)
)
return [id_ for id_, in results]
#db.session.query(PortfolioStateMachine).\
# filter(
# or_(
# PortfolioStateMachine.state==FSMStates.UNSTARTED,
# PortfolioStateMachine.state==FSMStates.UNSTARTED,
# )
# ).all()


@ -8,6 +8,12 @@ from atst.models.application_role import (
Status as ApplicationRoleStatus,
)
from atst.models.application import Application
from atst.models.portfolio_state_machine import PortfolioStateMachine
#from atst.models.application import Application
class PortfolioStateMachinesQuery(Query):
model = PortfolioStateMachine
class PortfoliosQuery(Query):


@ -7,14 +7,28 @@ from atst.models import (
EnvironmentJobFailure,
EnvironmentRoleJobFailure,
EnvironmentRole,
PortfolioJobFailure,
FSMStates,
)
from atst.domain.csp.cloud import CloudProviderInterface, GeneralCSPException
from atst.domain.environments import Environments
from atst.domain.portfolios import Portfolios
from atst.domain.portfolios.query import PortfolioStateMachinesQuery
from atst.domain.environment_roles import EnvironmentRoles
from atst.models.utils import claim_for_update
from atst.utils.localization import translate
class RecordPortfolioFailure(celery.Task):
def on_failure(self, exc, task_id, args, kwargs, einfo):
if "portfolio_id" in kwargs:
failure = PortfolioJobFailure(
portfolio_id=kwargs["portfolio_id"], task_id=task_id
)
db.session.add(failure)
db.session.commit()
class RecordEnvironmentFailure(celery.Task):
def on_failure(self, exc, task_id, args, kwargs, einfo):
if "environment_id" in kwargs:
@ -50,6 +64,7 @@ def send_notification_mail(recipients, subject, body):
app.mailer.send(recipients, subject, body)
def do_create_environment(csp: CloudProviderInterface, environment_id=None):
environment = Environments.get(environment_id)
@ -125,6 +140,16 @@ def do_work(fn, task, csp, **kwargs):
raise task.retry(exc=e)
def do_provision_portfolio(csp: CloudProviderInterface, portfolio_id=None):
portfolio = Portfolios.get_for_update(portfolio_id)
fsm = Portfolios.get_or_create_state_machine(portfolio)
fsm.trigger_next_transition()
@celery.task(bind=True, base=RecordPortfolioFailure)
def provision_portfolio(self, portfolio_id=None):
do_work(do_provision_portfolio, self, app.csp.cloud, portfolio_id=portfolio_id)
@celery.task(bind=True, base=RecordEnvironmentFailure)
def create_environment(self, environment_id=None):
do_work(do_create_environment, self, app.csp.cloud, environment_id=environment_id)
@ -144,6 +169,15 @@ def provision_user(self, environment_role_id=None):
)
@celery.task(bind=True)
def dispatch_provision_portfolio(self):
"""
Iterate over portfolios with a corresponding State Machine that have not completed.
"""
for portfolio_id in Portfolios.get_portfolios_pending_provisioning():
provision_portfolio.delay(portfolio_id=portfolio_id)
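# Provisioning flow sketch (summary of the pieces added in this commit, not new behavior):
#   celery beat (see the schedule in queue.py) -> dispatch_provision_portfolio every 60s
#     -> provision_portfolio.delay(portfolio_id=...) for each pending portfolio
#       -> do_provision_portfolio loads the portfolio, calls
#          Portfolios.get_or_create_state_machine, and triggers the next transition;
#          task failures are recorded as PortfolioJobFailure rows by
#          RecordPortfolioFailure.on_failure.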
@celery.task(bind=True)
def dispatch_create_environment(self):
for environment_id in Environments.get_environments_pending_creation(


@ -7,11 +7,12 @@ from .audit_event import AuditEvent
from .clin import CLIN, JEDICLINType
from .environment import Environment
from .environment_role import EnvironmentRole, CSPRole
from .job_failure import EnvironmentJobFailure, EnvironmentRoleJobFailure
from .job_failure import EnvironmentJobFailure, EnvironmentRoleJobFailure, PortfolioJobFailure
from .notification_recipient import NotificationRecipient
from .permissions import Permissions
from .permission_set import PermissionSet
from .portfolio import Portfolio
from .portfolio_state_machine import PortfolioStateMachine, FSMStates
from .portfolio_invitation import PortfolioInvitation
from .portfolio_role import PortfolioRole, Status as PortfolioRoleStatus
from .task_order import TaskOrder


@ -14,3 +14,9 @@ class EnvironmentRoleJobFailure(Base, mixins.JobFailureMixin):
__tablename__ = "environment_role_job_failures"
environment_role_id = Column(ForeignKey("environment_roles.id"), nullable=False)
class PortfolioJobFailure(Base, mixins.JobFailureMixin):
__tablename__ = "portfolio_job_failures"
portfolio_id = Column(ForeignKey("portfolios.id"), nullable=False)


@ -4,3 +4,4 @@ from .permissions import PermissionsMixin
from .deletable import DeletableMixin
from .invites import InvitesMixin
from .job_failure import JobFailureMixin
from .state_machines import FSMMixin


@ -0,0 +1,109 @@
from enum import Enum
from atst.database import db
class StageStates(Enum):
CREATED = "created"
IN_PROGRESS = "in progress"
FAILED = "failed"
class AzureStages(Enum):
TENANT = "tenant"
BILLING_PROFILE = "billing profile"
ADMIN_SUBSCRIPTION = "admin subscription"
def _build_csp_states(csp_stages):
states = {
'UNSTARTED' : "unstarted",
'STARTING' : "starting",
'STARTED' : "started",
'COMPLETED' : "completed",
'FAILED' : "failed",
}
for csp_stage in csp_stages:
for state in StageStates:
states[csp_stage.name+"_"+state.name] = csp_stage.value+" "+state.value
return states
FSMStates = Enum('FSMStates', _build_csp_states(AzureStages))
def _build_transitions(csp_stages):
transitions = []
states = []
compose_state = lambda csp_stage, state: getattr(FSMStates, "_".join([csp_stage.name, state.name]))
for stage_i, csp_stage in enumerate(csp_stages):
for state in StageStates:
states.append(dict(name=compose_state(csp_stage, state), tags=[csp_stage.name, state.name]))
if state == StageStates.CREATED:
if stage_i > 0:
src = compose_state(list(csp_stages)[stage_i-1] , StageStates.CREATED)
else:
src = FSMStates.STARTED
transitions.append(
dict(
trigger='create_'+csp_stage.name.lower(),
source=src,
dest=compose_state(csp_stage, StageStates.IN_PROGRESS),
after='after_in_progress_callback',
)
)
if state == StageStates.IN_PROGRESS:
transitions.append(
dict(
trigger='finish_'+csp_stage.name.lower(),
source=compose_state(csp_stage, state),
dest=compose_state(csp_stage, StageStates.CREATED),
conditions=['is_csp_data_valid'],
)
)
if state == StageStates.FAILED:
transitions.append(
dict(
trigger='fail_'+csp_stage.name.lower(),
source=compose_state(csp_stage, StageStates.IN_PROGRESS),
dest=compose_state(csp_stage, StageStates.FAILED),
)
)
return states, transitions
class FSMMixin():
system_states = [
{'name': FSMStates.UNSTARTED.name, 'tags': ['system']},
{'name': FSMStates.STARTING.name, 'tags': ['system']},
{'name': FSMStates.STARTED.name, 'tags': ['system']},
{'name': FSMStates.FAILED.name, 'tags': ['system']},
{'name': FSMStates.COMPLETED.name, 'tags': ['system']},
]
system_transitions = [
{'trigger': 'init', 'source': FSMStates.UNSTARTED, 'dest': FSMStates.STARTING},
{'trigger': 'start', 'source': FSMStates.STARTING, 'dest': FSMStates.STARTED},
{'trigger': 'reset', 'source': '*', 'dest': FSMStates.UNSTARTED},
{'trigger': 'fail', 'source': '*', 'dest': FSMStates.FAILED,}
]
def prepare_init(self, event): pass
def before_init(self, event): pass
def after_init(self, event): pass
def prepare_start(self, event): pass
def before_start(self, event): pass
def after_start(self, event): pass
def prepare_reset(self, event): pass
def before_reset(self, event): pass
def after_reset(self, event): pass
def fail_stage(self, stage):
fail_trigger = 'fail_'+stage
if fail_trigger in self.machine.get_triggers(self.current_state.name):
self.trigger(fail_trigger)
def finish_stage(self, stage):
finish_trigger = 'finish_'+stage
if finish_trigger in self.machine.get_triggers(self.current_state.name):
self.trigger(finish_trigger)
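# Illustrative sketch (not part of the original file): for AzureStages,
# _build_csp_states adds TENANT_CREATED, TENANT_IN_PROGRESS, TENANT_FAILED,
# BILLING_PROFILE_* and ADMIN_SUBSCRIPTION_* members to FSMStates alongside the
# system states, and _build_transitions produces one create_/finish_/fail_
# trigger per stage, e.g.:
#
#   states, transitions = _build_transitions(AzureStages)
#   [t["trigger"] for t in transitions]
#   # ['create_tenant', 'finish_tenant', 'fail_tenant',
#   #  'create_billing_profile', 'finish_billing_profile', 'fail_billing_profile',
#   #  'create_admin_subscription', 'finish_admin_subscription', 'fail_admin_subscription']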


@ -11,6 +11,9 @@ from atst.domain.permission_sets import PermissionSets
from atst.utils import first_or_none
from atst.database import db
from sqlalchemy_json import NestedMutableJson
class Portfolio(
Base, mixins.TimestampsMixin, mixins.AuditableMixin, mixins.DeletableMixin
@ -19,16 +22,30 @@ class Portfolio(
id = types.Id()
name = Column(String, nullable=False)
description = Column(String)
defense_component = Column(
ARRAY(String), nullable=False
String, nullable=False
) # Department of Defense Component
app_migration = Column(String) # App Migration
complexity = Column(ARRAY(String)) # Application Complexity
complexity_other = Column(String)
description = Column(String)
dev_team = Column(ARRAY(String)) # Development Team
dev_team_other = Column(String)
native_apps = Column(String) # Native Apps
team_experience = Column(String) # Team Experience
csp_data = Column(NestedMutableJson, nullable=True)
applications = relationship(
"Application",
back_populates="portfolio",
primaryjoin="and_(Application.portfolio_id == Portfolio.id, Application.deleted == False)",
)
state_machine = relationship("PortfolioStateMachine",
uselist=False, back_populates="portfolio")
roles = relationship("PortfolioRole")
task_orders = relationship("TaskOrder")
@ -77,7 +94,7 @@ class Portfolio(
"""
Return the earliest period of performance start date and latest period
of performance end date for all active task orders in a portfolio.
@return: (datetime.date or None, datetime.date or None)
@return: (datetime.date or None, datetime.date or None)
"""
start_dates = (
task_order.start_date


@ -0,0 +1,182 @@
import importlib
from sqlalchemy import Column, ForeignKey, Enum as SQLAEnum
from sqlalchemy.orm import relationship, reconstructor
from sqlalchemy.dialects.postgresql import UUID
from pydantic import ValidationError as PydanticValidationError
from transitions import Machine
from transitions.extensions.states import add_state_features, Tags
from flask import current_app as app
from atst.domain.csp.cloud import ConnectionException, UnknownServerException
from atst.domain.csp import MockCSP, AzureCSP, get_stage_csp_class
from atst.database import db
from atst.queue import celery
from atst.models.types import Id
from atst.models.base import Base
import atst.models.mixins as mixins
from atst.models.mixins.state_machines import (
FSMStates, AzureStages, _build_transitions
)
@add_state_features(Tags)
class StateMachineWithTags(Machine):
pass
class PortfolioStateMachine(
Base, mixins.TimestampsMixin, mixins.AuditableMixin, mixins.DeletableMixin, mixins.FSMMixin,
):
__tablename__ = "portfolio_state_machines"
id = Id()
portfolio_id = Column(
UUID(as_uuid=True),
ForeignKey("portfolios.id"),
)
portfolio = relationship("Portfolio", back_populates="state_machine")
state = Column(
SQLAEnum(FSMStates, native_enum=False, create_constraint=False),
default=FSMStates.UNSTARTED, nullable=False
)
def __init__(self, portfolio, csp=None, **kwargs):
self.portfolio = portfolio
self.attach_machine()
def after_state_change(self, event):
db.session.add(self)
db.session.commit()
@reconstructor
def attach_machine(self):
"""
This is called as a result of a sqlalchemy query.
Attach a machine depending on the current state.
"""
self.machine = StateMachineWithTags(
model = self,
send_event=True,
initial=self.current_state if self.state else FSMStates.UNSTARTED,
auto_transitions=False,
after_state_change='after_state_change',
)
states, transitions = _build_transitions(AzureStages)
self.machine.add_states(self.system_states+states)
self.machine.add_transitions(self.system_transitions+transitions)
@property
def current_state(self):
if isinstance(self.state, str):
return getattr(FSMStates, self.state)
return self.state
def trigger_next_transition(self):
state_obj = self.machine.get_state(self.state)
if state_obj.is_system:
if self.current_state in (FSMStates.UNSTARTED, FSMStates.STARTING):
# call the first trigger available for these two system states
trigger_name = self.machine.get_triggers(self.current_state.name)[0]
self.trigger(trigger_name)
elif self.current_state == FSMStates.STARTED:
# get the first trigger that starts with 'create_'
create_trigger = list(filter(lambda trigger: trigger.startswith('create_'),
self.machine.get_triggers(FSMStates.STARTED.name)))[0]
self.trigger(create_trigger)
elif state_obj.is_IN_PROGRESS:
pass
#elif state_obj.is_TENANT:
# pass
#elif state_obj.is_BILLING_PROFILE:
# pass
#@with_payload
def after_in_progress_callback(self, event):
stage = self.current_state.name.split('_IN_PROGRESS')[0].lower()
if stage == 'tenant':
payload = dict(
creds={"username": "mock-cloud", "pass": "shh"},
user_id='123',
password='123',
domain_name='123',
first_name='john',
last_name='doe',
country_code='US',
password_recovery_email_address='password@email.com'
)
elif stage == 'billing_profile':
payload = dict(
creds={"username": "mock-cloud", "pass": "shh"},
)
payload_data_cls = get_stage_csp_class(stage, "payload")
if not payload_data_cls:
self.fail_stage(stage)
try:
payload_data = payload_data_cls(**payload)
except PydanticValidationError as exc:
print(exc.json())
self.fail_stage(stage)
csp = event.kwargs.get('csp')
if csp is not None:
self.csp = AzureCSP(app).cloud
else:
self.csp = MockCSP(app).cloud
for attempt in range(5):
try:
response = getattr(self.csp, 'create_'+stage)(payload_data)
except (ConnectionException, UnknownServerException) as exc:
print('caught exception. retry', attempt)
continue
else: break
else:
# failed all attempts
self.fail_stage(stage)
if self.portfolio.csp_data is None:
self.portfolio.csp_data = {}
self.portfolio.csp_data[stage+"_data"] = response
db.session.add(self.portfolio)
db.session.commit()
self.finish_stage(stage)
def is_csp_data_valid(self, event):
# check portfolio csp details json field for fields
if self.portfolio.csp_data is None or \
not isinstance(self.portfolio.csp_data, dict):
return False
stage = self.current_state.name.split('_IN_PROGRESS')[0].lower()
stage_data = self.portfolio.csp_data.get(stage+"_data")
cls = get_stage_csp_class(stage, "result")
if not cls:
return False
try:
cls(**stage_data)
except PydanticValidationError as exc:
print(exc.json())
return False
return True
#print('failed condition', self.portfolio.csp_data)
@property
def application_id(self):
return None


@ -7,6 +7,10 @@ celery = Celery(__name__)
def update_celery(celery, app):
celery.conf.update(app.config)
celery.conf.CELERYBEAT_SCHEDULE = {
"beat-dispatch_provision_portfolio": {
"task": "atst.jobs.dispatch_provision_portfolio",
"schedule": 60,
},
"beat-dispatch_create_environment": {
"task": "atst.jobs.dispatch_create_environment",
"schedule": 60,


@ -6,4 +6,6 @@ app = make_app(make_config())
ctx = app.app_context()
ctx.push()
print("\nWelcome to atst. This shell has all models in scope, and a SQLAlchemy session called db.")
print(
"\nWelcome to atst. This shell has all models in scope, and a SQLAlchemy session called db."
)


@ -0,0 +1,38 @@
import pytest
from tests.factories import (
PortfolioFactory,
PortfolioStateMachineFactory,
)
from atst.models import FSMStates
@pytest.fixture(scope="function")
def portfolio():
portfolio = PortfolioFactory.create()
return portfolio
def test_fsm_creation(portfolio):
sm = PortfolioStateMachineFactory.create(portfolio=portfolio)
assert sm.portfolio
def test_fsm_transition_start(portfolio):
sm = PortfolioStateMachineFactory.create(portfolio=portfolio)
assert sm.portfolio
assert sm.state == FSMStates.UNSTARTED
# next_state does not create the trigger callbacks !!!
#sm.next_state()
sm.init()
assert sm.state == FSMStates.STARTING
sm.start()
assert sm.state == FSMStates.STARTED
#import ipdb;ipdb.set_trace()
sm.create_tenant(a=1, b=2)
assert sm.state == FSMStates.TENANT_CREATED


@ -1,5 +1,4 @@
import pytest
import random
from uuid import uuid4
from atst.domain.exceptions import NotFoundError, UnauthorizedError
@ -15,6 +14,7 @@ from atst.domain.environments import Environments
from atst.domain.permission_sets import PermissionSets, PORTFOLIO_PERMISSION_SETS
from atst.models.application_role import Status as ApplicationRoleStatus
from atst.models.portfolio_role import Status as PortfolioRoleStatus
from atst.models import FSMStates
from tests.factories import (
ApplicationFactory,
@ -22,6 +22,7 @@ from tests.factories import (
UserFactory,
PortfolioRoleFactory,
PortfolioFactory,
PortfolioStateMachineFactory,
get_all_portfolio_permission_sets,
)
@ -98,7 +99,7 @@ def test_scoped_portfolio_returns_all_applications_for_portfolio_admin(
Applications.create(
portfolio.owner,
portfolio,
"My Application %s" % (random.randrange(1, 1000)),
"My Application",
"My application",
["dev", "staging", "prod"],
)
@ -121,7 +122,7 @@ def test_scoped_portfolio_returns_all_applications_for_portfolio_owner(
Applications.create(
portfolio.owner,
portfolio,
"My Application %s" % (random.randrange(1, 1000)),
"My Application",
"My application",
["dev", "staging", "prod"],
)
@ -254,3 +255,17 @@ def test_for_user_does_not_include_deleted_application_roles():
status=ApplicationRoleStatus.ACTIVE, user=user2, application=app, deleted=True
)
assert len(Portfolios.for_user(user2)) == 0
def test_create_state_machine(portfolio):
fsm = Portfolios.create_state_machine(portfolio)
assert fsm
def test_get_portfolios_pending_provisioning(session):
for x in range(5):
portfolio = PortfolioFactory.create()
sm = PortfolioStateMachineFactory.create(portfolio=portfolio)
if x == 2: sm.state = FSMStates.COMPLETED
assert len(Portfolios.get_portfolios_pending_provisioning()) == 4


@ -342,3 +342,16 @@ class NotificationRecipientFactory(Base):
model = NotificationRecipient
email = factory.Faker("email")
class PortfolioStateMachineFactory(Base):
class Meta:
model = PortfolioStateMachine
portfolio = factory.SubFactory(PortfolioFactory)
@classmethod
def _create(cls, model_class, *args, **kwargs):
portfolio = kwargs.pop("portfolio", PortfolioFactory.create())
kwargs.update({'portfolio': portfolio})
fsm = super()._create(model_class, *args, **kwargs)
return fsm


@ -5,16 +5,18 @@ from unittest.mock import Mock
from threading import Thread
from atst.domain.csp.cloud import MockCloudProvider
from atst.domain.portfolios import Portfolios
from atst.jobs import (
RecordEnvironmentFailure,
RecordEnvironmentRoleFailure,
do_create_environment,
do_create_atat_admin_user,
dispatch_create_environment,
dispatch_create_atat_admin_user,
create_environment,
dispatch_provision_portfolio,
dispatch_provision_user,
create_environment,
do_provision_user,
do_provision_portfolio,
)
from atst.models.utils import claim_for_update
from atst.domain.exceptions import ClaimFailedException
@ -22,6 +24,7 @@ from tests.factories import (
EnvironmentFactory,
EnvironmentRoleFactory,
PortfolioFactory,
PortfolioStateMachineFactory,
ApplicationRoleFactory,
)
from atst.models import CSPRole, EnvironmentRole, ApplicationRoleStatus
@ -31,6 +34,11 @@ from atst.models import CSPRole, EnvironmentRole, ApplicationRoleStatus
def csp():
return Mock(wraps=MockCloudProvider({}, with_delay=False, with_failure=False))
@pytest.fixture(scope="function")
def portfolio():
portfolio = PortfolioFactory.create()
return portfolio
def test_environment_job_failure(celery_app, celery_worker):
@celery_app.task(bind=True, base=RecordEnvironmentFailure)
@ -248,6 +256,7 @@ def test_claim_for_update(session):
def test_dispatch_provision_user(csp, session, celery_app, celery_worker, monkeypatch):
# Given that I have four environment roles:
# (A) one of which has a completed status
# (B) one of which has an environment that has not been provisioned
@ -306,3 +315,22 @@ def test_do_provision_user(csp, session):
)
# I expect that the EnvironmentRole now has a csp_user_id
assert environment_role.csp_user_id
def test_dispatch_provision_portfolio(csp, session, portfolio, celery_app, celery_worker, monkeypatch):
sm = PortfolioStateMachineFactory.create(portfolio=portfolio)
mock = Mock()
monkeypatch.setattr("atst.jobs.provision_portfolio", mock)
dispatch_provision_portfolio.run()
mock.delay.assert_called_once_with(portfolio_id=portfolio.id)
def test_do_provision_portfolio(csp, session, portfolio):
do_provision_portfolio(csp=csp, portfolio_id=portfolio.id)
session.refresh(portfolio)
assert portfolio.state_machine
def test_provision_portfolio_create_tenant(csp, session, portfolio, celery_app, celery_worker, monkeypatch):
sm = PortfolioStateMachineFactory.create(portfolio=portfolio)
#mock = Mock()
#monkeypatch.setattr("atst.jobs.provision_portfolio", mock)
#dispatch_provision_portfolio.run()
#mock.delay.assert_called_once_with(portfolio_id=portfolio.id)