diff --git a/Dockerfile b/Dockerfile index 40f3468bf..2a1ec56c0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -20,7 +20,7 @@ COPY . ./ ENV APP_HOST=0.0.0.0 ENV APP_PORT=80 -ENV RELOAD="" +ENV RELOAD="true" ENTRYPOINT ["pipenv", "run"] CMD if [ "$RELOAD" ]; then uvicorn stac_api.app:app --host=${APP_HOST} --port=${APP_PORT} --reload ; \ diff --git a/Pipfile.lock b/Pipfile.lock index 087a50acd..fa9fd222f 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -45,52 +45,52 @@ }, "boto3": { "hashes": [ - "sha256:2fd3c2f42006988dc8ddae43c988aea481d11e2af7ab1deb83b293640357986c", - "sha256:4a499cc2f53dd557a88c6db6a552748a2abd83ffeda70ceb71dc8db39a027314" + "sha256:550a513315194292651bb6cc96e94185bfc4dc6b299c3cf1594882bdd16b3905", + "sha256:f8a2f0bf929af92c4d254d1e495f6642dd335818cc7172e1bdc3dfe28655fb94" ], - "version": "==1.16.57" + "version": "==1.16.59" }, "botocore": { "hashes": [ - "sha256:c756d65ffa989c5c0e92178175e41abf7b18ad19b2fe2e82e192f085e264e03a", - "sha256:cf7d108a4d67a0fe670379111927b5d9e0ff1160146c81c326bb9e54c2b8cb19" + "sha256:33959aa19cb6d336c47495c871b00d8670de0023b53bbbbd25790ba0bc5cefe9", + "sha256:67d273b5dcc5033edb2def244ecab51ca24351becf5c1644de279e5653e4e932" ], - "version": "==1.19.57" + "version": "==1.19.59" }, "brotli": { "hashes": [ - "sha256:1c48472a6ba3b113452355b9af0a60da5c2ae60477f8feda8346f8fd48e3e87c", - "sha256:c83aa123d56f2e060644427a882a36b3c12db93727ad7a7b9efd7d7f3e9cc2c4", - "sha256:3b78a24b5fd13c03ee2b7b86290ed20efdc95da75a3557cc06811764d5ad1126", - "sha256:93130612b837103e15ac3f9cbacb4613f9e348b58b3aad53721d92e57f96d46a", - "sha256:f909bbbc433048b499cb9db9e713b5d8d949e8c109a2a548502fb9aa8630f0b1", + "sha256:160c78292e98d21e73a4cc7f76a234390e516afcd982fa17e1422f7c6a9ce9c8", "sha256:16d528a45c2e1909c2798f27f7bf0a3feec1dc9e50948e738b961618e38b6a7b", - "sha256:db844eb158a87ccab83e868a762ea8024ae27337fc7ddcbfcddd157f841fdfe7", - "sha256:5913a1177fc36e30fcf6dc868ce23b0453952c78c04c266d3149b3d39e1410d6", - "sha256:afde17ae04d90fbe53afb628f7f2d4ca022797aa093e809de5c3cf276f61bbfa", - "sha256:b663f1e02de5d0573610756398e44c130add0eb9a3fc912a09665332942a2efb", - "sha256:cfc391f4429ee0a9370aa93d812a52e1fee0f37a81861f4fdd1f4fb28e8547c3", - "sha256:defed7ea5f218a9f2336301e6fd379f55c655bea65ba2476346340a0ce6f74a1", - "sha256:35a3edbe18e876e596553c4007a087f8bcfd538f19bc116917b3c7522fca0429", - "sha256:c2415d9d082152460f2bd4e382a1e85aed233abc92db5a3880da2257dc7daf7b", - "sha256:97f715cf371b16ac88b8c19da00029804e20e25f30d80203417255d239f228b5", - "sha256:9d12cf2851759b8de8ca5fde36a59c08210a97ffca0eb94c532ce7b17c6a3d1d", + "sha256:1c48472a6ba3b113452355b9af0a60da5c2ae60477f8feda8346f8fd48e3e87c", + "sha256:268fe94547ba25b58ebc724680609c8ee3e5a843202e9a381f6f9c5e8bdb5c70", "sha256:269a5743a393c65db46a7bb982644c67ecba4b8d91b392403ad8a861ba6f495f", - "sha256:56d027eace784738457437df7331965473f2c0da2c70e1a1f6fdbae5402e0389", + "sha256:35a3edbe18e876e596553c4007a087f8bcfd538f19bc116917b3c7522fca0429", + "sha256:3b78a24b5fd13c03ee2b7b86290ed20efdc95da75a3557cc06811764d5ad1126", + "sha256:40d15c79f42e0a2c72892bf407979febd9cf91f36f495ffb333d1d04cebb34e4", + "sha256:4d1b810aa0ed773f81dceda2cc7b403d01057458730e309856356d4ef4188438", "sha256:503fa6af7da9f4b5780bb7e4cbe0c639b010f12be85d02c99452825dd0feef3f", - "sha256:68715970f16b6e92c574c30747c95cf8cf62804569647386ff032195dc89a430", - "sha256:854c33dad5ba0fbd6ab69185fec8dab89e13cda6b7d191ba111987df74f38761", - "sha256:268fe94547ba25b58ebc724680609c8ee3e5a843202e9a381f6f9c5e8bdb5c70", - 
"sha256:88c63a1b55f352b02c6ffd24b15ead9fc0e8bf781dbe070213039324922a2eea", - "sha256:7cb81373984cc0e4682f31bc3d6be9026006d96eecd07ea49aafb06897746452", - "sha256:160c78292e98d21e73a4cc7f76a234390e516afcd982fa17e1422f7c6a9ce9c8", - "sha256:61a7ee1f13ab913897dac7da44a73c6d44d48a4adff42a5701e3239791c96e14", + "sha256:56d027eace784738457437df7331965473f2c0da2c70e1a1f6fdbae5402e0389", + "sha256:5913a1177fc36e30fcf6dc868ce23b0453952c78c04c266d3149b3d39e1410d6", "sha256:5b6ef7d9f9c38292df3690fe3e302b5b530999fa90014853dcd0d6902fb59f26", "sha256:5cb1e18167792d7d21e21365d7650b72d5081ed476123ff7b8cac7f45189c0c7", + "sha256:61a7ee1f13ab913897dac7da44a73c6d44d48a4adff42a5701e3239791c96e14", + "sha256:68715970f16b6e92c574c30747c95cf8cf62804569647386ff032195dc89a430", "sha256:6b2ae9f5f67f89aade1fab0f7fd8f2832501311c363a21579d02defa844d9296", - "sha256:40d15c79f42e0a2c72892bf407979febd9cf91f36f495ffb333d1d04cebb34e4", "sha256:6c772d6c0a79ac0f414a9f8947cc407e119b8598de7621f39cacadae3cf57d12", - "sha256:4d1b810aa0ed773f81dceda2cc7b403d01057458730e309856356d4ef4188438" + "sha256:7cb81373984cc0e4682f31bc3d6be9026006d96eecd07ea49aafb06897746452", + "sha256:854c33dad5ba0fbd6ab69185fec8dab89e13cda6b7d191ba111987df74f38761", + "sha256:88c63a1b55f352b02c6ffd24b15ead9fc0e8bf781dbe070213039324922a2eea", + "sha256:93130612b837103e15ac3f9cbacb4613f9e348b58b3aad53721d92e57f96d46a", + "sha256:97f715cf371b16ac88b8c19da00029804e20e25f30d80203417255d239f228b5", + "sha256:9d12cf2851759b8de8ca5fde36a59c08210a97ffca0eb94c532ce7b17c6a3d1d", + "sha256:afde17ae04d90fbe53afb628f7f2d4ca022797aa093e809de5c3cf276f61bbfa", + "sha256:b663f1e02de5d0573610756398e44c130add0eb9a3fc912a09665332942a2efb", + "sha256:c2415d9d082152460f2bd4e382a1e85aed233abc92db5a3880da2257dc7daf7b", + "sha256:c83aa123d56f2e060644427a882a36b3c12db93727ad7a7b9efd7d7f3e9cc2c4", + "sha256:cfc391f4429ee0a9370aa93d812a52e1fee0f37a81861f4fdd1f4fb28e8547c3", + "sha256:db844eb158a87ccab83e868a762ea8024ae27337fc7ddcbfcddd157f841fdfe7", + "sha256:defed7ea5f218a9f2336301e6fd379f55c655bea65ba2476346340a0ce6f74a1", + "sha256:f909bbbc433048b499cb9db9e713b5d8d949e8c109a2a548502fb9aa8630f0b1" ], "version": "==1.0.9" }, @@ -103,11 +103,11 @@ }, "cachetools": { "hashes": [ - "sha256:3796e1de094f0eaca982441c92ce96c68c89cced4cd97721ab297ea4b16db90e", - "sha256:c6b07a6ded8c78bf36730b3dc452dfff7d95f2a12a2fed856b1a0cb13ca78c61" + "sha256:1d9d5f567be80f7c07d765e21b814326d78c61eb0c3a637dffc0e5d1796cb2e2", + "sha256:f469e29e7aa4cff64d8de4aad95ce76de8ea1125a16c68e0d93f65c3c3dc92e9" ], "markers": "python_version ~= '3.5'", - "version": "==4.2.0" + "version": "==4.2.1" }, "certifi": { "hashes": [ @@ -177,6 +177,14 @@ "markers": "python_version >= '3.6'", "version": "==0.63.0" }, + "fastapi-utils": { + "hashes": [ + "sha256:0e6c7fc1870b80e681494957abf65d4f4f42f4c7f70005918e9181b22f1bd759", + "sha256:dd0be7dc7f03fa681b25487a206651d99f2330d5a567fb8ab6cb5f8a06a29360" + ], + "markers": "python_version >= '3.6' and python_version < '4.0'", + "version": "==0.2.1" + }, "geoalchemy2": { "hashes": [ "sha256:379b0fc4ca5f9b5ef625719f47e22c9b8abd347aa78344e85f99d32594cfccd4", @@ -400,6 +408,14 @@ "markers": "python_version >= '3.6'", "version": "==1.19.5" }, + "packaging": { + "hashes": [ + "sha256:24e0da08660a87484d1602c30bb4902d74816b6985b93de36926f5bc95741858", + "sha256:78598185a7008a470d64526a8059de9aaa449238f280fc9eb6b13ba6c4109093" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==20.8" + }, "psycopg2-binary": { "hashes": [ 
"sha256:0deac2af1a587ae12836aa07970f5cb91964f05a7c6cdb69d8425ff4c15d4e2c", @@ -474,25 +490,26 @@ }, "pygeos": { "hashes": [ - "sha256:0df97a1cc15fef561c36596f065cac8e05e1b0de08292ad7a07140a5fa6b4015", - "sha256:1e95c79e34abad166824f3e0bf2412c9ea9873fcf51e0cacfb483f8955505aec", - "sha256:35b36a819a663a09f8585c6a67fc7c016004769ae08496b6de10b176e7ba61e1", - "sha256:36e2b60a987b2ce0ce988a334f2afa8925864456dec2c03891af8fd900a89504", - "sha256:45b7e1aaa5fc9ff53565ef089fb75c53c419ace8cee18385ec1d7c1515c17cbc", - "sha256:5294e7c8e732325a1f7d370ba0fea1faabc6d14926fbbb2b67a4ae618ed47ed2", - "sha256:650e56de79eceb75b216c3c08c6d703680d845395f5ea9fd67201bfa8240f6a0", - "sha256:6585a6adaad8ad3ebcf67e7854211fe9a2bc9beb59dc945b60db6ad40a3a9bd6", - "sha256:696f2106b7c4a4f33b813cac141cb8483d420d4209f8e1dbde94e5e572cdac16", - "sha256:914fe40a9da4a4bd280448f173b48e9232aa652d5ff38e23c7039633fe57e92f", - "sha256:92bae740bc2ccbb9d03f86995e936ff5a6e41b19a9da0499e8985e6e940a7a93", - "sha256:97a16e1ea243ac65176890d642597276c2c413ba8442a8b4f52cbe1b24ba5f68", - "sha256:c2bcf0aa4ecad93617d35072230ec9215ca6f9537bf75f959cd4084117269369", - "sha256:e1ecb3d9edf56eb55208f8d9ff0d314926af3b0527e024172f2fe142b046f4ea", - "sha256:efb21aa3b01957c21237aaffbecfabc76f6411a9b7c75d359f1b9eb2d7239181", - "sha256:ff14eff6248b65b46481f06d4141b7b0dd9e3a0a6b9bf10b1a2c0eddccc94ad6" + "sha256:006aee5215d305afa96ce3e930a5fc00cfe9cc5e7c79f194922cd2eeb5547e2a", + "sha256:3eaee0e9f9fcc7b6370bcfe839b15fcaa93dab3c894681df3e3437692decc4f1", + "sha256:3f9126534e9bc88e872eae267032d0fab35f351d469caf6ffef4e1002bad4550", + "sha256:425221deda0e355166fcf7ce69613de4a52d6cff6ca667ccd1dc446fbad097c4", + "sha256:4341501c725c8945319970f1e572a514719a17ddec6571d319561bb4ae6edf17", + "sha256:531661dc547b0a9e03cf3cd8166a11d6a0e8cb3ecb71b8e6b240f6f28314023e", + "sha256:82ddf8a6f4ea93d952678e9523f08debea5361d908490b25ac5fd46ed712a652", + "sha256:845567e866dbd8821675dce32d63bc9e6aa3a0384e8d5a7f259baf3bafbfd0c9", + "sha256:8e5b246dd7f1c5c554245a545424cac226443babfd7d6ef71a02b5635da9e8da", + "sha256:aaa10b3a29a6fce9036f80ccfdf96ddb75575ecef8f0dbdbb0a8c456a3793084", + "sha256:b17fe2ed942e3b83c4afb4d0bb5870ac21a26f8353b137444f49a6b6beb71d2f", + "sha256:b55e9bb4e03a30b809bdda102574d7e09e79907fe4f09c5b53be8c47585e1185", + "sha256:bbf661e2d2aa1fcbe10b4f6d739384a4f58e1006d51ef8d0e577fc62618aec16", + "sha256:bc017fe4521577f8a4f62baf6c4f0882b8c8a0b0fbf65befe5060b1921dfa80e", + "sha256:c0584b20e95f80ee57277a6eb1e5d7f86600f8b1ef3c627d238e243afdcc0cc7", + "sha256:c3d127091b18d98cd460cc5b20e2acabcf74668e8170146f40a158c01bbad055", + "sha256:fae860732c532697f3708e8da669e057ce6692ed88740f493a522c5ed2f87aac" ], "markers": "python_version >= '3'", - "version": "==0.8" + "version": "==0.9" }, "pyparsing": { "hashes": [ @@ -536,29 +553,29 @@ }, "pyyaml": { "hashes": [ - "sha256:02c78d77281d8f8d07a255e57abdbf43b02257f59f50cc6b636937d68efa5dd0", - "sha256:0dc9f2eb2e3c97640928dec63fd8dc1dd91e6b6ed236bd5ac00332b99b5c2ff9", - "sha256:124fd7c7bc1e95b1eafc60825f2daf67c73ce7b33f1194731240d24b0d1bf628", - "sha256:26fcb33776857f4072601502d93e1a619f166c9c00befb52826e7b774efaa9db", - "sha256:31ba07c54ef4a897758563e3a0fcc60077698df10180abe4b8165d9895c00ebf", - "sha256:3c49e39ac034fd64fd576d63bb4db53cda89b362768a67f07749d55f128ac18a", - "sha256:52bf0930903818e600ae6c2901f748bc4869c0c406056f679ab9614e5d21a166", - "sha256:5a3f345acff76cad4aa9cb171ee76c590f37394186325d53d1aa25318b0d4a09", - "sha256:5e7ac4e0e79a53451dc2814f6876c2fa6f71452de1498bbe29c0b54b69a986f4", - 
"sha256:7242790ab6c20316b8e7bb545be48d7ed36e26bbe279fd56f2c4a12510e60b4b", - "sha256:737bd70e454a284d456aa1fa71a0b429dd527bcbf52c5c33f7c8eee81ac16b89", - "sha256:8635d53223b1f561b081ff4adecb828fd484b8efffe542edcfdff471997f7c39", - "sha256:8b818b6c5a920cbe4203b5a6b14256f0e5244338244560da89b7b0f1313ea4b6", - "sha256:8bf38641b4713d77da19e91f8b5296b832e4db87338d6aeffe422d42f1ca896d", - "sha256:a36a48a51e5471513a5aea920cdad84cbd56d70a5057cca3499a637496ea379c", - "sha256:b2243dd033fd02c01212ad5c601dafb44fbb293065f430b0d3dbf03f3254d615", - "sha256:cc547d3ead3754712223abb7b403f0a184e4c3eae18c9bb7fd15adef1597cc4b", - "sha256:cc552b6434b90d9dbed6a4f13339625dc466fd82597119897e9489c953acbc22", - "sha256:f3790156c606299ff499ec44db422f66f05a7363b39eb9d5b064f17bd7d7c47b", - "sha256:f7a21e3d99aa3095ef0553e7ceba36fb693998fbb1226f1392ce33681047465f", - "sha256:fdc6b2cb4b19e431994f25a9160695cc59a4e861710cc6fc97161c5e845fc579" - ], - "version": "==5.4" + "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", + "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", + "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", + "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", + "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", + "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", + "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", + "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", + "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", + "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", + "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", + "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", + "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", + "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", + "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", + "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", + "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", + "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", + "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", + "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", + "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc" + ], + "version": "==5.4.1" }, "rasterio": { "hashes": [ @@ -645,28 +662,28 @@ }, "shapely": { "hashes": [ + "sha256:052eb5b9ba756808a7825e8a8020fb146ec489dd5c919e7d139014775411e688", + "sha256:1641724c1055459a7e2b8bbe47ba25bdc89554582e62aec23cb3f3ca25f9b129", + "sha256:17df66e87d0fe0193910aeaa938c99f0b04f67b430edb8adae01e7be557b141b", + "sha256:182716ffb500d114b5d1b75d7fd9d14b7d3414cef3c38c0490534cc9ce20981a", + "sha256:2df5260d0f2983309776cb41bfa85c464ec07018d88c0ecfca23d40bfadae2f1", + "sha256:35be1c5d869966569d3dfd4ec31832d7c780e9df760e1fe52131105685941891", "sha256:46da0ea527da9cf9503e66c18bab6981c5556859e518fe71578b47126e54ca93", - "sha256:b40cc7bb089ae4aa9ddba1db900b4cd1bce3925d2a4b5837b639e49de054784f", - "sha256:a3774516c8a83abfd1ddffb8b6ec1b0935d7fe6ea0ff5c31a18bfdae567b4eba", - "sha256:a5c3a50d823c192f32615a2a6920e8c046b09e07a58eba220407335a9cd2e8ea", - "sha256:90a3e2ae0d6d7d50ff2370ba168fbd416a53e7d8448410758c5d6a5920646c1d", + 
"sha256:4c10f317e379cc404f8fc510cd9982d5d3e7ba13a9cfd39aa251d894c6366798", + "sha256:4f3c59f6dbf86a9fc293546de492f5e07344e045f9333f3a753f2dda903c45d1", "sha256:60e5b2282619249dbe8dc5266d781cc7d7fb1b27fa49f8241f2167672ad26719", - "sha256:e3afccf0437edc108eef1e2bb9cc4c7073e7705924eb4cd0bf7715cd1ef0ce1b", + "sha256:6593026cd3f5daaea12bcc51ae5c979318070fefee210e7990cb8ac2364e79a1", + "sha256:6871acba8fbe744efa4f9f34e726d070bfbf9bffb356a8f6d64557846324232b", "sha256:791477edb422692e7dc351c5ed6530eb0e949a31b45569946619a0d9cd5f53cb", - "sha256:4f3c59f6dbf86a9fc293546de492f5e07344e045f9333f3a753f2dda903c45d1", - "sha256:de618e67b64a51a0768d26a9963ecd7d338a2cf6e9e7582d2385f88ad005b3d1", - "sha256:182716ffb500d114b5d1b75d7fd9d14b7d3414cef3c38c0490534cc9ce20981a", + "sha256:8e7659dd994792a0aad8fb80439f59055a21163e236faf2f9823beb63a380e19", "sha256:8f15b6ce67dcc05b61f19c689b60f3fe58550ba994290ff8332f711f5aaa9840", + "sha256:90a3e2ae0d6d7d50ff2370ba168fbd416a53e7d8448410758c5d6a5920646c1d", + "sha256:a3774516c8a83abfd1ddffb8b6ec1b0935d7fe6ea0ff5c31a18bfdae567b4eba", + "sha256:a5c3a50d823c192f32615a2a6920e8c046b09e07a58eba220407335a9cd2e8ea", + "sha256:b40cc7bb089ae4aa9ddba1db900b4cd1bce3925d2a4b5837b639e49de054784f", "sha256:da38ed3d65b8091447dc3717e5218cc336d20303b77b0634b261bc5c1aa2bae8", - "sha256:1641724c1055459a7e2b8bbe47ba25bdc89554582e62aec23cb3f3ca25f9b129", - "sha256:6871acba8fbe744efa4f9f34e726d070bfbf9bffb356a8f6d64557846324232b", - "sha256:8e7659dd994792a0aad8fb80439f59055a21163e236faf2f9823beb63a380e19", - "sha256:4c10f317e379cc404f8fc510cd9982d5d3e7ba13a9cfd39aa251d894c6366798", - "sha256:6593026cd3f5daaea12bcc51ae5c979318070fefee210e7990cb8ac2364e79a1", - "sha256:35be1c5d869966569d3dfd4ec31832d7c780e9df760e1fe52131105685941891", - "sha256:052eb5b9ba756808a7825e8a8020fb146ec489dd5c919e7d139014775411e688", - "sha256:17df66e87d0fe0193910aeaa938c99f0b04f67b430edb8adae01e7be557b141b", - "sha256:2df5260d0f2983309776cb41bfa85c464ec07018d88c0ecfca23d40bfadae2f1" + "sha256:de618e67b64a51a0768d26a9963ecd7d338a2cf6e9e7582d2385f88ad005b3d1", + "sha256:e3afccf0437edc108eef1e2bb9cc4c7073e7705924eb4cd0bf7715cd1ef0ce1b" ], "version": "==1.7.1" }, @@ -687,10 +704,11 @@ }, "sqlakeyset": { "hashes": [ - "sha256:933b3fe8e97d4648ce85e31df9d97c461f20d72105317208b3d2af39ffedb57b", - "sha256:ecc9abcf4e4e681c13ceea4fa45883bb5ebe8f43925f63bbd29ad37c6b34ba26" + "sha256:0a42aa35fb6ef4e92d1db24c02e169e8bf01e96566f4c0adc605896969f9ec97", + "sha256:6b9f5e1ff0a556e7dc0b5916d7ce762c6a3a6f9537f481d91d6a2c5818fac0fa" ], - "version": "==1.0.1602495765" + "markers": "python_version >= '3.4'", + "version": "==1.0.1611286921" }, "sqlalchemy": { "hashes": [ @@ -1037,29 +1055,29 @@ }, "pyyaml": { "hashes": [ - "sha256:02c78d77281d8f8d07a255e57abdbf43b02257f59f50cc6b636937d68efa5dd0", - "sha256:0dc9f2eb2e3c97640928dec63fd8dc1dd91e6b6ed236bd5ac00332b99b5c2ff9", - "sha256:124fd7c7bc1e95b1eafc60825f2daf67c73ce7b33f1194731240d24b0d1bf628", - "sha256:26fcb33776857f4072601502d93e1a619f166c9c00befb52826e7b774efaa9db", - "sha256:31ba07c54ef4a897758563e3a0fcc60077698df10180abe4b8165d9895c00ebf", - "sha256:3c49e39ac034fd64fd576d63bb4db53cda89b362768a67f07749d55f128ac18a", - "sha256:52bf0930903818e600ae6c2901f748bc4869c0c406056f679ab9614e5d21a166", - "sha256:5a3f345acff76cad4aa9cb171ee76c590f37394186325d53d1aa25318b0d4a09", - "sha256:5e7ac4e0e79a53451dc2814f6876c2fa6f71452de1498bbe29c0b54b69a986f4", - "sha256:7242790ab6c20316b8e7bb545be48d7ed36e26bbe279fd56f2c4a12510e60b4b", - 
"sha256:737bd70e454a284d456aa1fa71a0b429dd527bcbf52c5c33f7c8eee81ac16b89", - "sha256:8635d53223b1f561b081ff4adecb828fd484b8efffe542edcfdff471997f7c39", - "sha256:8b818b6c5a920cbe4203b5a6b14256f0e5244338244560da89b7b0f1313ea4b6", - "sha256:8bf38641b4713d77da19e91f8b5296b832e4db87338d6aeffe422d42f1ca896d", - "sha256:a36a48a51e5471513a5aea920cdad84cbd56d70a5057cca3499a637496ea379c", - "sha256:b2243dd033fd02c01212ad5c601dafb44fbb293065f430b0d3dbf03f3254d615", - "sha256:cc547d3ead3754712223abb7b403f0a184e4c3eae18c9bb7fd15adef1597cc4b", - "sha256:cc552b6434b90d9dbed6a4f13339625dc466fd82597119897e9489c953acbc22", - "sha256:f3790156c606299ff499ec44db422f66f05a7363b39eb9d5b064f17bd7d7c47b", - "sha256:f7a21e3d99aa3095ef0553e7ceba36fb693998fbb1226f1392ce33681047465f", - "sha256:fdc6b2cb4b19e431994f25a9160695cc59a4e861710cc6fc97161c5e845fc579" - ], - "version": "==5.4" + "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", + "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", + "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", + "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", + "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", + "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", + "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", + "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", + "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", + "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", + "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", + "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", + "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", + "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", + "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", + "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", + "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", + "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", + "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", + "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", + "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc" + ], + "version": "==5.4.1" }, "requests": { "hashes": [ diff --git a/setup.py b/setup.py index 4d2a1e9a0..d5fccabc3 100644 --- a/setup.py +++ b/setup.py @@ -20,6 +20,7 @@ "stac-pydantic>=1.3.5", "pydantic[dotenv]", "titiler==0.1.0a12", + "fastapi-utils", ] extra_reqs = { diff --git a/stac_api/api/app.py b/stac_api/api/app.py index 6f2225e06..62fc3c7e6 100644 --- a/stac_api/api/app.py +++ b/stac_api/api/app.py @@ -4,11 +4,8 @@ import attr from fastapi import APIRouter, FastAPI from fastapi.openapi.utils import get_openapi -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker from stac_pydantic import ItemCollection from stac_pydantic.api import ConformanceClasses, LandingPage -from starlette.requests import Request from stac_api.api.extensions import FieldsExtension from stac_api.api.extensions.extension import ApiExtension @@ -25,7 +22,6 @@ from stac_api.config import ApiSettings, inject_settings from stac_api.errors import DEFAULT_STATUS_CODES, add_exception_handlers from stac_api.models import schemas -from 
stac_api.utils.dependencies import READER, WRITER
 
 
 @attr.s
@@ -164,45 +160,6 @@ async def ping():
 
         self.app.include_router(mgmt_router, tags=["Liveliness/Readiness"])
 
-    def setup_db_connection(self):
-        """setup database connection"""
-
-        @self.app.on_event("startup")
-        async def on_startup():
-            """Create database engines and sessions on startup"""
-            self.app.state.ENGINE_READER = create_engine(
-                self.settings.reader_connection_string, echo=self.settings.debug
-            )
-            self.app.state.ENGINE_WRITER = create_engine(
-                self.settings.writer_connection_string, echo=self.settings.debug
-            )
-            self.app.state.DB_READER = sessionmaker(
-                autocommit=False, autoflush=False, bind=self.app.state.ENGINE_READER
-            )
-            self.app.state.DB_WRITER = sessionmaker(
-                autocommit=False, autoflush=False, bind=self.app.state.ENGINE_WRITER
-            )
-
-        @self.app.on_event("shutdown")
-        async def on_shutdown():
-            """Dispose of database engines and sessions on app shutdown"""
-            self.app.state.ENGINE_READER.dispose()
-            self.app.state.ENGINE_WRITER.dispose()
-
-        @self.app.middleware("http")
-        async def create_db_connection(request: Request, call_next):
-            """Create a new database connection for each request"""
-            if "titiler" in str(request.url):
-                return await call_next(request)
-            reader = request.app.state.DB_READER()
-            writer = request.app.state.DB_WRITER()
-            READER.set(reader)
-            WRITER.set(writer)
-            resp = await call_next(request)
-            reader.close()
-            writer.close()
-            return resp
-
     def __attrs_post_init__(self):
         """post-init hook"""
         # inject settings
@@ -226,7 +183,5 @@ def __attrs_post_init__(self):
         # register exception handlers
         add_exception_handlers(self.app, status_codes=self.exceptions)
 
-        self.setup_db_connection()
-
         # customize openapi
         self.app.openapi = self.customize_openapi
diff --git a/stac_api/app.py b/stac_api/app.py
index 905fcab6b..53d5078d0 100644
--- a/stac_api/app.py
+++ b/stac_api/app.py
@@ -8,19 +8,22 @@
     TransactionExtension,
 )
 from stac_api.clients.postgres.core import CoreCrudClient
-from stac_api.clients.postgres.tokens import PaginationTokenClient
+from stac_api.clients.postgres.session import Session
 from stac_api.clients.postgres.transactions import TransactionsClient
+from stac_api.clients.tiles.ogc import TilesClient
 from stac_api.config import ApiSettings
 
+settings = ApiSettings()
+session = Session(settings.reader_connection_string, settings.writer_connection_string)
 api = StacApi(
-    settings=ApiSettings(),
+    settings=settings,
     extensions=[
-        TransactionExtension(client=TransactionsClient()),
+        TransactionExtension(client=TransactionsClient(session=session)),
         FieldsExtension(),
         QueryExtension(),
         SortExtension(),
-        TilesExtension(),
+        TilesExtension(TilesClient(session=session)),
     ],
-    client=CoreCrudClient(pagination_client=PaginationTokenClient()),
+    client=CoreCrudClient(session=session),
 )
 app = api.app
diff --git a/stac_api/clients/postgres/base.py b/stac_api/clients/postgres/base.py
deleted file mode 100644
index bf882724b..000000000
--- a/stac_api/clients/postgres/base.py
+++ /dev/null
@@ -1,70 +0,0 @@
-"""Postgresql base client"""
-import abc
-import logging
-from typing import Optional, Type
-
-import attr
-import sqlalchemy as sa
-from sqlalchemy.orm import Query
-
-import psycopg2
-from stac_api import errors
-from stac_api.models import database
-from stac_api.utils.dependencies import READER, WRITER
-
-logger = logging.getLogger(__name__)
-
-
-@attr.s
-class PostgresClient(abc.ABC):
-    """Database CRUD operations on the defined table"""
-
-    table: Type[database.BaseModel] = attr.ib(default=database.BaseModel)
-
-    @property
-    def reader_session(self):
-        """Get reader session from context var"""
-        return READER.get()
-
-    @property
-    def writer_session(self):
-        """Get writer session from context var"""
-        return WRITER.get()
-
-    @staticmethod
-    def row_exists(query: Query) -> bool:
-        """Check if a record exists from the sqlalchemy query object"""
-        return True if query.scalar() else False
-
-    def commit(self) -> None:
-        """Commit both reader and writer sessions to keep them in sync, rolling back on psycopg2 errors"""
-        try:
-            self.writer_session.commit()
-            self.reader_session.commit()
-        except sa.exc.IntegrityError as e:
-            self.writer_session.rollback()
-            self.reader_session.rollback()
-            logger.error(e.orig.pgerror, exc_info=True)
-            # Explicitly catch foreign key errors to be reraised by the API as validation errors
-            if isinstance(e.orig, psycopg2.errors.ForeignKeyViolation):
-                raise errors.ForeignKeyError(e.orig.pgerror)
-            raise errors.DatabaseError(e.orig.pgerror) from e
-        except Exception as e:
-            logger.error(e, exc_info=True)
-            raise errors.DatabaseError("Unhandled database exception during commit")
-
-    def lookup_id(
-        self, item_id: str, table: Optional[Type[database.BaseModel]] = None
-    ) -> Query:
-        """Create a query to access a single record from the table"""
-        table = table or self.table
-        try:
-            query = self.reader_session.query(table).filter(table.id == item_id)
-        except Exception as e:
-            logger.error(e, exc_info=True)
-            raise errors.DatabaseError("Unhandled database during ID lookup")
-        if not self.row_exists(query):
-            error_message = f"Row {item_id} does not exist"
-            logger.warning(error_message)
-            raise errors.NotFoundError(error_message)
-        return query
diff --git a/stac_api/clients/postgres/core.py b/stac_api/clients/postgres/core.py
index 18013e581..3504e2d02 100644
--- a/stac_api/clients/postgres/core.py
+++ b/stac_api/clients/postgres/core.py
@@ -8,6 +8,7 @@
 import attr
 import sqlalchemy as sa
 from sqlalchemy import func
+from sqlalchemy.orm import Session as SqlSession
 from stac_pydantic import ItemCollection
 from stac_pydantic.api import ConformanceClasses, LandingPage
 from stac_pydantic.api.extensions.paging import PaginationLink
@@ -15,12 +16,11 @@
 import geoalchemy2 as ga
 from sqlakeyset import get_page
 
-from stac_api import errors
 from stac_api.api.extensions import ContextExtension, FieldsExtension
 from stac_api.clients.base import BaseCoreClient
-from stac_api.clients.postgres.base import PostgresClient
+from stac_api.clients.postgres.session import Session
 from stac_api.clients.postgres.tokens import PaginationTokenClient
-from stac_api.errors import DatabaseError
+from stac_api.errors import NotFoundError
 from stac_api.models import database, schemas
 from stac_api.models.links import CollectionLinks
 
@@ -30,13 +30,23 @@
 
 @attr.s
-class CoreCrudClient(PostgresClient, BaseCoreClient):
+class CoreCrudClient(PaginationTokenClient, BaseCoreClient):
     """Client for core endpoints defined by stac"""
 
-    pagination_client: PaginationTokenClient = attr.ib(default=None)
-    table: Type[database.Item] = attr.ib(default=database.Item)
+    session: Session = attr.ib(default=attr.Factory(Session.create_from_env))
+    item_table: Type[database.Item] = attr.ib(default=database.Item)
     collection_table: Type[database.Collection] = attr.ib(default=database.Collection)
 
+    @staticmethod
+    def _lookup_id(
+        id: str, table: Type[database.BaseModel], session: SqlSession
+    ) -> Type[database.BaseModel]:
+        """lookup row by id"""
+        row = session.query(table).filter(table.id 
== id).first() + if not row: + raise NotFoundError(f"{table.__name__} {id} not found") + return row + def landing_page(self, **kwargs) -> LandingPage: """landing page""" landing_page = LandingPage( @@ -89,36 +99,32 @@ def conformance(self, **kwargs) -> ConformanceClasses: def all_collections(self, **kwargs) -> List[schemas.Collection]: """Read all collections from the database""" - try: - collections = self.reader_session.query(self.collection_table).all() - except Exception as e: - logger.error(e, exc_info=True) - raise errors.DatabaseError( - "Unhandled database error when getting item collection" - ) - - response = [] - for collection in collections: - collection.base_url = str(kwargs["request"].base_url) - response.append(schemas.Collection.from_orm(collection)) - return response + with self.session.reader.context_session() as session: + collections = session.query(self.collection_table).all() + response = [] + for collection in collections: + collection.base_url = str(kwargs["request"].base_url) + response.append(schemas.Collection.from_orm(collection)) + return response def get_collection(self, id: str, **kwargs) -> schemas.Collection: """Get collection by id""" - collection = self.lookup_id(id, table=self.collection_table).first() - collection.base_url = str(kwargs["request"].base_url) - return schemas.Collection.from_orm(collection) + with self.session.reader.context_session() as session: + collection = self._lookup_id(id, self.collection_table, session) + # TODO: Don't do this + collection.base_url = str(kwargs["request"].base_url) + return schemas.Collection.from_orm(collection) def item_collection( self, id: str, limit: int = 10, token: str = None, **kwargs ) -> ItemCollection: """Read an item collection from the database""" - try: + with self.session.reader.context_session() as session: collection_children = ( - self.reader_session.query(self.table) + session.query(self.item_table) .join(self.collection_table) .filter(self.collection_table.id == id) - .order_by(self.table.datetime.desc(), self.table.id) + .order_by(self.item_table.datetime.desc(), self.item_table.id) ) count = None if self.extension_is_enabled(ContextExtension): @@ -126,68 +132,62 @@ def item_collection( [func.count()] ).order_by(None) count = collection_children.session.execute(count_query).scalar() - token = self.pagination_client.get(token) if token else token + token = self.get_token(token) if token else token page = get_page(collection_children, per_page=limit, page=(token or False)) # Create dynamic attributes for each page page.next = ( - self.pagination_client.insert(keyset=page.paging.bookmark_next) + self.insert_token(keyset=page.paging.bookmark_next) if page.paging.has_next else None ) page.previous = ( - self.pagination_client.insert(keyset=page.paging.bookmark_previous) + self.insert_token(keyset=page.paging.bookmark_previous) if page.paging.has_previous else None ) - except errors.NotFoundError: - raise - except Exception as e: - logger.error(e, exc_info=True) - raise errors.DatabaseError( - "Unhandled database error when getting collection children" - ) - links = [] - if page.next: - links.append( - PaginationLink( - rel=Relations.next, - type="application/geo+json", - href=f"{kwargs['request'].base_url}collections/{id}/items?token={page.next}&limit={limit}", - method="GET", + links = [] + if page.next: + links.append( + PaginationLink( + rel=Relations.next, + type="application/geo+json", + href=f"{kwargs['request'].base_url}collections/{id}/items?token={page.next}&limit={limit}", + method="GET", 
+ ) ) - ) - if page.previous: - links.append( - PaginationLink( - rel=Relations.previous, - type="application/geo+json", - href=f"{kwargs['request'].base_url}collections/{id}/items?token={page.previous}&limit={limit}", - method="GET", + if page.previous: + links.append( + PaginationLink( + rel=Relations.previous, + type="application/geo+json", + href=f"{kwargs['request'].base_url}collections/{id}/items?token={page.previous}&limit={limit}", + method="GET", + ) ) - ) - response_features = [] - for item in page: - item.base_url = str(kwargs["request"].base_url) - response_features.append(schemas.Item.from_orm(item)) + response_features = [] + for item in page: + item.base_url = str(kwargs["request"].base_url) + response_features.append(schemas.Item.from_orm(item)) - context_obj = None - if self.extension_is_enabled(ContextExtension): - context_obj = {"returned": len(page), "limit": limit, "matched": count} + context_obj = None + if self.extension_is_enabled(ContextExtension): + context_obj = {"returned": len(page), "limit": limit, "matched": count} - return ItemCollection( - type="FeatureCollection", - context=context_obj, - features=response_features, - links=links, - ) + return ItemCollection( + type="FeatureCollection", + context=context_obj, + features=response_features, + links=links, + ) def get_item(self, id: str, **kwargs) -> schemas.Item: """Get item by id""" - obj = self.lookup_id(id).first() - obj.base_url = str(kwargs["request"].base_url) - return schemas.Item.from_orm(obj) + with self.session.reader.context_session() as session: + item = self._lookup_id(id, self.item_table, session) + item.base_url = str(kwargs["request"].base_url) + return schemas.Item.from_orm(item) def get_search( self, @@ -263,95 +263,94 @@ def post_search( self, search_request: schemas.STACSearch, **kwargs ) -> Dict[str, Any]: """POST search catalog""" - token = ( - self.pagination_client.get(search_request.token) - if search_request.token - else False - ) - query = self.reader_session.query(self.table) - - # Filter by collection - count = None - if search_request.collections: - query = query.join(self.collection_table).filter( - sa.or_( - *[ - self.collection_table.id == col_id - for col_id in search_request.collections - ] - ) + with self.session.reader.context_session() as session: + token = ( + self.get_token(search_request.token) if search_request.token else False ) + query = session.query(self.item_table) - # Sort - if search_request.sortby: - sort_fields = [ - getattr(self.table.get_field(sort.field), sort.direction.value)() - for sort in search_request.sortby - ] - sort_fields.append(self.table.id) - query = query.order_by(*sort_fields) - else: - # Default sort is date - query = query.order_by(self.table.datetime.desc(), self.table.id) - - # Ignore other parameters if ID is present - if search_request.ids: - id_filter = sa.or_(*[self.table.id == i for i in search_request.ids]) - try: - items = query.filter(id_filter).order_by(self.table.id) + # Filter by collection + count = None + if search_request.collections: + query = query.join(self.collection_table).filter( + sa.or_( + *[ + self.collection_table.id == col_id + for col_id in search_request.collections + ] + ) + ) + + # Sort + if search_request.sortby: + sort_fields = [ + getattr( + self.item_table.get_field(sort.field), sort.direction.value + )() + for sort in search_request.sortby + ] + sort_fields.append(self.item_table.id) + query = query.order_by(*sort_fields) + else: + # Default sort is date + query = query.order_by( + 
self.item_table.datetime.desc(), self.item_table.id + ) + + # Ignore other parameters if ID is present + if search_request.ids: + id_filter = sa.or_( + *[self.item_table.id == i for i in search_request.ids] + ) + items = query.filter(id_filter).order_by(self.item_table.id) page = get_page(items, per_page=search_request.limit, page=token) if self.extension_is_enabled(ContextExtension): count = len(search_request.ids) page.next = ( - self.pagination_client.insert(keyset=page.paging.bookmark_next) + self.insert_token(keyset=page.paging.bookmark_next) if page.paging.has_next else None ) page.previous = ( - self.pagination_client.insert(keyset=page.paging.bookmark_previous) + self.insert_token(keyset=page.paging.bookmark_previous) if page.paging.has_previous else None ) - except Exception as e: - logger.error(e, exc_info=True) - raise DatabaseError( - "Unhandled database error when searching for items by id" - ) - else: - # Spatial query - poly = search_request.polygon() - if poly: - filter_geom = ga.shape.from_shape(poly, srid=4326) - query = query.filter( - ga.func.ST_Intersects(self.table.geometry, filter_geom) - ) - # Temporal query - if search_request.datetime: - # Two tailed query (between) - if ".." not in search_request.datetime: - query = query.filter( - self.table.datetime.between(*search_request.datetime) - ) - # All items after the start date - if search_request.datetime[0] != "..": - query = query.filter( - self.table.datetime >= search_request.datetime[0] - ) - # All items before the end date - if search_request.datetime[1] != "..": + else: + # Spatial query + poly = search_request.polygon() + if poly: + filter_geom = ga.shape.from_shape(poly, srid=4326) query = query.filter( - self.table.datetime <= search_request.datetime[1] + ga.func.ST_Intersects(self.item_table.geometry, filter_geom) ) - # Query fields - if search_request.query: - for (field_name, expr) in search_request.query.items(): - field = self.table.get_field(field_name) - for (op, value) in expr.items(): - query = query.filter(op.operator(field, value)) + # Temporal query + if search_request.datetime: + # Two tailed query (between) + if ".." 
not in search_request.datetime: + query = query.filter( + self.item_table.datetime.between(*search_request.datetime) + ) + # All items after the start date + if search_request.datetime[0] != "..": + query = query.filter( + self.item_table.datetime >= search_request.datetime[0] + ) + # All items before the end date + if search_request.datetime[1] != "..": + query = query.filter( + self.item_table.datetime <= search_request.datetime[1] + ) + + # Query fields + if search_request.query: + for (field_name, expr) in search_request.query.items(): + field = self.item_table.get_field(field_name) + for (op, value) in expr.items(): + query = query.filter(op.operator(field, value)) - try: if self.extension_is_enabled(ContextExtension): count_query = query.statement.with_only_columns( [func.count()] @@ -360,57 +359,53 @@ def post_search( page = get_page(query, per_page=search_request.limit, page=token) # Create dynamic attributes for each page page.next = ( - self.pagination_client.insert(keyset=page.paging.bookmark_next) + self.insert_token(keyset=page.paging.bookmark_next) if page.paging.has_next else None ) page.previous = ( - self.pagination_client.insert(keyset=page.paging.bookmark_previous) + self.insert_token(keyset=page.paging.bookmark_previous) if page.paging.has_previous else None ) - except Exception as e: - logger.error(e, exc_info=True) - raise DatabaseError( - "Unhandled database error during spatial/temporal query" - ) - links = [] - if page.next: - links.append( - PaginationLink( - rel=Relations.next, - type="application/geo+json", - href=f"{kwargs['request'].base_url}search", - method="POST", - body={"token": page.next}, - merge=True, + + links = [] + if page.next: + links.append( + PaginationLink( + rel=Relations.next, + type="application/geo+json", + href=f"{kwargs['request'].base_url}search", + method="POST", + body={"token": page.next}, + merge=True, + ) ) - ) - if page.previous: - links.append( - PaginationLink( - rel=Relations.previous, - type="application/geo+json", - href=f"{kwargs['request'].base_url}search", - method="POST", - body={"token": page.previous}, - merge=True, + if page.previous: + links.append( + PaginationLink( + rel=Relations.previous, + type="application/geo+json", + href=f"{kwargs['request'].base_url}search", + method="POST", + body={"token": page.previous}, + merge=True, + ) ) - ) - response_features = [] - filter_kwargs = {} - if self.extension_is_enabled(FieldsExtension): - filter_kwargs = search_request.field.filter_fields - - xvals = [] - yvals = [] - for item in page: - item.base_url = str(kwargs["request"].base_url) - item_model = schemas.Item.from_orm(item) - xvals += [item_model.bbox[0], item_model.bbox[2]] - yvals += [item_model.bbox[1], item_model.bbox[3]] - response_features.append(item_model.to_dict(**filter_kwargs)) + response_features = [] + filter_kwargs = {} + if self.extension_is_enabled(FieldsExtension): + filter_kwargs = search_request.field.filter_fields + + xvals = [] + yvals = [] + for item in page: + item.base_url = str(kwargs["request"].base_url) + item_model = schemas.Item.from_orm(item) + xvals += [item_model.bbox[0], item_model.bbox[2]] + yvals += [item_model.bbox[1], item_model.bbox[3]] + response_features.append(item_model.to_dict(**filter_kwargs)) try: bbox = (min(xvals), min(yvals), max(xvals), max(yvals)) diff --git a/stac_api/clients/postgres/session.py b/stac_api/clients/postgres/session.py new file mode 100644 index 000000000..3c3d9efd5 --- /dev/null +++ b/stac_api/clients/postgres/session.py @@ -0,0 +1,50 @@ +"""database 
session management""" +import logging +import os +from contextlib import contextmanager +from typing import Iterator + +import attr +import sqlalchemy as sa +import sqlalchemy.exc +from sqlalchemy.orm import Session as SqlSession + +import psycopg2 +from fastapi_utils.session import FastAPISessionMaker as _FastAPISessionMaker +from stac_api import errors + +logger = logging.getLogger(__name__) + + +class FastAPISessionMaker(_FastAPISessionMaker): + """FastAPISessionMaker""" + + @contextmanager + def context_session(self) -> Iterator[SqlSession]: + """override base method to include exception handling""" + try: + yield from self.get_db() + except sa.exc.StatementError as e: + if isinstance(e.orig, psycopg2.errors.UniqueViolation): + raise errors.ConflictError("resource already exists") from e + elif isinstance(e.orig, psycopg2.errors.ForeignKeyViolation): + raise errors.ForeignKeyError("collection does not exist") from e + logger.error(e, exc_info=True) + raise errors.DatabaseError("unhandled database error") + + +@attr.s +class Session: + reader_conn_string: str = attr.ib() + writer_conn_string: str = attr.ib() + + @classmethod + def create_from_env(cls): + return cls( + reader_conn_string=os.environ["READER_CONN_STRING"], + writer_conn_string=os.environ["WRITER_CONN_STRING"], + ) + + def __attrs_post_init__(self): + self.reader: FastAPISessionMaker = FastAPISessionMaker(self.reader_conn_string) + self.writer: FastAPISessionMaker = FastAPISessionMaker(self.writer_conn_string) diff --git a/stac_api/clients/postgres/tokens.py b/stac_api/clients/postgres/tokens.py index d6f330d3c..ae144da11 100644 --- a/stac_api/clients/postgres/tokens.py +++ b/stac_api/clients/postgres/tokens.py @@ -1,44 +1,55 @@ """Pagination token client.""" - +import abc +import logging import os from base64 import urlsafe_b64encode from typing import Type import attr +from sqlalchemy.orm import Session as SqlSession -from stac_api.clients.postgres.base import PostgresClient +from stac_api.clients.postgres.session import Session from stac_api.errors import DatabaseError from stac_api.models import database +logger = logging.getLogger(__name__) + @attr.s -class PaginationTokenClient(PostgresClient): +class PaginationTokenClient(abc.ABC): """Pagination token specific CRUD operations""" - table: Type[database.PaginationToken] = attr.ib(default=database.PaginationToken) + session: Session = attr.ib(default=attr.Factory(Session.create_from_env)) + token_table: Type[database.PaginationToken] = attr.ib( + default=database.PaginationToken + ) + + @staticmethod + @abc.abstractmethod + def _lookup_id( + id: str, table: Type[database.BaseModel], session: SqlSession + ) -> Type[database.BaseModel]: + """lookup row by id""" + ... 
- def insert(self, keyset: str, tries: int = 0) -> str: # type:ignore + def insert_token(self, keyset: str, tries: int = 0) -> str: # type:ignore """Insert a keyset into the database""" # uid has collision chance of 1e-7 percent uid = urlsafe_b64encode(os.urandom(6)).decode() - try: - token = database.PaginationToken(id=uid, keyset=keyset) - self.writer_session.add(token) - self.commit() - return uid - except DatabaseError: - # Try again if uid already exists in the database - # TODO: Explicitely check for ConflictError (if insert fails for other reasons it should be raised) - self.insert(keyset, tries=tries + 1) - if tries > 5: - raise - - def get(self, token_id: str) -> str: + with self.session.writer.context_session() as session: + try: + token = database.PaginationToken(id=uid, keyset=keyset) + session.add(token) + return uid + except DatabaseError: + # Try again if uid already exists in the database + # TODO: Explicitely check for ConflictError (if insert fails for other reasons it should be raised) + if tries > 5: + raise + self.insert_token(keyset, tries=tries + 1) + + def get_token(self, token_id: str) -> str: """Retrieve a keyset from the database""" - row = self.lookup_id(token_id).first() - return row.keyset - - -def pagination_token_client_factory() -> PaginationTokenClient: - """FastAPI dependency""" - return PaginationTokenClient() + with self.session.reader.context_session() as session: + token = self._lookup_id(token_id, self.token_table, session) + return token.keyset diff --git a/stac_api/clients/postgres/transactions.py b/stac_api/clients/postgres/transactions.py index 52a572e8e..40b6ec368 100644 --- a/stac_api/clients/postgres/transactions.py +++ b/stac_api/clients/postgres/transactions.py @@ -2,117 +2,102 @@ import json import logging -from typing import Dict, List, Optional, Type, Union +from typing import Dict, List, Optional, Type import attr from sqlalchemy import create_engine -from stac_api import errors from stac_api.clients.base import BaseTransactionsClient, BulkTransactionsClient -from stac_api.clients.postgres.base import PostgresClient +from stac_api.clients.postgres.session import Session +from stac_api.errors import NotFoundError from stac_api.models import database, schemas logger = logging.getLogger(__name__) @attr.s -class TransactionsClient(PostgresClient, BaseTransactionsClient): +class TransactionsClient(BaseTransactionsClient): """Transactions extension specific CRUD operations""" - table: Type[database.Collection] = attr.ib(default=database.Collection) + session: Session = attr.ib(default=attr.Factory(Session.create_from_env)) + collection_table: Type[database.Collection] = attr.ib(default=database.Collection) item_table: Type[database.Item] = attr.ib(default=database.Item) - @property - def collection_table(self): - """alias for `self.table` # TODO: Figure out a better way to do this""" - return self.table - - def _create( - self, - model: Union[schemas.Collection, schemas.Item], - table: Union[Type[database.Collection], Type[database.Item]], - ) -> Union[database.Collection, database.Item]: - """Create a single record""" - try: - self.lookup_id(model.id, table=table) - error_message = f"Row {model.id} already exists" - logger.error(error_message, exc_info=True) - raise errors.ConflictError(error_message) - except errors.NotFoundError: - row_data = table.from_schema(model) - self.writer_session.add(row_data) - self.commit() - return row_data - - def _update( - self, - model: Union[schemas.Collection, schemas.Item], - table: 
Union[Type[database.Collection], Type[database.Item]], - ) -> Union[database.Collection, database.Item]: - """Create a single record if it does not exist or update an existing record""" - try: - query = self.lookup_id(model.id, table=table) - update_data = table.get_database_model(model) - # SQLAlchemy orm updates don't seem to like geoalchemy types - update_data.pop("geometry", None) - query.update(update_data) - self.commit() - return table.from_schema(model) - except errors.NotFoundError: - row_data = table.from_schema(model) - self.writer_session.add(row_data) - self.commit() - return row_data - - def _delete( - self, item_id: str, table: Union[Type[database.Collection], Type[database.Item]] - ) -> Union[database.Collection, database.Item]: - """Delete a single record""" - query = self.lookup_id(item_id, table=table) - row_data = query.first() - query.delete() - self.commit() - return row_data - def create_item(self, model: schemas.Item, **kwargs) -> schemas.Item: - """Create an item""" - obj = self._create(model, table=self.item_table) - obj.base_url = str(kwargs["request"].base_url) - return schemas.Item.from_orm(obj) - - def update_item(self, model: schemas.Item, **kwargs) -> schemas.Item: - """Update an item""" - obj = self._update(model, table=self.item_table) - obj.base_url = str(kwargs["request"].base_url) - return schemas.Item.from_orm(obj) - - def delete_item(self, id: str, **kwargs) -> schemas.Item: - """Delete an item""" - obj = self._delete(id, table=self.item_table) - obj.base_url = str(kwargs["request"].base_url) - return schemas.Item.from_orm(obj) + """create item""" + data = self.item_table.from_schema(model) + with self.session.writer.context_session() as session: + session.add(data) + data.base_url = str(kwargs["request"].base_url) + return schemas.Item.from_orm(data) def create_collection( self, model: schemas.Collection, **kwargs ) -> schemas.Collection: - """Create a collection""" - obj = self._create(model, table=self.collection_table) - obj.base_url = str(kwargs["request"].base_url) - return schemas.Collection.from_orm(obj) + """create collection""" + data = self.collection_table.from_schema(model) + with self.session.writer.context_session() as session: + session.add(data) + data.base_url = str(kwargs["request"].base_url) + return schemas.Collection.from_orm(data) + + def update_item(self, model: schemas.Item, **kwargs) -> schemas.Item: + """update item""" + with self.session.reader.context_session() as session: + query = session.query(self.item_table).filter( + self.item_table.id == model.id + ) + if not query.scalar(): + raise NotFoundError(f"Item {model.id} not found") + # SQLAlchemy orm updates don't seem to like geoalchemy types + data = self.item_table.get_database_model(model) + data.pop("geometry", None) + query.update(data) + + response = self.item_table.from_schema(model) + response.base_url = str(kwargs["request"].base_url) + return schemas.Item.from_orm(response) + return model def update_collection( self, model: schemas.Collection, **kwargs ) -> schemas.Collection: - """Update a collection""" - obj = self._update(model, table=self.collection_table) - obj.base_url = str(kwargs["request"].base_url) - return schemas.Collection.from_orm(obj) + """update collection""" + with self.session.reader.context_session() as session: + query = session.query(self.collection_table).filter( + self.collection_table.id == model.id + ) + if not query.scalar(): + raise NotFoundError(f"Item {model.id} not found") + # SQLAlchemy orm updates don't seem to like geoalchemy 
types + data = self.collection_table.get_database_model(model) + data.pop("geometry", None) + query.update(data) + return model + + def delete_item(self, id: str, **kwargs) -> schemas.Item: + """delete item""" + with self.session.writer.context_session() as session: + query = session.query(self.item_table).filter(self.item_table.id == id) + data = query.first() + if not data: + raise NotFoundError(f"Item {id} not found") + query.delete() + data.base_url = str(kwargs["request"].base_url) + return schemas.Item.from_orm(data) def delete_collection(self, id: str, **kwargs) -> schemas.Collection: - """Delete a collection""" - obj = self._delete(id, table=self.collection_table) - obj.base_url = str(kwargs["request"].base_url) - return schemas.Collection.from_orm(obj) + """delete collection""" + with self.session.writer.context_session() as session: + query = session.query(self.collection_table).filter( + self.collection_table.id == id + ) + data = query.first() + if not data: + raise NotFoundError(f"Collection {id} not found") + query.delete() + data.base_url = str(kwargs["request"].base_url) + return schemas.Collection.from_orm(data) @attr.s diff --git a/stac_api/utils/__init__.py b/stac_api/utils/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/stac_api/utils/dependencies.py b/stac_api/utils/dependencies.py deleted file mode 100644 index a1e87a08a..000000000 --- a/stac_api/utils/dependencies.py +++ /dev/null @@ -1,7 +0,0 @@ -"""FastAPI dependencies.""" - -from contextvars import ContextVar - -# TODO: Find a new home -READER: ContextVar = ContextVar("reader") -WRITER: ContextVar = ContextVar("writer") diff --git a/tests/conftest.py b/tests/conftest.py index bb490115a..41cc4d2e1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,11 +1,8 @@ import json import os -from typing import Callable, Dict, Generator -from unittest.mock import PropertyMock, patch +from typing import Callable, Dict import pytest -from sqlalchemy import create_engine -from sqlalchemy.orm import Session, sessionmaker from starlette.testclient import TestClient from stac_api.api.app import StacApi @@ -17,12 +14,13 @@ TransactionExtension, ) from stac_api.clients.postgres.core import CoreCrudClient -from stac_api.clients.postgres.tokens import PaginationTokenClient +from stac_api.clients.postgres.session import Session from stac_api.clients.postgres.transactions import ( PostgresBulkTransactions, TransactionsClient, ) from stac_api.config import ApiSettings, inject_settings +from stac_api.models import database from stac_api.models.schemas import Collection DATA_DIR = os.path.join(os.path.dirname(__file__), "data") @@ -70,62 +68,30 @@ class MockStarletteRequest: @pytest.fixture -def sqlalchemy_engine(): - engine = create_engine(settings.reader_connection_string) - yield engine - engine.dispose() - - -@pytest.fixture -def reader_connection(sqlalchemy_engine) -> Generator[Session, None, None]: - """Create a reader connection""" - db_session = sessionmaker( - autocommit=False, autoflush=False, bind=sqlalchemy_engine - )() - yield db_session - db_session.close() - - -@pytest.fixture -def writer_connection(sqlalchemy_engine) -> Generator[Session, None, None]: - """Create a writer connection""" - db_session = sessionmaker( - autocommit=False, autoflush=False, bind=sqlalchemy_engine - )() - yield db_session - db_session.close() +def db_session() -> Session: + return Session( + reader_conn_string=settings.reader_connection_string, + writer_conn_string=settings.writer_connection_string, + ) 
@pytest.fixture -def postgres_core(reader_connection, writer_connection): - with patch( - "stac_api.clients.postgres.base.PostgresClient.writer_session", - new_callable=PropertyMock, - ) as mock_writer: - mock_writer.return_value = writer_connection - with patch( - "stac_api.clients.postgres.base.PostgresClient.reader_session", - new_callable=PropertyMock, - ) as mock_reader: - mock_reader.return_value = reader_connection - client = CoreCrudClient() - yield client +def postgres_core(db_session): + return CoreCrudClient( + session=db_session, + item_table=database.Item, + collection_table=database.Collection, + token_table=database.PaginationToken, + ) @pytest.fixture -def postgres_transactions(reader_connection, writer_connection): - with patch( - "stac_api.clients.postgres.base.PostgresClient.writer_session", - new_callable=PropertyMock, - ) as mock_writer: - mock_writer.return_value = writer_connection - with patch( - "stac_api.clients.postgres.base.PostgresClient.reader_session", - new_callable=PropertyMock, - ) as mock_reader: - mock_reader.return_value = reader_connection - client = TransactionsClient() - yield client +def postgres_transactions(db_session): + return TransactionsClient( + session=db_session, + item_table=database.Item, + collection_table=database.Collection, + ) @pytest.fixture @@ -135,12 +101,12 @@ def postgres_bulk_transactions(): @pytest.fixture -def api_client(): +def api_client(db_session): return StacApi( settings=ApiSettings(), - client=CoreCrudClient(pagination_client=PaginationTokenClient()), + client=CoreCrudClient(session=db_session), extensions=[ - TransactionExtension(client=TransactionsClient()), + TransactionExtension(client=TransactionsClient(session=db_session)), ContextExtension(), SortExtension(), FieldsExtension(), diff --git a/tests/resources/test_collection.py b/tests/resources/test_collection.py index 68136c58a..1a1a3b3c1 100644 --- a/tests/resources/test_collection.py +++ b/tests/resources/test_collection.py @@ -43,12 +43,7 @@ def test_update_new_collection(app_client, load_test_data): test_collection["id"] = "new-test-collection" resp = app_client.put("/collections", json=test_collection) - assert resp.status_code == 200 - - resp = app_client.get(f"/collections/{test_collection['id']}") - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["id"] == test_collection["id"] + assert resp.status_code == 404 def test_collection_not_found(app_client): diff --git a/tests/resources/test_item.py b/tests/resources/test_item.py index 9933ddbf1..d75fd927d 100644 --- a/tests/resources/test_item.py +++ b/tests/resources/test_item.py @@ -7,7 +7,6 @@ from urllib.parse import parse_qs, urlparse, urlsplit from shapely.geometry import Polygon - from stac_pydantic.api.search import DATETIME_RFC339 @@ -79,12 +78,20 @@ def test_update_new_item(app_client, load_test_data): resp = app_client.put( f"/collections/{test_item['collection']}/items", json=test_item ) - assert resp.status_code == 200 + assert resp.status_code == 404 def test_update_item_missing_collection(app_client, load_test_data): """Test updating an item without a parent collection (transactions extension)""" test_item = load_test_data("test_item.json") + + # Create the item + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + # Try to update collection of the item test_item["collection"] = "stac is cool" resp = app_client.put( f"/collections/{test_item['collection']}/items", json=test_item